author    Matthew LeGendre <legendre1@llnl.gov>  2016-01-25 10:52:17 -0800
committer Matthew LeGendre <legendre1@llnl.gov>  2016-01-25 10:52:17 -0800
commit    fa888a4ba15176e9d415ffced41f6f16801f1938 (patch)
tree      f39f431b0bd37f385353c62d0f6ce1a981abff3f
parent    fac4428766fb0a6b6cd357b654215f55df1220d4 (diff)
parent    04a8439c39e674abb89d7a3f8dea82810c7ca682 (diff)
Merge branch 'develop' into features/external-packages
Conflicts:
	lib/spack/spack/cmd/mirror.py
	lib/spack/spack/concretize.py
	lib/spack/spack/config.py
	lib/spack/spack/spec.py
	lib/spack/spack/stage.py
	var/spack/packages/mvapich2/package.py
-rw-r--r--  .gitignore | 1
-rw-r--r--  .mailmap | 2
-rw-r--r--  .travis.yml | 26
-rw-r--r--  LICENSE | 2
-rw-r--r--  README.md | 49
-rwxr-xr-x  bin/spack | 40
-rwxr-xr-x  bin/spack-python | 2
-rw-r--r--  etc/spack/repos.yaml | 8
-rw-r--r--  lib/spack/docs/conf.py | 9
-rw-r--r--  lib/spack/docs/exts/sphinxcontrib/__init__.py | 2
-rw-r--r--  lib/spack/docs/exts/sphinxcontrib/programoutput.py | 2
-rw-r--r--  lib/spack/docs/getting_started.rst | 6
-rw-r--r--  lib/spack/docs/index.rst | 4
-rwxr-xr-x  lib/spack/env/cc | 16
l---------  lib/spack/env/clang/clang | 1
l---------  lib/spack/env/clang/clang++ | 1
l---------  lib/spack/env/gcc/g++ | 1
l---------  lib/spack/env/gcc/gcc | 1
l---------  lib/spack/env/gcc/gfortran | 1
l---------  lib/spack/env/intel/icc | 1
l---------  lib/spack/env/intel/icpc | 1
l---------  lib/spack/env/intel/ifort | 1
l---------  lib/spack/env/nag/nagfor | 1
l---------  lib/spack/env/pgi/case-insensitive/pgCC | 1
l---------  lib/spack/env/pgi/pgcc | 1
l---------  lib/spack/env/pgi/pgf77 | 1
l---------  lib/spack/env/pgi/pgf90 | 1
l---------  lib/spack/env/xl/xlc | 1
l---------  lib/spack/env/xl/xlc++ | 1
l---------  lib/spack/env/xl/xlf | 1
l---------  lib/spack/env/xl/xlf90 | 1
-rw-r--r--  lib/spack/external/__init__.py | 2
-rw-r--r--  lib/spack/external/argparse.py | 22
-rw-r--r--  lib/spack/external/functools_backport.py (renamed from lib/spack/external/functools.py) | 0
-rw-r--r--  lib/spack/external/jsonschema/COPYING | 19
-rw-r--r--  lib/spack/external/jsonschema/README.rst | 104
-rw-r--r--  lib/spack/external/jsonschema/__init__.py | 26
-rw-r--r--  lib/spack/external/jsonschema/__main__.py | 2
-rw-r--r--  lib/spack/external/jsonschema/_format.py | 240
-rw-r--r--  lib/spack/external/jsonschema/_reflect.py | 155
-rw-r--r--  lib/spack/external/jsonschema/_utils.py | 213
-rw-r--r--  lib/spack/external/jsonschema/_validators.py | 358
-rw-r--r--  lib/spack/external/jsonschema/cli.py | 72
-rw-r--r--  lib/spack/external/jsonschema/compat.py | 53
-rw-r--r--  lib/spack/external/jsonschema/exceptions.py | 264
-rw-r--r--  lib/spack/external/jsonschema/schemas/draft3.json | 201
-rw-r--r--  lib/spack/external/jsonschema/schemas/draft4.json | 221
-rw-r--r--  lib/spack/external/jsonschema/tests/__init__.py | 0
-rw-r--r--  lib/spack/external/jsonschema/tests/compat.py | 15
-rw-r--r--  lib/spack/external/jsonschema/tests/test_cli.py | 110
-rw-r--r--  lib/spack/external/jsonschema/tests/test_exceptions.py | 382
-rw-r--r--  lib/spack/external/jsonschema/tests/test_format.py | 63
-rw-r--r--  lib/spack/external/jsonschema/tests/test_jsonschema_test_suite.py | 290
-rw-r--r--  lib/spack/external/jsonschema/tests/test_validators.py | 786
-rw-r--r--  lib/spack/external/jsonschema/validators.py | 428
-rw-r--r--  lib/spack/external/nose/LICENSE | 504
-rw-r--r--  lib/spack/external/nose/__init__.py | 15
-rw-r--r--  lib/spack/external/nose/__main__.py | 8
-rw-r--r--  lib/spack/external/nose/case.py | 397
-rw-r--r--  lib/spack/external/nose/commands.py | 172
-rw-r--r--  lib/spack/external/nose/config.py | 661
-rw-r--r--  lib/spack/external/nose/core.py | 341
-rw-r--r--  lib/spack/external/nose/exc.py | 9
-rw-r--r--  lib/spack/external/nose/ext/__init__.py | 3
-rw-r--r--  lib/spack/external/nose/ext/dtcompat.py | 2272
-rw-r--r--  lib/spack/external/nose/failure.py | 42
-rw-r--r--  lib/spack/external/nose/importer.py | 167
-rw-r--r--  lib/spack/external/nose/inspector.py | 207
-rw-r--r--  lib/spack/external/nose/loader.py | 623
-rw-r--r--  lib/spack/external/nose/plugins/__init__.py | 190
-rw-r--r--  lib/spack/external/nose/plugins/allmodules.py | 45
-rw-r--r--  lib/spack/external/nose/plugins/attrib.py | 286
-rw-r--r--  lib/spack/external/nose/plugins/base.py | 725
-rw-r--r--  lib/spack/external/nose/plugins/builtin.py | 34
-rw-r--r--  lib/spack/external/nose/plugins/capture.py | 115
-rw-r--r--  lib/spack/external/nose/plugins/collect.py | 94
-rw-r--r--  lib/spack/external/nose/plugins/cover.py | 271
-rw-r--r--  lib/spack/external/nose/plugins/debug.py | 67
-rw-r--r--  lib/spack/external/nose/plugins/deprecated.py | 45
-rw-r--r--  lib/spack/external/nose/plugins/doctests.py | 455
-rw-r--r--  lib/spack/external/nose/plugins/errorclass.py | 210
-rw-r--r--  lib/spack/external/nose/plugins/failuredetail.py | 49
-rw-r--r--  lib/spack/external/nose/plugins/isolate.py | 103
-rw-r--r--  lib/spack/external/nose/plugins/logcapture.py | 245
-rw-r--r--  lib/spack/external/nose/plugins/manager.py | 460
-rw-r--r--  lib/spack/external/nose/plugins/multiprocess.py | 835
-rw-r--r--  lib/spack/external/nose/plugins/plugintest.py | 416
-rw-r--r--  lib/spack/external/nose/plugins/prof.py | 154
-rw-r--r--  lib/spack/external/nose/plugins/skip.py | 63
-rw-r--r--  lib/spack/external/nose/plugins/testid.py | 311
-rw-r--r--  lib/spack/external/nose/plugins/xunit.py | 341
-rw-r--r--  lib/spack/external/nose/proxy.py | 188
-rw-r--r--  lib/spack/external/nose/pyversion.py | 215
-rw-r--r--  lib/spack/external/nose/result.py | 200
-rw-r--r--  lib/spack/external/nose/selector.py | 251
-rw-r--r--  lib/spack/external/nose/sphinx/__init__.py | 1
-rw-r--r--  lib/spack/external/nose/sphinx/pluginopts.py | 189
-rw-r--r--  lib/spack/external/nose/suite.py | 609
-rw-r--r--  lib/spack/external/nose/tools/__init__.py | 15
-rw-r--r--  lib/spack/external/nose/tools/nontrivial.py | 151
-rw-r--r--  lib/spack/external/nose/tools/trivial.py | 54
-rw-r--r--  lib/spack/external/nose/twistedtools.py | 173
-rw-r--r--  lib/spack/external/nose/usage.txt | 115
-rw-r--r--  lib/spack/external/nose/util.py | 668
-rw-r--r--  lib/spack/external/ordereddict_backport.py (renamed from lib/spack/external/ordereddict.py) | 0
-rwxr-xr-x  lib/spack/external/pyqver2.py | 3
-rw-r--r--  lib/spack/llnl/util/filesystem.py | 4
-rw-r--r--  lib/spack/llnl/util/lang.py | 42
-rw-r--r--  lib/spack/llnl/util/link_tree.py | 2
-rw-r--r--  lib/spack/llnl/util/lock.py | 175
-rw-r--r--  lib/spack/llnl/util/tty/__init__.py | 41
-rw-r--r--  lib/spack/llnl/util/tty/colify.py | 27
-rw-r--r--  lib/spack/llnl/util/tty/color.py | 7
-rw-r--r--  lib/spack/llnl/util/tty/log.py | 36
-rw-r--r--  lib/spack/spack/__init__.py | 60
-rw-r--r--  lib/spack/spack/architecture.py | 21
-rw-r--r--  lib/spack/spack/build_environment.py | 66
-rw-r--r--  lib/spack/spack/cmd/__init__.py | 13
-rw-r--r--  lib/spack/spack/cmd/activate.py | 4
-rw-r--r--  lib/spack/spack/cmd/arch.py | 2
-rw-r--r--  lib/spack/spack/cmd/bootstrap.py | 4
-rw-r--r--  lib/spack/spack/cmd/cd.py | 2
-rw-r--r--  lib/spack/spack/cmd/checksum.py | 6
-rw-r--r--  lib/spack/spack/cmd/clean.py | 6
-rw-r--r--  lib/spack/spack/cmd/compiler.py | 69
-rw-r--r--  lib/spack/spack/cmd/compilers.py | 7
-rw-r--r--  lib/spack/spack/cmd/config.py | 16
-rw-r--r--  lib/spack/spack/cmd/create.py | 121
-rw-r--r--  lib/spack/spack/cmd/deactivate.py | 8
-rw-r--r--  lib/spack/spack/cmd/dependents.py | 4
-rw-r--r--  lib/spack/spack/cmd/diy.py | 60
-rw-r--r--  lib/spack/spack/cmd/doc.py | 2
-rw-r--r--  lib/spack/spack/cmd/edit.py | 62
-rw-r--r--  lib/spack/spack/cmd/env.py | 4
-rw-r--r--  lib/spack/spack/cmd/extensions.py | 19
-rw-r--r--  lib/spack/spack/cmd/fetch.py | 19
-rw-r--r--  lib/spack/spack/cmd/find.py | 52
-rw-r--r--  lib/spack/spack/cmd/graph.py | 4
-rw-r--r--  lib/spack/spack/cmd/help.py | 2
-rw-r--r--  lib/spack/spack/cmd/info.py | 20
-rw-r--r--  lib/spack/spack/cmd/install.py | 21
-rw-r--r--  lib/spack/spack/cmd/list.py | 6
-rw-r--r--  lib/spack/spack/cmd/load.py | 4
-rw-r--r--  lib/spack/spack/cmd/location.py | 12
-rw-r--r--  lib/spack/spack/cmd/md5.py | 4
-rw-r--r--  lib/spack/spack/cmd/mirror.py | 75
-rw-r--r--  lib/spack/spack/cmd/module.py | 8
-rw-r--r--  lib/spack/spack/cmd/package-list.py | 6
-rw-r--r--  lib/spack/spack/cmd/patch.py | 6
-rw-r--r--  lib/spack/spack/cmd/pkg.py | 8
-rw-r--r--  lib/spack/spack/cmd/providers.py | 6
-rw-r--r--  lib/spack/spack/cmd/purge.py | 2
-rw-r--r--  lib/spack/spack/cmd/python.py | 14
-rw-r--r--  lib/spack/spack/cmd/reindex.py | 31
-rw-r--r--  lib/spack/spack/cmd/repo.py | 218
-rw-r--r--  lib/spack/spack/cmd/restage.py | 6
-rw-r--r--  lib/spack/spack/cmd/spec.py | 4
-rw-r--r--  lib/spack/spack/cmd/stage.py | 6
-rw-r--r--  lib/spack/spack/cmd/test-install.py | 211
-rw-r--r--  lib/spack/spack/cmd/test.py | 22
-rw-r--r--  lib/spack/spack/cmd/uninstall.py | 99
-rw-r--r--  lib/spack/spack/cmd/unload.py | 4
-rw-r--r--  lib/spack/spack/cmd/unuse.py | 4
-rw-r--r--  lib/spack/spack/cmd/url-parse.py | 75
-rw-r--r--  lib/spack/spack/cmd/urls.py | 4
-rw-r--r--  lib/spack/spack/cmd/use.py | 4
-rw-r--r--  lib/spack/spack/cmd/versions.py | 4
-rw-r--r--  lib/spack/spack/compiler.py | 4
-rw-r--r--  lib/spack/spack/compilers/__init__.py | 166
-rw-r--r--  lib/spack/spack/compilers/clang.py | 8
-rw-r--r--  lib/spack/spack/compilers/gcc.py | 8
-rw-r--r--  lib/spack/spack/compilers/intel.py | 8
-rw-r--r--  lib/spack/spack/compilers/nag.py | 33
-rw-r--r--  lib/spack/spack/compilers/pgi.py | 8
-rw-r--r--  lib/spack/spack/compilers/xl.py | 8
-rw-r--r--  lib/spack/spack/concretize.py | 78
-rw-r--r--  lib/spack/spack/config.py | 793
-rw-r--r--  lib/spack/spack/database.py | 632
-rw-r--r--  lib/spack/spack/directives.py | 66
-rw-r--r--  lib/spack/spack/directory_layout.py | 10
-rw-r--r--  lib/spack/spack/error.py | 6
-rw-r--r--  lib/spack/spack/fetch_strategy.py | 30
-rw-r--r--  lib/spack/spack/graph.py | 4
-rw-r--r--  lib/spack/spack/hooks/__init__.py | 2
-rw-r--r--  lib/spack/spack/hooks/dotkit.py | 2
-rw-r--r--  lib/spack/spack/hooks/extensions.py | 6
-rw-r--r--  lib/spack/spack/hooks/tclmodule.py | 2
-rw-r--r--  lib/spack/spack/mirror.py | 85
-rw-r--r--  lib/spack/spack/modules.py | 6
-rw-r--r--  lib/spack/spack/multimethod.py | 2
-rw-r--r--  lib/spack/spack/package.py | 228
-rw-r--r--  lib/spack/spack/packages.py | 251
-rw-r--r--  lib/spack/spack/parse.py | 2
-rw-r--r--  lib/spack/spack/patch.py | 8
-rw-r--r--  lib/spack/spack/preferred_packages.py | 30
-rw-r--r--  lib/spack/spack/repository.py | 747
-rw-r--r--  lib/spack/spack/resource.py | 41
-rw-r--r--  lib/spack/spack/spec.py | 142
-rw-r--r--  lib/spack/spack/stage.py | 65
-rw-r--r--  lib/spack/spack/test/__init__.py | 49
-rw-r--r--  lib/spack/spack/test/cc.py | 8
-rw-r--r--  lib/spack/spack/test/concretize.py | 18
-rw-r--r--  lib/spack/spack/test/config.py | 100
-rw-r--r--  lib/spack/spack/test/configure_guess.py | 2
-rw-r--r--  lib/spack/spack/test/database.py | 353
-rw-r--r--  lib/spack/spack/test/directory_layout.py | 31
-rw-r--r--  lib/spack/spack/test/git_fetch.py | 4
-rw-r--r--  lib/spack/spack/test/hg_fetch.py | 4
-rw-r--r--  lib/spack/spack/test/install.py | 4
-rw-r--r--  lib/spack/spack/test/link_tree.py | 2
-rw-r--r--  lib/spack/spack/test/lock.py | 266
-rw-r--r--  lib/spack/spack/test/make_executable.py | 50
-rw-r--r--  lib/spack/spack/test/mirror.py | 4
-rw-r--r--  lib/spack/spack/test/mock_packages_test.py | 99
-rw-r--r--  lib/spack/spack/test/mock_repo.py | 6
-rw-r--r--  lib/spack/spack/test/multimethod.py | 50
-rw-r--r--  lib/spack/spack/test/namespace_trie.py | 114
-rw-r--r--  lib/spack/spack/test/optional_deps.py | 2
-rw-r--r--  lib/spack/spack/test/package_sanity.py | 17
-rw-r--r--  lib/spack/spack/test/packages.py | 58
-rw-r--r--  lib/spack/spack/test/python_version.py | 12
-rw-r--r--  lib/spack/spack/test/spec_dag.py | 22
-rw-r--r--  lib/spack/spack/test/spec_semantics.py | 47
-rw-r--r--  lib/spack/spack/test/spec_syntax.py | 2
-rw-r--r--  lib/spack/spack/test/spec_yaml.py | 2
-rw-r--r--  lib/spack/spack/test/stage.py | 2
-rw-r--r--  lib/spack/spack/test/svn_fetch.py | 6
-rw-r--r--  lib/spack/spack/test/tally_plugin.py | 59
-rw-r--r--  lib/spack/spack/test/unit_install.py | 125
-rw-r--r--  lib/spack/spack/test/url_extrapolate.py | 2
-rw-r--r--  lib/spack/spack/test/url_parse.py | 2
-rw-r--r--  lib/spack/spack/test/url_substitution.py | 3
-rw-r--r--  lib/spack/spack/test/versions.py | 2
-rw-r--r--  lib/spack/spack/test/yaml.py | 93
-rw-r--r--  lib/spack/spack/url.py | 8
-rw-r--r--  lib/spack/spack/util/__init__.py | 2
-rw-r--r--  lib/spack/spack/util/compression.py | 2
-rw-r--r--  lib/spack/spack/util/crypto.py | 2
-rw-r--r--  lib/spack/spack/util/debug.py | 2
-rw-r--r--  lib/spack/spack/util/environment.py | 2
-rw-r--r--  lib/spack/spack/util/executable.py | 106
-rw-r--r--  lib/spack/spack/util/multiproc.py | 52
-rw-r--r--  lib/spack/spack/util/naming.py | 151
-rw-r--r--  lib/spack/spack/util/prefix.py | 2
-rw-r--r--  lib/spack/spack/util/spack_yaml.py | 201
-rw-r--r--  lib/spack/spack/util/string.py | 2
-rw-r--r--  lib/spack/spack/util/web.py | 142
-rw-r--r--  lib/spack/spack/variant.py | 2
-rw-r--r--  lib/spack/spack/version.py | 4
-rw-r--r--  lib/spack/spack/virtual.py | 19
-rw-r--r--  share/spack/csh/spack.csh | 2
-rwxr-xr-x  share/spack/logo/favicon.ico | bin 0 -> 1150 bytes
-rw-r--r--  share/spack/logo/spack-logo-text-64.png | bin 0 -> 18644 bytes
-rw-r--r--  share/spack/logo/spack-logo-white-text-48.png | bin 0 -> 12201 bytes
-rwxr-xr-x  share/spack/setup-env.csh | 2
-rwxr-xr-x  share/spack/setup-env.sh | 16
-rw-r--r--  var/spack/mock_configs/site_spackconfig/compilers.yaml | 12
-rw-r--r--  var/spack/mock_configs/site_spackconfig/packages.yaml | 13
-rw-r--r--  var/spack/packages/R/package.py | 33
-rw-r--r--  var/spack/packages/binutils/package.py | 17
-rw-r--r--  var/spack/packages/boost/package.py | 66
-rw-r--r--  var/spack/packages/bzip2/package.py | 34
-rw-r--r--  var/spack/packages/cube/package.py | 55
-rw-r--r--  var/spack/packages/czmq/package.py | 19
-rw-r--r--  var/spack/packages/git/package.py | 27
-rw-r--r--  var/spack/packages/hdf5/package.py | 41
-rw-r--r--  var/spack/packages/libpciaccess/package.py | 21
-rw-r--r--  var/spack/packages/libxml2/package.py | 20
-rw-r--r--  var/spack/packages/llvm/package.py | 54
-rw-r--r--  var/spack/packages/metis/package.py | 27
-rw-r--r--  var/spack/packages/mrnet/package.py | 20
-rw-r--r--  var/spack/packages/mvapich2/package.py | 117
-rw-r--r--  var/spack/packages/netcdf/package.py | 28
-rw-r--r--  var/spack/packages/opari2/package.py | 65
-rw-r--r--  var/spack/packages/openmpi/package.py | 41
-rw-r--r--  var/spack/packages/openssl/package.py | 26
-rw-r--r--  var/spack/packages/otf2/package.py | 74
-rw-r--r--  var/spack/packages/parmetis/package.py | 26
-rw-r--r--  var/spack/packages/py-numpy/package.py | 28
-rw-r--r--  var/spack/packages/scalasca/package.py | 65
-rw-r--r--  var/spack/packages/scorep/package.py | 74
-rw-r--r--  var/spack/packages/scotch/package.py | 40
-rw-r--r--  var/spack/packages/tau/package.py | 36
-rw-r--r--  var/spack/packages/zsh/package.py | 16
-rw-r--r--  var/spack/repos/builtin.mock/packages/a/package.py (renamed from var/spack/mock_packages/a/package.py) | 0
-rw-r--r--  var/spack/repos/builtin.mock/packages/b/package.py (renamed from var/spack/mock_packages/b/package.py) | 0
-rw-r--r--  var/spack/repos/builtin.mock/packages/c/package.py (renamed from var/spack/mock_packages/c/package.py) | 0
-rw-r--r--  var/spack/repos/builtin.mock/packages/callpath/package.py (renamed from var/spack/mock_packages/callpath/package.py) | 2
-rw-r--r--  var/spack/repos/builtin.mock/packages/direct_mpich/package.py (renamed from var/spack/mock_packages/direct_mpich/package.py) | 2
-rw-r--r--  var/spack/repos/builtin.mock/packages/dyninst/package.py (renamed from var/spack/mock_packages/dyninst/package.py) | 2
-rw-r--r--  var/spack/repos/builtin.mock/packages/e/package.py (renamed from var/spack/mock_packages/e/package.py) | 0
-rw-r--r--  var/spack/repos/builtin.mock/packages/externalprereq/package.py (renamed from var/spack/mock_packages/externalprereq/package.py) | 0
-rw-r--r--  var/spack/repos/builtin.mock/packages/externaltest/package.py (renamed from var/spack/mock_packages/externaltest/package.py) | 0
-rw-r--r--  var/spack/repos/builtin.mock/packages/externaltool/package.py (renamed from var/spack/mock_packages/externaltool/package.py) | 0
-rw-r--r--  var/spack/repos/builtin.mock/packages/externalvirtual/package.py (renamed from var/spack/mock_packages/externalvirtual/package.py) | 0
-rw-r--r--  var/spack/repos/builtin.mock/packages/fake/package.py (renamed from var/spack/mock_packages/fake/package.py) | 2
-rw-r--r--  var/spack/repos/builtin.mock/packages/git-test/package.py (renamed from var/spack/mock_packages/git-test/package.py) | 0
-rw-r--r--  var/spack/repos/builtin.mock/packages/hg-test/package.py (renamed from var/spack/mock_packages/hg-test/package.py) | 0
-rw-r--r--  var/spack/repos/builtin.mock/packages/indirect_mpich/package.py (renamed from var/spack/mock_packages/indirect_mpich/package.py) | 2
-rw-r--r--  var/spack/repos/builtin.mock/packages/libdwarf/package.py (renamed from var/spack/mock_packages/libdwarf/package.py) | 2
-rw-r--r--  var/spack/repos/builtin.mock/packages/libelf/package.py (renamed from var/spack/mock_packages/libelf/package.py) | 2
-rw-r--r--  var/spack/repos/builtin.mock/packages/mpich/package.py (renamed from var/spack/mock_packages/mpich/package.py) | 2
-rw-r--r--  var/spack/repos/builtin.mock/packages/mpich2/package.py (renamed from var/spack/mock_packages/mpich2/package.py) | 2
-rw-r--r--  var/spack/repos/builtin.mock/packages/mpileaks/package.py (renamed from var/spack/mock_packages/mpileaks/package.py) | 2
-rw-r--r--  var/spack/repos/builtin.mock/packages/multimethod/package.py (renamed from var/spack/mock_packages/multimethod/package.py) | 2
-rw-r--r--  var/spack/repos/builtin.mock/packages/netlib-blas/package.py | 36
-rw-r--r--  var/spack/repos/builtin.mock/packages/netlib-lapack/package.py | 37
-rw-r--r--  var/spack/repos/builtin.mock/packages/openblas/package.py | 37
-rw-r--r--  var/spack/repos/builtin.mock/packages/optional-dep-test-2/package.py (renamed from var/spack/mock_packages/optional-dep-test-2/package.py) | 0
-rw-r--r--  var/spack/repos/builtin.mock/packages/optional-dep-test-3/package.py (renamed from var/spack/mock_packages/optional-dep-test-3/package.py) | 0
-rw-r--r--  var/spack/repos/builtin.mock/packages/optional-dep-test/package.py (renamed from var/spack/mock_packages/optional-dep-test/package.py) | 0
-rw-r--r--  var/spack/repos/builtin.mock/packages/svn-test/package.py (renamed from var/spack/mock_packages/svn-test/package.py) | 0
-rw-r--r--  var/spack/repos/builtin.mock/packages/trivial_install_test_package/package.py (renamed from var/spack/mock_packages/trivial_install_test_package/package.py) | 2
-rw-r--r--  var/spack/repos/builtin.mock/packages/zmpi/package.py (renamed from var/spack/mock_packages/zmpi/package.py) | 2
-rw-r--r--  var/spack/repos/builtin.mock/repo.yaml | 2
-rw-r--r--  var/spack/repos/builtin/packages/ImageMagick/package.py (renamed from var/spack/packages/ImageMagick/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/Mitos/package.py (renamed from var/spack/packages/Mitos/package.py) | 13
-rw-r--r--  var/spack/repos/builtin/packages/R/package.py | 49
-rw-r--r--  var/spack/repos/builtin/packages/SAMRAI/no-tool-build.patch (renamed from var/spack/packages/SAMRAI/no-tool-build.patch) | 0
-rw-r--r--  var/spack/repos/builtin/packages/SAMRAI/package.py (renamed from var/spack/packages/SAMRAI/package.py) | 10
-rw-r--r--  var/spack/repos/builtin/packages/activeharmony/package.py | 15
-rw-r--r--  var/spack/repos/builtin/packages/adept-utils/package.py (renamed from var/spack/packages/adept-utils/package.py) | 6
-rw-r--r--  var/spack/repos/builtin/packages/apex/package.py | 31
-rw-r--r--  var/spack/repos/builtin/packages/arpack/package.py (renamed from var/spack/packages/arpack/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/asciidoc/package.py (renamed from var/spack/packages/asciidoc/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/atk/package.py (renamed from var/spack/packages/atk/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/atlas/package.py (renamed from var/spack/packages/atlas/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/atop/package.py | 16
-rw-r--r--  var/spack/repos/builtin/packages/autoconf/package.py (renamed from var/spack/packages/autoconf/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/automaded/package.py (renamed from var/spack/packages/automaded/package.py) | 6
-rw-r--r--  var/spack/repos/builtin/packages/automake/package.py (renamed from var/spack/packages/automake/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/bear/package.py (renamed from var/spack/packages/bear/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/bib2xhtml/package.py (renamed from var/spack/packages/bib2xhtml/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/binutils/binutilskrell-2.24.patch | 52
-rw-r--r--  var/spack/repos/builtin/packages/binutils/cr16.patch | 26
-rw-r--r--  var/spack/repos/builtin/packages/binutils/package.py | 40
-rw-r--r--  var/spack/repos/builtin/packages/bison/package.py (renamed from var/spack/packages/bison/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/boost/package.py | 148
-rw-r--r--  var/spack/repos/builtin/packages/bowtie2/bowtie2-2.5.patch (renamed from var/spack/packages/bowtie2/bowtie2-2.5.patch) | 0
-rw-r--r--  var/spack/repos/builtin/packages/bowtie2/package.py (renamed from var/spack/packages/bowtie2/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/boxlib/package.py (renamed from var/spack/packages/boxlib/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/bzip2/package.py | 60
-rw-r--r--  var/spack/repos/builtin/packages/cairo/package.py (renamed from var/spack/packages/cairo/package.py) | 1
-rw-r--r--  var/spack/repos/builtin/packages/callpath/package.py (renamed from var/spack/packages/callpath/package.py) | 6
-rw-r--r--  var/spack/repos/builtin/packages/cblas/package.py (renamed from var/spack/packages/cblas/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/cbtf-argonavis/package.py | 66
-rw-r--r--  var/spack/repos/builtin/packages/cbtf-krell/package.py | 116
-rw-r--r--  var/spack/repos/builtin/packages/cbtf-lanl/package.py | 60
-rw-r--r--  var/spack/repos/builtin/packages/cbtf/package.py | 62
-rw-r--r--  var/spack/repos/builtin/packages/cereal/Werror.patch | 33
-rw-r--r--  var/spack/repos/builtin/packages/cereal/package.py | 34
-rw-r--r--  var/spack/repos/builtin/packages/cfitsio/package.py | 18
-rw-r--r--  var/spack/repos/builtin/packages/cgal/package.py | 73
-rw-r--r--  var/spack/repos/builtin/packages/cgm/package.py (renamed from var/spack/packages/cgm/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/cityhash/package.py | 16
-rw-r--r--  var/spack/repos/builtin/packages/cleverleaf/package.py | 23
-rw-r--r--  var/spack/repos/builtin/packages/cloog/package.py (renamed from var/spack/packages/cloog/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/cmake/package.py (renamed from var/spack/packages/cmake/package.py) | 15
-rw-r--r--  var/spack/repos/builtin/packages/coreutils/package.py (renamed from var/spack/packages/coreutils/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/cppcheck/package.py (renamed from var/spack/packages/cppcheck/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/cram/package.py (renamed from var/spack/packages/cram/package.py) | 5
-rw-r--r--  var/spack/repos/builtin/packages/cscope/package.py (renamed from var/spack/packages/cscope/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/cube/package.py | 55
-rw-r--r--  var/spack/repos/builtin/packages/curl/package.py | 25
-rw-r--r--  var/spack/repos/builtin/packages/czmq/package.py | 33
-rw-r--r--  var/spack/repos/builtin/packages/damselfly/package.py | 14
-rw-r--r--  var/spack/repos/builtin/packages/dbus/package.py (renamed from var/spack/packages/dbus/package.py) | 5
-rw-r--r--  var/spack/repos/builtin/packages/docbook-xml/package.py (renamed from var/spack/packages/docbook-xml/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/doxygen/package.py (renamed from var/spack/packages/doxygen/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/dri2proto/package.py (renamed from var/spack/packages/dri2proto/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/dtcmp/package.py (renamed from var/spack/packages/dtcmp/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/dyninst/package.py (renamed from var/spack/packages/dyninst/package.py) | 7
-rw-r--r--  var/spack/repos/builtin/packages/eigen/package.py | 68
-rw-r--r--  var/spack/repos/builtin/packages/elfutils/package.py (renamed from var/spack/packages/elfutils/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/elpa/package.py | 55
-rw-r--r--  var/spack/repos/builtin/packages/expat/package.py | 17
-rw-r--r--  var/spack/repos/builtin/packages/extrae/package.py (renamed from var/spack/packages/extrae/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/exuberant-ctags/package.py (renamed from var/spack/packages/exuberant-ctags/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/fftw/package.py | 75
-rw-r--r--  var/spack/repos/builtin/packages/fish/package.py (renamed from var/spack/packages/fish/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/flex/package.py (renamed from var/spack/packages/flex/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/fltk/font.patch | 44
-rw-r--r--  var/spack/repos/builtin/packages/fltk/package.py | 58
-rw-r--r--  var/spack/repos/builtin/packages/flux/package.py (renamed from var/spack/packages/flux/package.py) | 7
-rw-r--r--  var/spack/repos/builtin/packages/fontconfig/package.py (renamed from var/spack/packages/fontconfig/package.py) | 3
-rw-r--r--  var/spack/repos/builtin/packages/freetype/package.py (renamed from var/spack/packages/freetype/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/gasnet/package.py (renamed from var/spack/packages/gasnet/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/gcc/package.py (renamed from var/spack/packages/gcc/package.py) | 85
-rw-r--r--  var/spack/repos/builtin/packages/gdb/package.py | 48
-rw-r--r--  var/spack/repos/builtin/packages/gdk-pixbuf/package.py (renamed from var/spack/packages/gdk-pixbuf/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/geos/package.py (renamed from var/spack/packages/geos/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/gflags/package.py (renamed from var/spack/packages/gflags/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/ghostscript/package.py (renamed from var/spack/packages/ghostscript/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/git/package.py | 59
-rw-r--r--  var/spack/repos/builtin/packages/glib/package.py (renamed from var/spack/packages/glib/package.py) | 3
-rw-r--r--  var/spack/repos/builtin/packages/glm/package.py | 19
-rw-r--r--  var/spack/repos/builtin/packages/global/package.py (renamed from var/spack/packages/global/package.py) | 6
-rw-r--r--  var/spack/repos/builtin/packages/glog/package.py (renamed from var/spack/packages/glog/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/glpk/package.py | 53
-rw-r--r--  var/spack/repos/builtin/packages/gmp/package.py (renamed from var/spack/packages/gmp/package.py) | 3
-rw-r--r--  var/spack/repos/builtin/packages/gmsh/package.py | 84
-rw-r--r--  var/spack/repos/builtin/packages/gnuplot/package.py | 61
-rw-r--r--  var/spack/repos/builtin/packages/gnutls/package.py (renamed from var/spack/packages/gnutls/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/gperf/package.py (renamed from var/spack/packages/gperf/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/gperftools/package.py (renamed from var/spack/packages/gperftools/package.py) | 5
-rw-r--r--  var/spack/repos/builtin/packages/graphlib/package.py (renamed from var/spack/packages/graphlib/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/graphviz/package.py (renamed from var/spack/packages/graphviz/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/gsl/package.py | 46
-rw-r--r--  var/spack/repos/builtin/packages/gtkplus/package.py (renamed from var/spack/packages/gtkplus/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/harfbuzz/package.py (renamed from var/spack/packages/harfbuzz/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/hdf5/package.py | 130
-rw-r--r--  var/spack/repos/builtin/packages/hwloc/package.py (renamed from var/spack/packages/hwloc/package.py) | 7
-rw-r--r--  var/spack/repos/builtin/packages/hypre/package.py (renamed from var/spack/packages/hypre/package.py) | 4
-rw-r--r--  var/spack/repos/builtin/packages/icu/package.py (renamed from var/spack/packages/icu/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/icu4c/package.py (renamed from var/spack/packages/icu4c/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/isl/package.py (renamed from var/spack/packages/isl/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/jdk/package.py (renamed from var/spack/packages/jdk/package.py) | 4
-rw-r--r--  var/spack/repos/builtin/packages/jemalloc/package.py | 24
-rw-r--r--  var/spack/repos/builtin/packages/jpeg/package.py (renamed from var/spack/packages/jpeg/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/judy/package.py | 15
-rw-r--r--  var/spack/repos/builtin/packages/julia/gc.patch | 11
-rw-r--r--  var/spack/repos/builtin/packages/julia/package.py | 70
-rw-r--r--  var/spack/repos/builtin/packages/launchmon/package.py (renamed from var/spack/packages/launchmon/package.py) | 14
-rw-r--r--  var/spack/repos/builtin/packages/launchmon/patch.lmon_install_dir (renamed from var/spack/packages/launchmon/patch.lmon_install_dir) | 0
-rw-r--r--  var/spack/repos/builtin/packages/lcms/package.py (renamed from var/spack/packages/lcms/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/leveldb/package.py (renamed from var/spack/packages/leveldb/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/libNBC/package.py (renamed from var/spack/packages/libNBC/package.py) | 2
-rw-r--r--  var/spack/repos/builtin/packages/libarchive/package.py (renamed from var/spack/packages/libarchive/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/libcerf/package.py | 42
-rw-r--r--  var/spack/repos/builtin/packages/libcircle/package.py (renamed from var/spack/packages/libcircle/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/libdrm/package.py (renamed from var/spack/packages/libdrm/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/libdwarf/package.py (renamed from var/spack/packages/libdwarf/package.py) | 2
-rw-r--r--  var/spack/repos/builtin/packages/libedit/package.py | 14
-rw-r--r--  var/spack/repos/builtin/packages/libelf/package.py (renamed from var/spack/packages/libelf/package.py) | 2
-rw-r--r--  var/spack/repos/builtin/packages/libevent/package.py (renamed from var/spack/packages/libevent/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/libffi/package.py (renamed from var/spack/packages/libffi/package.py) | 7
-rw-r--r--  var/spack/repos/builtin/packages/libgcrypt/package.py (renamed from var/spack/packages/libgcrypt/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/libgd/package.py (renamed from var/spack/packages/clang/package.py) | 29
-rw-r--r--  var/spack/repos/builtin/packages/libgpg-error/package.py (renamed from var/spack/packages/libgpg-error/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/libjpeg-turbo/package.py (renamed from var/spack/packages/libjpeg-turbo/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/libjson-c/package.py (renamed from var/spack/packages/libjson-c/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/libmng/package.py (renamed from var/spack/packages/libmng/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/libmonitor/libmonitorkrell-0000.patch | 18
-rw-r--r--  var/spack/repos/builtin/packages/libmonitor/libmonitorkrell-0001.patch | 395
-rw-r--r--  var/spack/repos/builtin/packages/libmonitor/libmonitorkrell-0002.patch | 106
-rw-r--r--  var/spack/repos/builtin/packages/libmonitor/package.py (renamed from var/spack/packages/libmonitor/package.py) | 9
-rw-r--r--  var/spack/repos/builtin/packages/libpciaccess/package.py | 23
-rw-r--r--  var/spack/repos/builtin/packages/libpng/package.py (renamed from var/spack/packages/libpng/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/libsodium/package.py (renamed from var/spack/packages/libsodium/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/libtiff/package.py (renamed from var/spack/packages/libtiff/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/libtool/package.py (renamed from var/spack/packages/libtool/package.py) | 1
-rw-r--r--  var/spack/repos/builtin/packages/libunwind/package.py (renamed from var/spack/packages/libunwind/package.py) | 2
-rw-r--r--  var/spack/repos/builtin/packages/libuuid/package.py (renamed from var/spack/packages/libuuid/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/libxcb/package.py (renamed from var/spack/packages/libxcb/package.py) | 12
-rw-r--r--  var/spack/repos/builtin/packages/libxml2/package.py | 30
-rw-r--r--  var/spack/repos/builtin/packages/libxshmfence/package.py (renamed from var/spack/packages/libxshmfence/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/libxslt/package.py (renamed from var/spack/packages/libxslt/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/llvm-lld/package.py (renamed from var/spack/packages/llvm-lld/package.py) | 2
-rw-r--r--  var/spack/repos/builtin/packages/llvm/package.py | 218
-rw-r--r--  var/spack/repos/builtin/packages/lmdb/package.py (renamed from var/spack/packages/lmdb/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/lmod/package.py | 26
-rw-r--r--  var/spack/repos/builtin/packages/lua/package.py (renamed from var/spack/packages/lua/package.py) | 12
-rw-r--r--  var/spack/repos/builtin/packages/lwgrp/package.py (renamed from var/spack/packages/lwgrp/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/lwm2/package.py (renamed from var/spack/packages/lwm2/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/matio/package.py | 15
-rw-r--r--  var/spack/repos/builtin/packages/mbedtls/package.py | 22
-rw-r--r--  var/spack/repos/builtin/packages/memaxes/package.py (renamed from var/spack/packages/memaxes/package.py) | 5
-rw-r--r--  var/spack/repos/builtin/packages/mesa/package.py (renamed from var/spack/packages/mesa/package.py) | 2
-rw-r--r--  var/spack/repos/builtin/packages/metis/package.py | 83
-rw-r--r--  var/spack/repos/builtin/packages/mpc/package.py (renamed from var/spack/packages/mpc/package.py) | 3
-rw-r--r--  var/spack/repos/builtin/packages/mpe2/mpe2.patch (renamed from var/spack/packages/mpe2/mpe2.patch) | 0
-rw-r--r--  var/spack/repos/builtin/packages/mpe2/package.py (renamed from var/spack/packages/mpe2/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/mpfr/package.py (renamed from var/spack/packages/mpfr/package.py) | 4
-rw-r--r--  var/spack/repos/builtin/packages/mpibash/mpibash-4.3.patch (renamed from var/spack/packages/mpibash/mpibash-4.3.patch) | 0
-rw-r--r--  var/spack/repos/builtin/packages/mpibash/package.py (renamed from var/spack/packages/mpibash/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/mpich/package.py (renamed from var/spack/packages/mpich/package.py) | 33
-rw-r--r--  var/spack/repos/builtin/packages/mpileaks/package.py (renamed from var/spack/packages/mpileaks/package.py) | 2
-rw-r--r--  var/spack/repos/builtin/packages/mrnet/package.py | 26
-rw-r--r--  var/spack/repos/builtin/packages/mumps/Makefile.inc | 38
-rw-r--r--  var/spack/repos/builtin/packages/mumps/package.py | 139
-rw-r--r--  var/spack/repos/builtin/packages/munge/package.py (renamed from var/spack/packages/munge/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/muster/package.py (renamed from var/spack/packages/muster/package.py) | 4
-rw-r--r--  var/spack/repos/builtin/packages/mvapich2/ad_lustre_rwcontig_open_source.patch (renamed from var/spack/packages/mvapich2/ad_lustre_rwcontig_open_source.patch) | 0
-rw-r--r--  var/spack/repos/builtin/packages/mvapich2/package.py | 198
-rw-r--r--  var/spack/repos/builtin/packages/nasm/package.py (renamed from var/spack/packages/nasm/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/ncdu/package.py | 28
-rw-r--r--  var/spack/repos/builtin/packages/ncurses/package.py (renamed from var/spack/packages/ncurses/package.py) | 27
-rw-r--r--  var/spack/repos/builtin/packages/ncurses/patch_gcc_5.txt | 12
-rw-r--r--  var/spack/repos/builtin/packages/netcdf/netcdf-4.3.3-mpi.patch | 25
-rw-r--r--  var/spack/repos/builtin/packages/netcdf/package.py | 28
-rw-r--r--  var/spack/repos/builtin/packages/netgauge/package.py (renamed from var/spack/packages/netgauge/package.py) | 2
-rw-r--r--  var/spack/repos/builtin/packages/netlib-blas/package.py (renamed from var/spack/packages/netlib-blas/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/netlib-lapack/package.py (renamed from var/spack/packages/netlib-lapack/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/netlib-scalapack/package.py | 50
-rw-r--r--  var/spack/repos/builtin/packages/nettle/package.py (renamed from var/spack/packages/nettle/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/ninja/package.py | 22
-rw-r--r--  var/spack/repos/builtin/packages/ompss/package.py (renamed from var/spack/packages/ompss/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/ompt-openmp/package.py | 23
-rw-r--r--  var/spack/repos/builtin/packages/opari2/package.py | 48
-rw-r--r--  var/spack/repos/builtin/packages/openblas/package.py | 25
-rw-r--r--  var/spack/repos/builtin/packages/openmpi/ad_lustre_rwcontig_open_source.patch (renamed from var/spack/packages/openmpi/ad_lustre_rwcontig_open_source.patch) | 0
-rw-r--r--  var/spack/repos/builtin/packages/openmpi/configure.patch | 31
-rw-r--r--  var/spack/repos/builtin/packages/openmpi/llnl-platforms.patch (renamed from var/spack/packages/openmpi/llnl-platforms.patch) | 0
-rw-r--r--  var/spack/repos/builtin/packages/openmpi/package.py | 135
-rw-r--r--  var/spack/repos/builtin/packages/openspeedshop/package.py | 216
-rw-r--r--  var/spack/repos/builtin/packages/openssl/package.py | 40
-rw-r--r--  var/spack/repos/builtin/packages/otf/package.py (renamed from var/spack/packages/otf/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/otf2/package.py | 55
-rw-r--r--  var/spack/repos/builtin/packages/pango/package.py (renamed from var/spack/packages/pango/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/papi/package.py (renamed from var/spack/packages/papi/package.py) | 14
-rw-r--r--  var/spack/repos/builtin/packages/paraver/package.py (renamed from var/spack/packages/paraver/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/paraview/package.py | 79
-rw-r--r--  var/spack/repos/builtin/packages/parmetis/package.py | 95
-rw-r--r--  var/spack/repos/builtin/packages/parpack/package.py (renamed from var/spack/packages/parpack/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/patchelf/package.py | 16
-rw-r--r--  var/spack/repos/builtin/packages/pcre/package.py (renamed from var/spack/packages/pcre/package.py) | 1
-rw-r--r--  var/spack/repos/builtin/packages/pcre2/package.py | 15
-rw-r--r--  var/spack/repos/builtin/packages/pdt/package.py | 45
-rw-r--r--  var/spack/repos/builtin/packages/petsc/package.py (renamed from var/spack/packages/petsc/package.py) | 8
-rw-r--r--  var/spack/repos/builtin/packages/pidx/package.py (renamed from var/spack/packages/pidx/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/pixman/package.py (renamed from var/spack/packages/pixman/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/pkg-config/package.py (renamed from var/spack/packages/pkg-config/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/pmgr_collective/package.py (renamed from var/spack/packages/pmgr_collective/package.py) | 2
-rw-r--r--  var/spack/repos/builtin/packages/postgresql/package.py (renamed from var/spack/packages/postgresql/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/ppl/package.py (renamed from var/spack/packages/ppl/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/protobuf/package.py (renamed from var/spack/packages/protobuf/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-astropy/package.py | 28
-rw-r--r--  var/spack/repos/builtin/packages/py-basemap/package.py (renamed from var/spack/packages/py-basemap/package.py) | 4
-rw-r--r--  var/spack/repos/builtin/packages/py-biopython/package.py (renamed from var/spack/packages/py-biopython/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-blessings/package.py | 15
-rw-r--r--  var/spack/repos/builtin/packages/py-cffi/package.py (renamed from var/spack/packages/py-cffi/package.py) | 2
-rw-r--r--  var/spack/repos/builtin/packages/py-coverage/package.py | 16
-rw-r--r--  var/spack/repos/builtin/packages/py-cython/package.py (renamed from var/spack/packages/py-cython/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-dateutil/package.py (renamed from var/spack/packages/py-dateutil/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-epydoc/package.py (renamed from var/spack/packages/py-epydoc/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-funcsigs/package.py | 19
-rw-r--r--  var/spack/repos/builtin/packages/py-genders/package.py (renamed from var/spack/packages/py-genders/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-gnuplot/package.py (renamed from var/spack/packages/py-gnuplot/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-h5py/package.py (renamed from var/spack/packages/py-h5py/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-ipython/package.py (renamed from var/spack/packages/py-ipython/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-libxml2/package.py (renamed from var/spack/packages/py-libxml2/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-lockfile/package.py (renamed from var/spack/packages/py-lockfile/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-mako/package.py (renamed from var/spack/packages/py-mako/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-matplotlib/package.py (renamed from var/spack/packages/py-matplotlib/package.py) | 19
-rw-r--r--  var/spack/repos/builtin/packages/py-mock/package.py (renamed from var/spack/packages/py-mock/package.py) | 1
-rw-r--r--  var/spack/repos/builtin/packages/py-mpi4py/package.py (renamed from var/spack/packages/py-mpi4py/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-mx/package.py (renamed from var/spack/packages/py-mx/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-mysqldb1/package.py | 15
-rw-r--r--  var/spack/repos/builtin/packages/py-nose/package.py (renamed from var/spack/packages/py-nose/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-numexpr/package.py | 15
-rw-r--r--  var/spack/repos/builtin/packages/py-numpy/package.py | 24
-rw-r--r--  var/spack/repos/builtin/packages/py-pandas/package.py (renamed from var/spack/packages/py-pandas/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-pbr/package.py | 18
-rw-r--r--  var/spack/repos/builtin/packages/py-periodictable/package.py | 17
-rw-r--r--  var/spack/repos/builtin/packages/py-pexpect/package.py (renamed from var/spack/packages/py-pexpect/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-pil/package.py (renamed from var/spack/packages/py-pil/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-pillow/package.py | 14
-rw-r--r--  var/spack/repos/builtin/packages/py-pmw/package.py (renamed from var/spack/packages/py-pmw/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-pychecker/package.py (renamed from var/spack/packages/py-pychecker/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-pycparser/package.py (renamed from var/spack/packages/py-pycparser/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-pyelftools/package.py (renamed from var/spack/packages/py-pyelftools/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-pygments/package.py (renamed from var/spack/packages/py-pygments/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-pylint/package.py (renamed from var/spack/packages/py-pylint/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-pypar/package.py (renamed from var/spack/packages/py-pypar/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-pyparsing/package.py (renamed from var/spack/packages/py-pyparsing/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-pyqt/package.py (renamed from var/spack/packages/py-pyqt/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-pyside/package.py (renamed from var/spack/packages/py-pyside/package.py) | 2
-rw-r--r--  var/spack/repos/builtin/packages/py-pytables/package.py | 19
-rw-r--r--  var/spack/repos/builtin/packages/py-python-daemon/package.py (renamed from var/spack/packages/py-python-daemon/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-pytz/package.py (renamed from var/spack/packages/py-pytz/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-rpy2/package.py (renamed from var/spack/packages/py-rpy2/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-scientificpython/package.py (renamed from var/spack/packages/py-scientificpython/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-scikit-learn/package.py (renamed from var/spack/packages/py-scikit-learn/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-scipy/package.py (renamed from var/spack/packages/py-scipy/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-setuptools/package.py (renamed from var/spack/packages/py-setuptools/package.py) | 1
-rw-r--r--  var/spack/repos/builtin/packages/py-shiboken/package.py (renamed from var/spack/packages/py-shiboken/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-sip/package.py (renamed from var/spack/packages/py-sip/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-six/package.py (renamed from var/spack/packages/py-six/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-sphinx/package.py | 13
-rw-r--r--  var/spack/repos/builtin/packages/py-sympy/package.py (renamed from var/spack/packages/py-sympy/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-tappy/package.py | 15
-rw-r--r--  var/spack/repos/builtin/packages/py-twisted/package.py | 16
-rw-r--r--  var/spack/repos/builtin/packages/py-urwid/package.py | 16
-rw-r--r--  var/spack/repos/builtin/packages/py-virtualenv/package.py (renamed from var/spack/packages/py-virtualenv/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/py-yapf/package.py (renamed from var/spack/packages/py-yapf/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/python/package.py (renamed from var/spack/packages/python/package.py) | 40
-rw-r--r--  var/spack/repos/builtin/packages/qhull/package.py (renamed from var/spack/packages/qhull/package.py) | 3
-rw-r--r--  var/spack/repos/builtin/packages/qhull/qhull-iterator.patch | 45
-rw-r--r--  var/spack/repos/builtin/packages/qt/package.py (renamed from var/spack/packages/qt/package.py) | 22
-rw-r--r--  var/spack/repos/builtin/packages/qt/qt3krell.patch | 68
-rw-r--r--  var/spack/repos/builtin/packages/qthreads/package.py (renamed from var/spack/packages/qthreads/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/ravel/package.py (renamed from var/spack/packages/ravel/package.py) | 4
-rw-r--r--  var/spack/repos/builtin/packages/readline/package.py (renamed from var/spack/packages/readline/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/rose/add_spack_compiler_recognition.patch (renamed from var/spack/packages/rose/add_spack_compiler_recognition.patch) | 0
-rw-r--r--  var/spack/repos/builtin/packages/rose/package.py (renamed from var/spack/packages/rose/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/rsync/package.py | 15
-rw-r--r--  var/spack/repos/builtin/packages/ruby/package.py (renamed from var/spack/packages/ruby/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/samtools/package.py (renamed from var/spack/packages/samtools/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/samtools/samtools1.2.patch (renamed from var/spack/packages/samtools/samtools1.2.patch) | 0
-rw-r--r--  var/spack/repos/builtin/packages/scalasca/package.py | 63
-rw-r--r--  var/spack/repos/builtin/packages/scorep/package.py | 72
-rw-r--r--  var/spack/repos/builtin/packages/scotch/package.py | 126
-rw-r--r--  var/spack/repos/builtin/packages/scr/package.py (renamed from var/spack/packages/scr/package.py) | 2
-rw-r--r--  var/spack/repos/builtin/packages/silo/package.py (renamed from var/spack/packages/silo/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/snappy/package.py (renamed from var/spack/packages/snappy/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/sparsehash/package.py | 14
-rw-r--r--  var/spack/repos/builtin/packages/spindle/package.py (renamed from var/spack/packages/spindle/package.py) | 2
-rw-r--r--  var/spack/repos/builtin/packages/spot/package.py | 18
-rw-r--r--  var/spack/repos/builtin/packages/sqlite/package.py (renamed from var/spack/packages/sqlite/package.py) | 2
-rw-r--r--  var/spack/repos/builtin/packages/stat/configure_mpicxx.patch (renamed from var/spack/packages/stat/configure_mpicxx.patch) | 0
-rw-r--r--  var/spack/repos/builtin/packages/stat/package.py (renamed from var/spack/packages/stat/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/sundials/package.py (renamed from var/spack/packages/sundials/package.py) | 2
-rw-r--r--  var/spack/repos/builtin/packages/swig/package.py (renamed from var/spack/packages/swig/package.py) | 4
-rw-r--r--  var/spack/repos/builtin/packages/szip/package.py | 21
-rw-r--r--  var/spack/repos/builtin/packages/tar/package.py | 13
-rw-r--r--  var/spack/repos/builtin/packages/task/package.py (renamed from var/spack/packages/task/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/taskd/package.py (renamed from var/spack/packages/taskd/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/tau/package.py | 139
-rw-r--r--  var/spack/repos/builtin/packages/tcl/package.py (renamed from var/spack/packages/tcl/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/texinfo/package.py | 46
-rw-r--r--  var/spack/repos/builtin/packages/the_silver_searcher/package.py (renamed from var/spack/packages/the_silver_searcher/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/thrift/package.py (renamed from var/spack/packages/thrift/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/tk/package.py (renamed from var/spack/packages/tk/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/tmux/package.py (renamed from var/spack/packages/tmux/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/tmuxinator/package.py (renamed from var/spack/packages/tmuxinator/package.py) | 5
-rw-r--r--  var/spack/repos/builtin/packages/trilinos/package.py | 50
-rw-r--r--  var/spack/repos/builtin/packages/uncrustify/package.py (renamed from var/spack/packages/uncrustify/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/util-linux/package.py (renamed from var/spack/packages/util-linux/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/valgrind/package.py | 55
-rw-r--r--  var/spack/repos/builtin/packages/vim/package.py (renamed from var/spack/packages/vim/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/vtk/package.py (renamed from var/spack/packages/vtk/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/wget/package.py (renamed from var/spack/packages/wget/package.py) | 5
-rw-r--r--  var/spack/repos/builtin/packages/wx/package.py (renamed from var/spack/packages/wx/package.py) | 2
-rw-r--r--  var/spack/repos/builtin/packages/wxpropgrid/package.py (renamed from var/spack/packages/wxpropgrid/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/xcb-proto/package.py (renamed from var/spack/packages/xcb-proto/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/xerces-c/package.py | 36
-rw-r--r--  var/spack/repos/builtin/packages/xz/package.py (renamed from var/spack/packages/xz/package.py) | 8
-rw-r--r--  var/spack/repos/builtin/packages/yasm/package.py (renamed from var/spack/packages/yasm/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/zeromq/package.py (renamed from var/spack/packages/zeromq/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/zlib/package.py (renamed from var/spack/packages/zlib/package.py) | 0
-rw-r--r--  var/spack/repos/builtin/packages/zsh/package.py | 20
-rw-r--r--  var/spack/repos/builtin/repo.yaml | 2
642 files changed, 29977 insertions, 2884 deletions
diff --git a/.gitignore b/.gitignore
index 1c6ca4c99e..4b97de5d50 100644
--- a/.gitignore
+++ b/.gitignore
@@ -8,3 +8,4 @@
/etc/spackconfig
/share/spack/dotkit
/share/spack/modules
+/TAGS
diff --git a/.mailmap b/.mailmap
index 1cc13c1eb1..1b99da32b5 100644
--- a/.mailmap
+++ b/.mailmap
@@ -9,3 +9,5 @@ Saravan Pantham <saravan.pantham@gmail.com> Saravan Pantham <pantham1@su
Tom Scogland <tscogland@llnl.gov> Tom Scogland <scogland1@llnl.gov>
Tom Scogland <tscogland@llnl.gov> Tom Scogland <tom.scogland@gmail.com>
Joachim Protze <protze@rz.rwth-aachen.de> jprotze <protze@rz.rwth-aachen.de>
+Gregory L. Lee <lee218@llnl.gov> Gregory L. Lee <lee218@surface86.llnl.gov>
+Gregory L. Lee <lee218@llnl.gov> Gregory Lee <lee218@llnl.gov>
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000000..ab379be486
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,26 @@
+language: python
+python:
+ - "2.6"
+ - "2.7"
+
+# Use new Travis infrastructure (Docker can't sudo yet)
+sudo: false
+
+# No need to install any deps.
+install: true
+
+before_install:
+ # Need this for the git tests to succeed.
+ - git config --global user.email "spack@example.com"
+ - git config --global user.name "Test User"
+
+script:
+ - . share/spack/setup-env.sh
+ - spack test
+
+notifications:
+ email:
+ recipients:
+ - tgamblin@llnl.gov
+ on_success: change
+ on_failure: always
diff --git a/LICENSE b/LICENSE
index 6ad4af5861..bb7ef91b3c 100644
--- a/LICENSE
+++ b/LICENSE
@@ -5,7 +5,7 @@ This file is part of Spack.
Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
LLNL-CODE-647188
-For details, see https://scalability-llnl.github.io/spack
+For details, see https://github.com/llnl/spack
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License (as published by
diff --git a/README.md b/README.md
index 74d618ed2f..bdce345764 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,7 @@
-Spack
-===========
+![image](share/spack/logo/spack-logo-text-64.png "Spack")
+============
+
+[![Build Status](https://travis-ci.org/LLNL/spack.png?branch=develop)](https://travis-ci.org/LLNL/spack)
Spack is a package management tool designed to support multiple
versions and configurations of software on a wide variety of platforms
@@ -13,24 +15,30 @@ can coexist on the same system.
Most importantly, Spack is simple. It offers a simple spec syntax so
that users can specify versions and configuration options
concisely. Spack is also simple for package authors: package files are
-writtin in pure Python, and specs allow package authors to write a
+written in pure Python, and specs allow package authors to write a
single build script for many different builds of the same package.
See the
-[Feature Overview](http://scalability-llnl.github.io/spack/features.html)
+[Feature Overview](http://llnl.github.io/spack/features.html)
for examples and highlights.
To install spack and install your first package:
- $ git clone https://github.com/scalability-llnl/spack.git
+ $ git clone https://github.com/llnl/spack.git
$ cd spack/bin
$ ./spack install libelf
Documentation
----------------
-[Full documentation](http://scalability-llnl.github.io/spack)
-for Spack is also available.
+[**Full documentation**](http://llnl.github.io/spack) for Spack is
+the first place to look.
+
+See also:
+ * [Technical paper](http://www.computer.org/csdl/proceedings/sc/2015/3723/00/2807623.pdf) and
+ [slides](https://tgamblin.github.io/files/Gamblin-Spack-SC15-Talk.pdf) on Spack's design and implementation.
+ * [Short presentation](https://tgamblin.github.io/files/Gamblin-Spack-Lightning-Talk-BOF-SC15.pdf) from the *Getting Scientific Software Installed* BOF session at Supercomputing 2015.
+
Get Involved!
------------------------
@@ -62,21 +70,18 @@ latest stable release.
Authors
----------------
-Spack was written by Todd Gamblin, tgamblin@llnl.gov.
-
-Significant contributions were also made by:
-
- * David Beckingsale
- * David Boehme
- * Alfredo Gimenez
- * Luc Jaulmes
- * Matt Legendre
- * Greg Lee
- * Adam Moody
- * Saravan Pantham
- * Joachim Protze
- * Bob Robey
- * Justin Too
+Many thanks go to Spack's [contributors](https://github.com/llnl/spack/graphs/contributors).
+
+Spack was originally written by Todd Gamblin, tgamblin@llnl.gov.
+
+### Citing Spack
+
+If you are referencing Spack in a publication, please cite the following paper:
+
+ * Todd Gamblin, Matthew P. LeGendre, Michael R. Collette, Gregory L. Lee,
+ Adam Moody, Bronis R. de Supinski, and W. Scott Futral.
+ [**The Spack Package Manager: Bringing Order to HPC Software Chaos**](http://www.computer.org/csdl/proceedings/sc/2015/3723/00/2807623.pdf).
+ In *Supercomputing 2015 (SC’15)*, Austin, Texas, November 15-20 2015. LLNL-CONF-669890.
Release
----------------
diff --git a/bin/spack b/bin/spack
index 5c042edd2d..93263217ac 100755
--- a/bin/spack
+++ b/bin/spack
@@ -7,7 +7,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -38,6 +38,31 @@ SPACK_PREFIX = os.path.dirname(os.path.dirname(SPACK_FILE))
# Allow spack libs to be imported in our scripts
SPACK_LIB_PATH = os.path.join(SPACK_PREFIX, "lib", "spack")
sys.path.insert(0, SPACK_LIB_PATH)
+SPACK_EXTERNAL_LIBS = os.path.join(SPACK_LIB_PATH, "external")
+sys.path.insert(0, SPACK_EXTERNAL_LIBS)
+
+import warnings
+# Avoid warnings when nose is installed with the python exe being used to run
+# spack. Note this must be done after Spack's external libs directory is added
+# to sys.path.
+with warnings.catch_warnings():
+    warnings.filterwarnings("ignore", ".*nose was already imported")
+    import nose
+
+# Quick and dirty check to clean orphaned .pyc files left over from
+# previous revisions. These files were present in earlier versions of
+# Spack, were removed, but shadow system modules that Spack still
+# imports. If we leave them, Spack will fail in mysterious ways.
+# TODO: more elegant solution for orphaned pyc files.
+orphaned_pyc_files = [os.path.join(SPACK_EXTERNAL_LIBS, n)
+ for n in ('functools.pyc', 'ordereddict.pyc')]
+for pyc_file in orphaned_pyc_files:
+ if not os.path.exists(pyc_file):
+ continue
+ try:
+ os.remove(pyc_file)
+ except OSError as e:
+ print "WARNING: Spack may fail mysteriously. Couldn't remove orphaned .pyc file: %s" % pyc_file
# If there is no working directory, use the spack prefix.
try:
@@ -72,6 +97,8 @@ spec expressions:
parser.add_argument('-d', '--debug', action='store_true',
help="Write out debug logs during compile")
+parser.add_argument('-D', '--pdb', action='store_true',
+ help="Run spack under the pdb debugger")
parser.add_argument('-k', '--insecure', action='store_true',
help="Do not check ssl certificates when downloading.")
parser.add_argument('-m', '--mock', action='store_true',
@@ -113,12 +140,12 @@ def main():
spack.spack_working_dir = working_dir
if args.mock:
- from spack.packages import PackageDB
- spack.db = PackageDB(spack.mock_packages_path)
+ from spack.repository import RepoPath
+ spack.repo.swap(RepoPath(spack.mock_packages_path))
# If the user asked for it, don't check ssl certs.
if args.insecure:
- tty.warn("You asked for --insecure, which does not check SSL certificates or checksums.")
+ tty.warn("You asked for --insecure, which does not check SSL certificates.")
spack.curl.add_default_arg('-k')
# Try to load the particular command asked for and run it
@@ -131,7 +158,7 @@ def main():
sys.stderr.write('\n')
tty.die("Keyboard interrupt.")
- # Allow commands to return values if they want to exit with some ohter code.
+ # Allow commands to return values if they want to exit with some other code.
if return_val is None:
sys.exit(0)
elif isinstance(return_val, int):
@@ -142,5 +169,8 @@ def main():
if args.profile:
import cProfile
cProfile.run('main()', sort='tottime')
+elif args.pdb:
+ import pdb
+ pdb.run('main()')
else:
main()
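
The orphaned-`.pyc` cleanup above is a small but load-bearing detail: stale bytecode compiled from since-removed modules would otherwise shadow the real standard-library modules when Spack imports them. A self-contained sketch of the same best-effort pattern, with an illustrative directory standing in for Spack's external-libs path:

```python
import os

# Illustrative stand-in for the directory that once held vendored modules;
# stale .pyc files left here would shadow the real stdlib modules on import.
external_dir = "/tmp/example-external-libs"

for name in ("functools.pyc", "ordereddict.pyc"):
    pyc_file = os.path.join(external_dir, name)
    if not os.path.exists(pyc_file):
        continue
    try:
        os.remove(pyc_file)
    except OSError as e:
        # Best-effort: warn and keep going rather than abort startup.
        print("WARNING: couldn't remove orphaned .pyc file %s: %s" % (pyc_file, e))
```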
diff --git a/bin/spack-python b/bin/spack-python
index 8a4b9c175d..e0745e8c58 100755
--- a/bin/spack-python
+++ b/bin/spack-python
@@ -7,7 +7,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/etc/spack/repos.yaml b/etc/spack/repos.yaml
new file mode 100644
index 0000000000..2d4ff54ce6
--- /dev/null
+++ b/etc/spack/repos.yaml
@@ -0,0 +1,8 @@
+# -------------------------------------------------------------------------
+# This is the default spack repository configuration.
+#
+# Changes to this file will affect all users of this spack install,
+# although users can override these settings in their ~/.spack/repos.yaml.
+# -------------------------------------------------------------------------
+repos:
+ - $spack/var/spack/repos/builtin
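
As the comment notes, per-user settings in `~/.spack/repos.yaml` shadow this site-wide file. A rough sketch of that layering, assuming PyYAML is available and using hypothetical paths; Spack's real configuration merging is more involved than this:

```python
import os
import yaml  # PyYAML, assumed available


def load_repos(path):
    """Return the 'repos' list from a repos.yaml file, or [] if missing."""
    if not os.path.exists(path):
        return []
    with open(path) as f:
        data = yaml.safe_load(f) or {}
    return data.get("repos", [])


# User-level entries take precedence over the site-wide defaults.
site = load_repos("/opt/spack/etc/spack/repos.yaml")          # hypothetical
user = load_repos(os.path.expanduser("~/.spack/repos.yaml"))
repos = user + [r for r in site if r not in user]
print(repos)
```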
diff --git a/lib/spack/docs/conf.py b/lib/spack/docs/conf.py
index 7303d7fef6..3d2a8251aa 100644
--- a/lib/spack/docs/conf.py
+++ b/lib/spack/docs/conf.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -43,6 +43,7 @@ import subprocess
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('exts'))
+sys.path.insert(0, os.path.abspath('../external'))
# Add the Spack bin directory to the path so that we can use its output in docs.
spack_root = '../../..'
@@ -149,7 +150,7 @@ html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
-#html_theme_options = [('show_copyright', False)]
+html_theme_options = { 'logo_only' : True }
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ["_themes"]
@@ -163,12 +164,12 @@ html_theme_path = ["_themes"]
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
-#html_logo = None
+html_logo = '../../../share/spack/logo/spack-logo-white-text-48.png'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
-#html_favicon = None
+html_favicon = '../../../share/spack/logo/favicon.ico'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
diff --git a/lib/spack/docs/exts/sphinxcontrib/__init__.py b/lib/spack/docs/exts/sphinxcontrib/__init__.py
index 838d616eb4..298856746c 100644
--- a/lib/spack/docs/exts/sphinxcontrib/__init__.py
+++ b/lib/spack/docs/exts/sphinxcontrib/__init__.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/lib/spack/docs/exts/sphinxcontrib/programoutput.py b/lib/spack/docs/exts/sphinxcontrib/programoutput.py
index ff006acf72..f0fa045c86 100644
--- a/lib/spack/docs/exts/sphinxcontrib/programoutput.py
+++ b/lib/spack/docs/exts/sphinxcontrib/programoutput.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/lib/spack/docs/getting_started.rst b/lib/spack/docs/getting_started.rst
index d958d9e74a..67ca18e71a 100644
--- a/lib/spack/docs/getting_started.rst
+++ b/lib/spack/docs/getting_started.rst
@@ -5,11 +5,11 @@ Download
--------------------
Getting spack is easy. You can clone it from the `github repository
-<https://github.com/scalability-llnl/spack>`_ using this command:
+<https://github.com/llnl/spack>`_ using this command:
.. code-block:: sh
- $ git clone https://github.com/scalability-llnl/spack.git
+ $ git clone https://github.com/llnl/spack.git
This will create a directory called ``spack``. We'll assume that the
full path to this directory is in the ``SPACK_ROOT`` environment
@@ -22,7 +22,7 @@ go:
$ spack install libelf
For a richer experience, use Spack's `shell support
-<http://scalability-llnl.github.io/spack/basic_usage.html#environment-modules>`_:
+<http://llnl.github.io/spack/basic_usage.html#environment-modules>`_:
.. code-block:: sh
diff --git a/lib/spack/docs/index.rst b/lib/spack/docs/index.rst
index 97c8361421..79757208c9 100644
--- a/lib/spack/docs/index.rst
+++ b/lib/spack/docs/index.rst
@@ -24,12 +24,12 @@ maintain a single file for many different builds of the same package.
See the :doc:`features` for examples and highlights.
Get spack from the `github repository
-<https://github.com/scalability-llnl/spack>`_ and install your first
+<https://github.com/llnl/spack>`_ and install your first
package:
.. code-block:: sh
- $ git clone https://github.com/scalability-llnl/spack.git
+ $ git clone https://github.com/llnl/spack.git
$ cd spack/bin
$ ./spack install libelf
diff --git a/lib/spack/env/cc b/lib/spack/env/cc
index fa85bb595e..aacba996b3 100755
--- a/lib/spack/env/cc
+++ b/lib/spack/env/cc
@@ -7,7 +7,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -86,22 +86,22 @@ done
#
command=$(basename "$0")
case "$command" in
- cc|gcc|c89|c99|clang|xlc)
+ cc|c89|c99|gcc|clang|icc|pgcc|xlc)
command="$SPACK_CC"
language="C"
;;
- c++|CC|g++|clang++|xlC)
+ c++|CC|g++|clang++|icpc|pgCC|xlc++)
command="$SPACK_CXX"
language="C++"
;;
- f77|xlf)
- command="$SPACK_F77"
- language="Fortran 77"
- ;;
- fc|f90|f95|xlf90)
+ f90|fc|f95|gfortran|ifort|pgf90|xlf90|nagfor)
command="$SPACK_FC"
language="Fortran 90"
;;
+ f77|gfortran|ifort|pgf77|xlf|nagfor)
+ command="$SPACK_F77"
+ language="Fortran 77"
+ ;;
cpp)
mode=cpp
;;
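
The `case` statement above picks the real compiler purely from the name the wrapper was invoked as; the symlink farm added below (one link per compiler name, all pointing back at `cc`) is what feeds it. A minimal Python rendering of the same argv[0]-based dispatch; the table is abbreviated, and only the `SPACK_*` variable names come from the script itself:

```python
import os
import sys

# Invoked name -> environment variable naming the real compiler (abbreviated).
DISPATCH = {
    "cc": "SPACK_CC",   "gcc": "SPACK_CC",       "clang": "SPACK_CC",
    "c++": "SPACK_CXX", "g++": "SPACK_CXX",      "clang++": "SPACK_CXX",
    "f90": "SPACK_FC",  "gfortran": "SPACK_FC",  "ifort": "SPACK_FC",
    "f77": "SPACK_F77", "pgf77": "SPACK_F77",
}


def real_compiler(argv0):
    name = os.path.basename(argv0)
    var = DISPATCH.get(name)
    if var is None:
        sys.exit("unknown compiler wrapper: %s" % name)
    return os.environ[var]

# A symlink named 'gcc' that resolves to this script would thus run $SPACK_CC.
```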
diff --git a/lib/spack/env/clang/clang b/lib/spack/env/clang/clang
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/clang/clang
@@ -0,0 +1 @@
+../cc \ No newline at end of file
diff --git a/lib/spack/env/clang/clang++ b/lib/spack/env/clang/clang++
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/clang/clang++
@@ -0,0 +1 @@
+../cc \ No newline at end of file
diff --git a/lib/spack/env/gcc/g++ b/lib/spack/env/gcc/g++
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/gcc/g++
@@ -0,0 +1 @@
+../cc \ No newline at end of file
diff --git a/lib/spack/env/gcc/gcc b/lib/spack/env/gcc/gcc
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/gcc/gcc
@@ -0,0 +1 @@
+../cc \ No newline at end of file
diff --git a/lib/spack/env/gcc/gfortran b/lib/spack/env/gcc/gfortran
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/gcc/gfortran
@@ -0,0 +1 @@
+../cc \ No newline at end of file
diff --git a/lib/spack/env/intel/icc b/lib/spack/env/intel/icc
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/intel/icc
@@ -0,0 +1 @@
+../cc \ No newline at end of file
diff --git a/lib/spack/env/intel/icpc b/lib/spack/env/intel/icpc
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/intel/icpc
@@ -0,0 +1 @@
+../cc \ No newline at end of file
diff --git a/lib/spack/env/intel/ifort b/lib/spack/env/intel/ifort
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/intel/ifort
@@ -0,0 +1 @@
+../cc \ No newline at end of file
diff --git a/lib/spack/env/nag/nagfor b/lib/spack/env/nag/nagfor
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/nag/nagfor
@@ -0,0 +1 @@
+../cc \ No newline at end of file
diff --git a/lib/spack/env/pgi/case-insensitive/pgCC b/lib/spack/env/pgi/case-insensitive/pgCC
new file mode 120000
index 0000000000..e2deb67f3b
--- /dev/null
+++ b/lib/spack/env/pgi/case-insensitive/pgCC
@@ -0,0 +1 @@
+../../cc \ No newline at end of file
diff --git a/lib/spack/env/pgi/pgcc b/lib/spack/env/pgi/pgcc
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/pgi/pgcc
@@ -0,0 +1 @@
+../cc \ No newline at end of file
diff --git a/lib/spack/env/pgi/pgf77 b/lib/spack/env/pgi/pgf77
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/pgi/pgf77
@@ -0,0 +1 @@
+../cc \ No newline at end of file
diff --git a/lib/spack/env/pgi/pgf90 b/lib/spack/env/pgi/pgf90
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/pgi/pgf90
@@ -0,0 +1 @@
+../cc \ No newline at end of file
diff --git a/lib/spack/env/xl/xlc b/lib/spack/env/xl/xlc
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/xl/xlc
@@ -0,0 +1 @@
+../cc \ No newline at end of file
diff --git a/lib/spack/env/xl/xlc++ b/lib/spack/env/xl/xlc++
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/xl/xlc++
@@ -0,0 +1 @@
+../cc \ No newline at end of file
diff --git a/lib/spack/env/xl/xlf b/lib/spack/env/xl/xlf
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/xl/xlf
@@ -0,0 +1 @@
+../cc \ No newline at end of file
diff --git a/lib/spack/env/xl/xlf90 b/lib/spack/env/xl/xlf90
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/xl/xlf90
@@ -0,0 +1 @@
+../cc \ No newline at end of file
diff --git a/lib/spack/external/__init__.py b/lib/spack/external/__init__.py
index 0578022210..7a89a1ac67 100644
--- a/lib/spack/external/__init__.py
+++ b/lib/spack/external/__init__.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/lib/spack/external/argparse.py b/lib/spack/external/argparse.py
index 394e5da152..ec9a9ee738 100644
--- a/lib/spack/external/argparse.py
+++ b/lib/spack/external/argparse.py
@@ -1067,9 +1067,13 @@ class _SubParsersAction(Action):
class _ChoicesPseudoAction(Action):
- def __init__(self, name, help):
+ def __init__(self, name, aliases, help):
+ metavar = dest = name
+ if aliases:
+ metavar += ' (%s)' % ', '.join(aliases)
sup = super(_SubParsersAction._ChoicesPseudoAction, self)
- sup.__init__(option_strings=[], dest=name, help=help)
+ sup.__init__(option_strings=[], dest=dest, help=help,
+ metavar=metavar)
def __init__(self,
option_strings,
@@ -1097,15 +1101,22 @@ class _SubParsersAction(Action):
if kwargs.get('prog') is None:
kwargs['prog'] = '%s %s' % (self._prog_prefix, name)
+ aliases = kwargs.pop('aliases', ())
+
# create a pseudo-action to hold the choice help
if 'help' in kwargs:
help = kwargs.pop('help')
- choice_action = self._ChoicesPseudoAction(name, help)
+ choice_action = self._ChoicesPseudoAction(name, aliases, help)
self._choices_actions.append(choice_action)
# create the parser and add it to the map
parser = self._parser_class(**kwargs)
self._name_parser_map[name] = parser
+
+ # make parser available under aliases also
+ for alias in aliases:
+ self._name_parser_map[alias] = parser
+
return parser
def _get_subactions(self):
@@ -1123,8 +1134,9 @@ class _SubParsersAction(Action):
try:
parser = self._name_parser_map[parser_name]
except KeyError:
- tup = parser_name, ', '.join(self._name_parser_map)
- msg = _('unknown parser %r (choices: %s)' % tup)
+ args = {'parser_name': parser_name,
+ 'choices': ', '.join(self._name_parser_map)}
+ msg = _('unknown parser %(parser_name)r (choices: %(choices)s)') % args
raise ArgumentError(self, msg)
# parse all the remaining options into the namespace
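
The hunk above backports subcommand aliases into the vendored `argparse`, registering the same subparser under every alias and folding the aliases into the help metavar. A small usage sketch with made-up command names (this also works on modern Python 3, where `aliases=` is standard):

```python
import argparse

parser = argparse.ArgumentParser(prog="tool")
subparsers = parser.add_subparsers(dest="command")

# With the patch above, 'in' and 'add' resolve to the very same subparser.
install = subparsers.add_parser("install", aliases=["in", "add"],
                                help="install something")
install.add_argument("name")

args = parser.parse_args(["in", "libelf"])
print(args.command, args.name)  # -> "in libelf"
```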
diff --git a/lib/spack/external/functools.py b/lib/spack/external/functools_backport.py
index 19f0903c82..19f0903c82 100644
--- a/lib/spack/external/functools.py
+++ b/lib/spack/external/functools_backport.py
diff --git a/lib/spack/external/jsonschema/COPYING b/lib/spack/external/jsonschema/COPYING
new file mode 100644
index 0000000000..af9cfbdb13
--- /dev/null
+++ b/lib/spack/external/jsonschema/COPYING
@@ -0,0 +1,19 @@
+Copyright (c) 2013 Julian Berman
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/lib/spack/external/jsonschema/README.rst b/lib/spack/external/jsonschema/README.rst
new file mode 100644
index 0000000000..20c2fe6266
--- /dev/null
+++ b/lib/spack/external/jsonschema/README.rst
@@ -0,0 +1,104 @@
+==========
+jsonschema
+==========
+
+``jsonschema`` is an implementation of `JSON Schema <http://json-schema.org>`_
+for Python (supporting 2.6+ including Python 3).
+
+.. code-block:: python
+
+ >>> from jsonschema import validate
+
+ >>> # A sample schema, like what we'd get from json.load()
+ >>> schema = {
+ ... "type" : "object",
+ ... "properties" : {
+ ... "price" : {"type" : "number"},
+ ... "name" : {"type" : "string"},
+ ... },
+ ... }
+
+ >>> # If no exception is raised by validate(), the instance is valid.
+ >>> validate({"name" : "Eggs", "price" : 34.99}, schema)
+
+ >>> validate(
+ ... {"name" : "Eggs", "price" : "Invalid"}, schema
+ ... ) # doctest: +IGNORE_EXCEPTION_DETAIL
+ Traceback (most recent call last):
+ ...
+ ValidationError: 'Invalid' is not of type 'number'
+
+
+Features
+--------
+
+* Full support for
+ `Draft 3 <https://python-jsonschema.readthedocs.org/en/latest/validate/#jsonschema.Draft3Validator>`_
+ **and** `Draft 4 <https://python-jsonschema.readthedocs.org/en/latest/validate/#jsonschema.Draft4Validator>`_
+ of the schema.
+
+* `Lazy validation <https://python-jsonschema.readthedocs.org/en/latest/validate/#jsonschema.IValidator.iter_errors>`_
+ that can iteratively report *all* validation errors.
+
+* Small and extensible
+
+* `Programmatic querying <https://python-jsonschema.readthedocs.org/en/latest/errors/#module-jsonschema>`_
+ of which properties or items failed validation.
+
+
+Release Notes
+-------------
+
+* A simple CLI was added for validation
+* Validation errors now keep full absolute paths and absolute schema paths in
+ their ``absolute_path`` and ``absolute_schema_path`` attributes. The ``path``
+ and ``schema_path`` attributes are deprecated in favor of ``relative_path``
+ and ``relative_schema_path``\ .
+
+*Note:* Support for Python 3.2 was dropped in this release, and installation
+now uses setuptools.
+
+
+Running the Test Suite
+----------------------
+
+``jsonschema`` uses the wonderful `Tox <http://tox.readthedocs.org>`_ for its
+test suite. (It really is wonderful; if for some reason you haven't heard of
+it, you really should use it for your projects.)
+
+Assuming you have ``tox`` installed (perhaps via ``pip install tox`` or your
+package manager), just run ``tox`` in the directory of your source checkout to
+run ``jsonschema``'s test suite on all of the versions of Python ``jsonschema``
+supports. Note that you'll need to have all of those versions installed in
+order to run the tests on each of them; otherwise ``tox`` will skip (and
+fail) the tests on that version.
+
+Of course you're also free to just run the tests on a single version with your
+favorite test runner. The tests live in the ``jsonschema.tests`` package.
+
+
+Community
+---------
+
+There's a `mailing list <https://groups.google.com/forum/#!forum/jsonschema>`_
+for this implementation on Google Groups.
+
+Please join, and feel free to send questions there.
+
+
+Contributing
+------------
+
+I'm Julian Berman.
+
+``jsonschema`` is on `GitHub <http://github.com/Julian/jsonschema>`_.
+
+Get in touch, via GitHub or otherwise, if you've got something to contribute;
+it'd be most welcome!
+
+You can also generally find me on Freenode (nick: ``tos9``) in various
+channels, including ``#python``.
+
+If you feel overwhelmingly grateful, you can woo me with beer money on
+`Gittip <https://www.gittip.com/Julian/>`_ or via Google Wallet with the email
+in my GitHub profile.
diff --git a/lib/spack/external/jsonschema/__init__.py b/lib/spack/external/jsonschema/__init__.py
new file mode 100644
index 0000000000..6c099f1d8b
--- /dev/null
+++ b/lib/spack/external/jsonschema/__init__.py
@@ -0,0 +1,26 @@
+"""
+An implementation of JSON Schema for Python
+
+The main functionality is provided by the validator classes for each of the
+supported JSON Schema versions.
+
+Most commonly, :func:`validate` is the quickest way to simply validate a given
+instance under a schema, and will create a validator for you.
+
+"""
+
+from jsonschema.exceptions import (
+ ErrorTree, FormatError, RefResolutionError, SchemaError, ValidationError
+)
+from jsonschema._format import (
+ FormatChecker, draft3_format_checker, draft4_format_checker,
+)
+from jsonschema.validators import (
+ Draft3Validator, Draft4Validator, RefResolver, validate
+)
+
+
+__version__ = "2.4.0"
+
+
+# flake8: noqa
diff --git a/lib/spack/external/jsonschema/__main__.py b/lib/spack/external/jsonschema/__main__.py
new file mode 100644
index 0000000000..82c29fd39e
--- /dev/null
+++ b/lib/spack/external/jsonschema/__main__.py
@@ -0,0 +1,2 @@
+from jsonschema.cli import main
+main()
diff --git a/lib/spack/external/jsonschema/_format.py b/lib/spack/external/jsonschema/_format.py
new file mode 100644
index 0000000000..bb52d183ad
--- /dev/null
+++ b/lib/spack/external/jsonschema/_format.py
@@ -0,0 +1,240 @@
+import datetime
+import re
+import socket
+
+from jsonschema.compat import str_types
+from jsonschema.exceptions import FormatError
+
+
+class FormatChecker(object):
+ """
+ A ``format`` property checker.
+
+ JSON Schema does not mandate that the ``format`` property actually do any
+ validation. If validation is desired however, instances of this class can
+ be hooked into validators to enable format validation.
+
+ :class:`FormatChecker` objects always return ``True`` when asked about
+ formats that they do not know how to validate.
+
+ To check a custom format using a function that takes an instance and
+ returns a ``bool``, use the :meth:`FormatChecker.checks` or
+ :meth:`FormatChecker.cls_checks` decorators.
+
+ :argument iterable formats: the known formats to validate. This argument
+ can be used to limit which formats will be used
+ during validation.
+
+ """
+
+ checkers = {}
+
+ def __init__(self, formats=None):
+ if formats is None:
+ self.checkers = self.checkers.copy()
+ else:
+ self.checkers = dict((k, self.checkers[k]) for k in formats)
+
+ def checks(self, format, raises=()):
+ """
+ Register a decorated function as validating a new format.
+
+ :argument str format: the format that the decorated function will check
+ :argument Exception raises: the exception(s) raised by the decorated
+ function when an invalid instance is found. The exception object
+ will be accessible as the :attr:`ValidationError.cause` attribute
+ of the resulting validation error.
+
+ """
+
+ def _checks(func):
+ self.checkers[format] = (func, raises)
+ return func
+ return _checks
+
+ cls_checks = classmethod(checks)
+
+ def check(self, instance, format):
+ """
+ Check whether the instance conforms to the given format.
+
+ :argument instance: the instance to check
+ :type: any primitive type (str, number, bool)
+ :argument str format: the format that instance should conform to
+ :raises: :exc:`FormatError` if instance does not conform to format
+
+ """
+
+ if format not in self.checkers:
+ return
+
+ func, raises = self.checkers[format]
+ result, cause = None, None
+ try:
+ result = func(instance)
+ except raises as e:
+ cause = e
+ if not result:
+ raise FormatError(
+ "%r is not a %r" % (instance, format), cause=cause,
+ )
+
+ def conforms(self, instance, format):
+ """
+ Check whether the instance conforms to the given format.
+
+ :argument instance: the instance to check
+ :type: any primitive type (str, number, bool)
+ :argument str format: the format that instance should conform to
+ :rtype: bool
+
+ """
+
+ try:
+ self.check(instance, format)
+ except FormatError:
+ return False
+ else:
+ return True
+
+
+_draft_checkers = {"draft3": [], "draft4": []}
+
+
+def _checks_drafts(both=None, draft3=None, draft4=None, raises=()):
+ draft3 = draft3 or both
+ draft4 = draft4 or both
+
+ def wrap(func):
+ if draft3:
+ _draft_checkers["draft3"].append(draft3)
+ func = FormatChecker.cls_checks(draft3, raises)(func)
+ if draft4:
+ _draft_checkers["draft4"].append(draft4)
+ func = FormatChecker.cls_checks(draft4, raises)(func)
+ return func
+ return wrap
+
+
+@_checks_drafts("email")
+def is_email(instance):
+ if not isinstance(instance, str_types):
+ return True
+ return "@" in instance
+
+
+_ipv4_re = re.compile(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$")
+
+@_checks_drafts(draft3="ip-address", draft4="ipv4")
+def is_ipv4(instance):
+ if not isinstance(instance, str_types):
+ return True
+ if not _ipv4_re.match(instance):
+ return False
+ return all(0 <= int(component) <= 255 for component in instance.split("."))
+
+
+if hasattr(socket, "inet_pton"):
+ @_checks_drafts("ipv6", raises=socket.error)
+ def is_ipv6(instance):
+ if not isinstance(instance, str_types):
+ return True
+ return socket.inet_pton(socket.AF_INET6, instance)
+
+
+_host_name_re = re.compile(r"^[A-Za-z0-9][A-Za-z0-9\.\-]{1,255}$")
+
+@_checks_drafts(draft3="host-name", draft4="hostname")
+def is_host_name(instance):
+ if not isinstance(instance, str_types):
+ return True
+ if not _host_name_re.match(instance):
+ return False
+ components = instance.split(".")
+ for component in components:
+ if len(component) > 63:
+ return False
+ return True
+
+
+try:
+ import rfc3987
+except ImportError:
+ pass
+else:
+ @_checks_drafts("uri", raises=ValueError)
+ def is_uri(instance):
+ if not isinstance(instance, str_types):
+ return True
+ return rfc3987.parse(instance, rule="URI")
+
+
+try:
+ import strict_rfc3339
+except ImportError:
+ try:
+ import isodate
+ except ImportError:
+ pass
+ else:
+ @_checks_drafts("date-time", raises=(ValueError, isodate.ISO8601Error))
+ def is_date(instance):
+ if not isinstance(instance, str_types):
+ return True
+ return isodate.parse_datetime(instance)
+else:
+ @_checks_drafts("date-time")
+ def is_date(instance):
+ if not isinstance(instance, str_types):
+ return True
+ return strict_rfc3339.validate_rfc3339(instance)
+
+
+@_checks_drafts("regex", raises=re.error)
+def is_regex(instance):
+ if not isinstance(instance, str_types):
+ return True
+ return re.compile(instance)
+
+
+@_checks_drafts(draft3="date", raises=ValueError)
+def is_date(instance):
+ if not isinstance(instance, str_types):
+ return True
+ return datetime.datetime.strptime(instance, "%Y-%m-%d")
+
+
+@_checks_drafts(draft3="time", raises=ValueError)
+def is_time(instance):
+ if not isinstance(instance, str_types):
+ return True
+ return datetime.datetime.strptime(instance, "%H:%M:%S")
+
+
+try:
+ import webcolors
+except ImportError:
+ pass
+else:
+ def is_css_color_code(instance):
+ return webcolors.normalize_hex(instance)
+
+
+ @_checks_drafts(draft3="color", raises=(ValueError, TypeError))
+ def is_css21_color(instance):
+ if (
+ not isinstance(instance, str_types) or
+ instance.lower() in webcolors.css21_names_to_hex
+ ):
+ return True
+ return is_css_color_code(instance)
+
+
+ def is_css3_color(instance):
+ if instance.lower() in webcolors.css3_names_to_hex:
+ return True
+ return is_css_color_code(instance)
+
+
+draft3_format_checker = FormatChecker(_draft_checkers["draft3"])
+draft4_format_checker = FormatChecker(_draft_checkers["draft4"])
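
The decorator registration above is easiest to see with a custom format. A minimal sketch against the vendored API; the `even` format name and the schema are made up for illustration:

```python
from jsonschema import Draft4Validator, FormatChecker

checker = FormatChecker()


@checker.checks("even")
def is_even(value):
    # Mirror the vendored checkers: ignore non-integers, reject odd ints.
    if not isinstance(value, int):
        return True
    return value % 2 == 0


schema = {"type": "integer", "format": "even"}
validator = Draft4Validator(schema, format_checker=checker)

print(validator.is_valid(2))  # True
print(validator.is_valid(3))  # False: FormatError becomes a ValidationError
```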
diff --git a/lib/spack/external/jsonschema/_reflect.py b/lib/spack/external/jsonschema/_reflect.py
new file mode 100644
index 0000000000..d09e38fbdc
--- /dev/null
+++ b/lib/spack/external/jsonschema/_reflect.py
@@ -0,0 +1,155 @@
+# -*- test-case-name: twisted.test.test_reflect -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Standardized versions of various cool and/or strange things that you can do
+with Python's reflection capabilities.
+"""
+
+import sys
+
+from jsonschema.compat import PY3
+
+
+class _NoModuleFound(Exception):
+ """
+ No module was found because none exists.
+ """
+
+
+
+class InvalidName(ValueError):
+ """
+ The given name is not a dot-separated list of Python objects.
+ """
+
+
+
+class ModuleNotFound(InvalidName):
+ """
+ The module associated with the given name doesn't exist and it can't be
+ imported.
+ """
+
+
+
+class ObjectNotFound(InvalidName):
+ """
+ The object associated with the given name doesn't exist and it can't be
+ imported.
+ """
+
+
+
+if PY3:
+ def reraise(exception, traceback):
+ raise exception.with_traceback(traceback)
+else:
+ exec("""def reraise(exception, traceback):
+ raise exception.__class__, exception, traceback""")
+
+reraise.__doc__ = """
+Re-raise an exception, with an optional traceback, in a way that is compatible
+with both Python 2 and Python 3.
+
+Note that on Python 3, re-raised exceptions will be mutated, with their
+C{__traceback__} attribute being set.
+
+@param exception: The exception instance.
+@param traceback: The traceback to use, or C{None} indicating a new traceback.
+"""
+
+
+def _importAndCheckStack(importName):
+ """
+ Import the given name as a module, then walk the stack to determine whether
+ the failure was the module not existing, or some code in the module (for
+ example a dependent import) failing. This can be helpful to determine
+ whether any actual application code was run. For example, to distinguish
+ administrative error (entering the wrong module name) from programmer
+ error (writing buggy code in a module that fails to import).
+
+ @param importName: The name of the module to import.
+ @type importName: C{str}
+ @raise Exception: if something bad happens. This can be any type of
+ exception, since nobody knows what loading some arbitrary code might
+ do.
+ @raise _NoModuleFound: if no module was found.
+ """
+ try:
+ return __import__(importName)
+ except ImportError:
+ excType, excValue, excTraceback = sys.exc_info()
+ while excTraceback:
+ execName = excTraceback.tb_frame.f_globals["__name__"]
+ # in Python 2 execName is None when an ImportError is encountered,
+ # whereas in Python 3 execName is equal to the importName.
+ if execName is None or execName == importName:
+ reraise(excValue, excTraceback)
+ excTraceback = excTraceback.tb_next
+ raise _NoModuleFound()
+
+
+
+def namedAny(name):
+ """
+ Retrieve a Python object by its fully qualified name from the global Python
+ module namespace. The first part of the name, that describes a module,
+ will be discovered and imported. Each subsequent part of the name is
+ treated as the name of an attribute of the object specified by all of the
+ name which came before it. For example, the fully-qualified name of this
+ object is 'twisted.python.reflect.namedAny'.
+
+ @type name: L{str}
+ @param name: The name of the object to return.
+
+ @raise InvalidName: If the name is an empty string, starts or ends with
+ a '.', or is otherwise syntactically incorrect.
+
+ @raise ModuleNotFound: If the name is syntactically correct but the
+ module it specifies cannot be imported because it does not appear to
+ exist.
+
+ @raise ObjectNotFound: If the name is syntactically correct, includes at
+ least one '.', but the module it specifies cannot be imported because
+ it does not appear to exist.
+
+ @raise AttributeError: If an attribute of an object along the way cannot be
+ accessed, or a module along the way is not found.
+
+ @return: the Python object identified by 'name'.
+ """
+ if not name:
+ raise InvalidName('Empty module name')
+
+ names = name.split('.')
+
+ # if the name starts or ends with a '.' or contains '..', the __import__
+ # will raise an 'Empty module name' error. This will provide a better error
+ # message.
+ if '' in names:
+ raise InvalidName(
+ "name must be a string giving a '.'-separated list of Python "
+ "identifiers, not %r" % (name,))
+
+ topLevelPackage = None
+ moduleNames = names[:]
+ while not topLevelPackage:
+ if moduleNames:
+ trialname = '.'.join(moduleNames)
+ try:
+ topLevelPackage = _importAndCheckStack(trialname)
+ except _NoModuleFound:
+ moduleNames.pop()
+ else:
+ if len(names) == 1:
+ raise ModuleNotFound("No module named %r" % (name,))
+ else:
+ raise ObjectNotFound('%r does not name an object' % (name,))
+
+ obj = topLevelPackage
+ for n in names[1:]:
+ obj = getattr(obj, n)
+
+ return obj
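
`namedAny` resolves a dotted name by importing the longest importable module prefix, then walking attributes from there. For example, using only names that exist:

```python
from jsonschema._reflect import namedAny

join = namedAny("os.path.join")   # imports os.path, then fetches 'join'
print(join("a", "b"))             # a/b

Validator = namedAny("jsonschema.Draft4Validator")
print(Validator.__name__)         # Draft4Validator
```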
diff --git a/lib/spack/external/jsonschema/_utils.py b/lib/spack/external/jsonschema/_utils.py
new file mode 100644
index 0000000000..2262f3305d
--- /dev/null
+++ b/lib/spack/external/jsonschema/_utils.py
@@ -0,0 +1,213 @@
+import itertools
+import json
+import pkgutil
+import re
+
+from jsonschema.compat import str_types, MutableMapping, urlsplit
+
+
+class URIDict(MutableMapping):
+ """
+ Dictionary which uses normalized URIs as keys.
+
+ """
+
+ def normalize(self, uri):
+ return urlsplit(uri).geturl()
+
+ def __init__(self, *args, **kwargs):
+ self.store = dict()
+ self.store.update(*args, **kwargs)
+
+ def __getitem__(self, uri):
+ return self.store[self.normalize(uri)]
+
+ def __setitem__(self, uri, value):
+ self.store[self.normalize(uri)] = value
+
+ def __delitem__(self, uri):
+ del self.store[self.normalize(uri)]
+
+ def __iter__(self):
+ return iter(self.store)
+
+ def __len__(self):
+ return len(self.store)
+
+ def __repr__(self):
+ return repr(self.store)
+
+
+class Unset(object):
+ """
+ An as-of-yet unset attribute or unprovided default parameter.
+
+ """
+
+ def __repr__(self):
+ return "<unset>"
+
+
+def load_schema(name):
+ """
+ Load a schema from ./schemas/``name``.json and return it.
+
+ """
+
+ data = pkgutil.get_data(__package__, "schemas/{0}.json".format(name))
+ return json.loads(data.decode("utf-8"))
+
+
+def indent(string, times=1):
+ """
+ A dumb version of :func:`textwrap.indent` from Python 3.3.
+
+ """
+
+ return "\n".join(" " * (4 * times) + line for line in string.splitlines())
+
+
+def format_as_index(indices):
+ """
+ Construct a single string containing indexing operations for the indices.
+
+ For example, [1, 2, "foo"] -> [1][2]["foo"]
+
+ :type indices: sequence
+
+ """
+
+ if not indices:
+ return ""
+ return "[%s]" % "][".join(repr(index) for index in indices)
+
+
+def find_additional_properties(instance, schema):
+ """
+ Return the set of additional properties for the given ``instance``.
+
+ Weeds out properties that should have been validated by ``properties`` and
+ / or ``patternProperties``.
+
+ Assumes ``instance`` is dict-like already.
+
+ """
+
+ properties = schema.get("properties", {})
+ patterns = "|".join(schema.get("patternProperties", {}))
+ for property in instance:
+ if property not in properties:
+ if patterns and re.search(patterns, property):
+ continue
+ yield property
+
+
+def extras_msg(extras):
+ """
+ Create an error message for extra items or properties.
+
+ """
+
+ if len(extras) == 1:
+ verb = "was"
+ else:
+ verb = "were"
+ return ", ".join(repr(extra) for extra in extras), verb
+
+
+def types_msg(instance, types):
+ """
+ Create an error message for a failure to match the given types.
+
+ If the ``instance`` is an object and contains a ``name`` property, it will
+ be considered to be a description of that object and used as its type.
+
+ Otherwise the message is simply the reprs of the given ``types``.
+
+ """
+
+ reprs = []
+ for type in types:
+ try:
+ reprs.append(repr(type["name"]))
+ except Exception:
+ reprs.append(repr(type))
+ return "%r is not of type %s" % (instance, ", ".join(reprs))
+
+
+def flatten(suitable_for_isinstance):
+ """
+ isinstance() can accept a bunch of really annoying different types:
+ * a single type
+ * a tuple of types
+ * an arbitrary nested tree of tuples
+
+ Return a flattened tuple of the given argument.
+
+ """
+
+ types = set()
+
+ if not isinstance(suitable_for_isinstance, tuple):
+ suitable_for_isinstance = (suitable_for_isinstance,)
+ for thing in suitable_for_isinstance:
+ if isinstance(thing, tuple):
+ types.update(flatten(thing))
+ else:
+ types.add(thing)
+ return tuple(types)
+
+
+def ensure_list(thing):
+ """
+ Wrap ``thing`` in a list if it's a single str.
+
+ Otherwise, return it unchanged.
+
+ """
+
+ if isinstance(thing, str_types):
+ return [thing]
+ return thing
+
+
+def unbool(element, true=object(), false=object()):
+ """
+ A hack to make True and 1 and False and 0 unique for ``uniq``.
+
+ """
+
+ if element is True:
+ return true
+ elif element is False:
+ return false
+ return element
+
+
+def uniq(container):
+ """
+ Check if all of a container's elements are unique.
+
+ First tries to rely on the elements being hashable, then falls back
+ on them being sortable, and finally falls back on brute force.
+
+ """
+
+ try:
+ return len(set(unbool(i) for i in container)) == len(container)
+ except TypeError:
+ try:
+ sort = sorted(unbool(i) for i in container)
+ sliced = itertools.islice(sort, 1, None)
+ for i, j in zip(sort, sliced):
+ if i == j:
+ return False
+ except (NotImplementedError, TypeError):
+ seen = []
+ for e in container:
+ e = unbool(e)
+ if e in seen:
+ return False
+ seen.append(e)
+ return True
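
The `unbool` indirection exists because `True == 1` and `False == 0` in Python, which would otherwise make `uniq` report false duplicates for values that JSON treats as distinct. A few illustrative calls:

```python
from jsonschema._utils import uniq

print(uniq([1, 2, 3]))             # True
print(uniq([1, 2, 1]))             # False
print(uniq([True, 1]))             # True: distinct per JSON despite True == 1
print(uniq([{"a": 1}, {"a": 1}]))  # False: unhashable, so brute force is used
```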
diff --git a/lib/spack/external/jsonschema/_validators.py b/lib/spack/external/jsonschema/_validators.py
new file mode 100644
index 0000000000..c6e801ccb2
--- /dev/null
+++ b/lib/spack/external/jsonschema/_validators.py
@@ -0,0 +1,358 @@
+import re
+
+from jsonschema import _utils
+from jsonschema.exceptions import FormatError, ValidationError
+from jsonschema.compat import iteritems
+
+
+FLOAT_TOLERANCE = 10 ** -15
+
+
+def patternProperties(validator, patternProperties, instance, schema):
+ if not validator.is_type(instance, "object"):
+ return
+
+ for pattern, subschema in iteritems(patternProperties):
+ for k, v in iteritems(instance):
+ if re.search(pattern, k):
+ for error in validator.descend(
+ v, subschema, path=k, schema_path=pattern,
+ ):
+ yield error
+
+
+def additionalProperties(validator, aP, instance, schema):
+ if not validator.is_type(instance, "object"):
+ return
+
+ extras = set(_utils.find_additional_properties(instance, schema))
+
+ if validator.is_type(aP, "object"):
+ for extra in extras:
+ for error in validator.descend(instance[extra], aP, path=extra):
+ yield error
+ elif not aP and extras:
+ error = "Additional properties are not allowed (%s %s unexpected)"
+ yield ValidationError(error % _utils.extras_msg(extras))
+
+
+def items(validator, items, instance, schema):
+ if not validator.is_type(instance, "array"):
+ return
+
+ if validator.is_type(items, "object"):
+ for index, item in enumerate(instance):
+ for error in validator.descend(item, items, path=index):
+ yield error
+ else:
+ for (index, item), subschema in zip(enumerate(instance), items):
+ for error in validator.descend(
+ item, subschema, path=index, schema_path=index,
+ ):
+ yield error
+
+
+def additionalItems(validator, aI, instance, schema):
+ if (
+ not validator.is_type(instance, "array") or
+ validator.is_type(schema.get("items", {}), "object")
+ ):
+ return
+
+ len_items = len(schema.get("items", []))
+ if validator.is_type(aI, "object"):
+ for index, item in enumerate(instance[len_items:], start=len_items):
+ for error in validator.descend(item, aI, path=index):
+ yield error
+ elif not aI and len(instance) > len(schema.get("items", [])):
+ error = "Additional items are not allowed (%s %s unexpected)"
+ yield ValidationError(
+ error %
+ _utils.extras_msg(instance[len(schema.get("items", [])):])
+ )
+
+
+def minimum(validator, minimum, instance, schema):
+ if not validator.is_type(instance, "number"):
+ return
+
+ if schema.get("exclusiveMinimum", False):
+ failed = float(instance) <= minimum
+ cmp = "less than or equal to"
+ else:
+ failed = float(instance) < minimum
+ cmp = "less than"
+
+ if failed:
+ yield ValidationError(
+ "%r is %s the minimum of %r" % (instance, cmp, minimum)
+ )
+
+
+def maximum(validator, maximum, instance, schema):
+ if not validator.is_type(instance, "number"):
+ return
+
+ if schema.get("exclusiveMaximum", False):
+ failed = instance >= maximum
+ cmp = "greater than or equal to"
+ else:
+ failed = instance > maximum
+ cmp = "greater than"
+
+ if failed:
+ yield ValidationError(
+ "%r is %s the maximum of %r" % (instance, cmp, maximum)
+ )
+
+
+def multipleOf(validator, dB, instance, schema):
+ if not validator.is_type(instance, "number"):
+ return
+
+ if isinstance(dB, float):
+ mod = instance % dB
+ failed = (mod > FLOAT_TOLERANCE) and (dB - mod) > FLOAT_TOLERANCE
+ else:
+ failed = instance % dB
+
+ if failed:
+ yield ValidationError("%r is not a multiple of %r" % (instance, dB))
+
+
+def minItems(validator, mI, instance, schema):
+ if validator.is_type(instance, "array") and len(instance) < mI:
+ yield ValidationError("%r is too short" % (instance,))
+
+
+def maxItems(validator, mI, instance, schema):
+ if validator.is_type(instance, "array") and len(instance) > mI:
+ yield ValidationError("%r is too long" % (instance,))
+
+
+def uniqueItems(validator, uI, instance, schema):
+ if (
+ uI and
+ validator.is_type(instance, "array") and
+ not _utils.uniq(instance)
+ ):
+ yield ValidationError("%r has non-unique elements" % instance)
+
+
+def pattern(validator, patrn, instance, schema):
+ if (
+ validator.is_type(instance, "string") and
+ not re.search(patrn, instance)
+ ):
+ yield ValidationError("%r does not match %r" % (instance, patrn))
+
+
+def format(validator, format, instance, schema):
+ if validator.format_checker is not None:
+ try:
+ validator.format_checker.check(instance, format)
+ except FormatError as error:
+ yield ValidationError(error.message, cause=error.cause)
+
+
+def minLength(validator, mL, instance, schema):
+ if validator.is_type(instance, "string") and len(instance) < mL:
+ yield ValidationError("%r is too short" % (instance,))
+
+
+def maxLength(validator, mL, instance, schema):
+ if validator.is_type(instance, "string") and len(instance) > mL:
+ yield ValidationError("%r is too long" % (instance,))
+
+
+def dependencies(validator, dependencies, instance, schema):
+ if not validator.is_type(instance, "object"):
+ return
+
+ for property, dependency in iteritems(dependencies):
+ if property not in instance:
+ continue
+
+ if validator.is_type(dependency, "object"):
+ for error in validator.descend(
+ instance, dependency, schema_path=property,
+ ):
+ yield error
+ else:
+ dependencies = _utils.ensure_list(dependency)
+ for dependency in dependencies:
+ if dependency not in instance:
+ yield ValidationError(
+ "%r is a dependency of %r" % (dependency, property)
+ )
+
+
+def enum(validator, enums, instance, schema):
+ if instance not in enums:
+ yield ValidationError("%r is not one of %r" % (instance, enums))
+
+
+def ref(validator, ref, instance, schema):
+ with validator.resolver.resolving(ref) as resolved:
+ for error in validator.descend(instance, resolved):
+ yield error
+
+
+def type_draft3(validator, types, instance, schema):
+ types = _utils.ensure_list(types)
+
+ all_errors = []
+ for index, type in enumerate(types):
+ if type == "any":
+ return
+ if validator.is_type(type, "object"):
+ errors = list(validator.descend(instance, type, schema_path=index))
+ if not errors:
+ return
+ all_errors.extend(errors)
+ else:
+ if validator.is_type(instance, type):
+ return
+ else:
+ yield ValidationError(
+ _utils.types_msg(instance, types), context=all_errors,
+ )
+
+
+def properties_draft3(validator, properties, instance, schema):
+ if not validator.is_type(instance, "object"):
+ return
+
+ for property, subschema in iteritems(properties):
+ if property in instance:
+ for error in validator.descend(
+ instance[property],
+ subschema,
+ path=property,
+ schema_path=property,
+ ):
+ yield error
+ elif subschema.get("required", False):
+ error = ValidationError("%r is a required property" % property)
+ error._set(
+ validator="required",
+ validator_value=subschema["required"],
+ instance=instance,
+ schema=schema,
+ )
+ error.path.appendleft(property)
+ error.schema_path.extend([property, "required"])
+ yield error
+
+
+def disallow_draft3(validator, disallow, instance, schema):
+ for disallowed in _utils.ensure_list(disallow):
+ if validator.is_valid(instance, {"type" : [disallowed]}):
+ yield ValidationError(
+ "%r is disallowed for %r" % (disallowed, instance)
+ )
+
+
+def extends_draft3(validator, extends, instance, schema):
+ if validator.is_type(extends, "object"):
+ for error in validator.descend(instance, extends):
+ yield error
+ return
+ for index, subschema in enumerate(extends):
+ for error in validator.descend(instance, subschema, schema_path=index):
+ yield error
+
+
+def type_draft4(validator, types, instance, schema):
+ types = _utils.ensure_list(types)
+
+ if not any(validator.is_type(instance, type) for type in types):
+ yield ValidationError(_utils.types_msg(instance, types))
+
+
+def properties_draft4(validator, properties, instance, schema):
+ if not validator.is_type(instance, "object"):
+ return
+
+ for property, subschema in iteritems(properties):
+ if property in instance:
+ for error in validator.descend(
+ instance[property],
+ subschema,
+ path=property,
+ schema_path=property,
+ ):
+ yield error
+
+
+def required_draft4(validator, required, instance, schema):
+ if not validator.is_type(instance, "object"):
+ return
+ for property in required:
+ if property not in instance:
+ yield ValidationError("%r is a required property" % property)
+
+
+def minProperties_draft4(validator, mP, instance, schema):
+ if validator.is_type(instance, "object") and len(instance) < mP:
+ yield ValidationError(
+ "%r does not have enough properties" % (instance,)
+ )
+
+
+def maxProperties_draft4(validator, mP, instance, schema):
+ if not validator.is_type(instance, "object"):
+ return
+ if validator.is_type(instance, "object") and len(instance) > mP:
+ yield ValidationError("%r has too many properties" % (instance,))
+
+
+def allOf_draft4(validator, allOf, instance, schema):
+ for index, subschema in enumerate(allOf):
+ for error in validator.descend(instance, subschema, schema_path=index):
+ yield error
+
+
+def oneOf_draft4(validator, oneOf, instance, schema):
+ subschemas = enumerate(oneOf)
+ all_errors = []
+ for index, subschema in subschemas:
+ errs = list(validator.descend(instance, subschema, schema_path=index))
+ if not errs:
+ first_valid = subschema
+ break
+ all_errors.extend(errs)
+ else:
+ yield ValidationError(
+ "%r is not valid under any of the given schemas" % (instance,),
+ context=all_errors,
+ )
+
+ more_valid = [s for i, s in subschemas if validator.is_valid(instance, s)]
+ if more_valid:
+ more_valid.append(first_valid)
+ reprs = ", ".join(repr(schema) for schema in more_valid)
+ yield ValidationError(
+ "%r is valid under each of %s" % (instance, reprs)
+ )
+
+
+def anyOf_draft4(validator, anyOf, instance, schema):
+ all_errors = []
+ for index, subschema in enumerate(anyOf):
+ errs = list(validator.descend(instance, subschema, schema_path=index))
+ if not errs:
+ break
+ all_errors.extend(errs)
+ else:
+ yield ValidationError(
+ "%r is not valid under any of the given schemas" % (instance,),
+ context=all_errors,
+ )
+
+
+def not_draft4(validator, not_schema, instance, schema):
+ if validator.is_valid(instance, not_schema):
+ yield ValidationError(
+ "%r is not allowed for %r" % (not_schema, instance)
+ )
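
Each function here lazily yields `ValidationError`s for one schema keyword; the validator machinery wires them up by name. A quick check of the `minimum`/`exclusiveMinimum` pair through the public API:

```python
from jsonschema import Draft4Validator

schema = {"type": "number", "minimum": 5, "exclusiveMinimum": True}
validator = Draft4Validator(schema)

print(validator.is_valid(6))  # True
print(validator.is_valid(5))  # False: 5 <= minimum once exclusiveMinimum is set

for error in validator.iter_errors(5):
    print(error.message)      # 5 is less than or equal to the minimum of 5
```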
diff --git a/lib/spack/external/jsonschema/cli.py b/lib/spack/external/jsonschema/cli.py
new file mode 100644
index 0000000000..0126564f46
--- /dev/null
+++ b/lib/spack/external/jsonschema/cli.py
@@ -0,0 +1,72 @@
+from __future__ import absolute_import
+import argparse
+import json
+import sys
+
+from jsonschema._reflect import namedAny
+from jsonschema.validators import validator_for
+
+
+def _namedAnyWithDefault(name):
+ if "." not in name:
+ name = "jsonschema." + name
+ return namedAny(name)
+
+
+def _json_file(path):
+ with open(path) as file:
+ return json.load(file)
+
+
+parser = argparse.ArgumentParser(
+ description="JSON Schema Validation CLI",
+)
+parser.add_argument(
+ "-i", "--instance",
+ action="append",
+ dest="instances",
+ type=_json_file,
+ help="a path to a JSON instance to validate "
+ "(may be specified multiple times)",
+)
+parser.add_argument(
+ "-F", "--error-format",
+ default="{error.instance}: {error.message}\n",
+ help="the format to use for each error output message, specified in "
+ "a form suitable for passing to str.format, which will be called "
+ "with 'error' for each error",
+)
+parser.add_argument(
+ "-V", "--validator",
+ type=_namedAnyWithDefault,
+ help="the fully qualified object name of a validator to use, or, for "
+ "validators that are registered with jsonschema, simply the name "
+ "of the class.",
+)
+parser.add_argument(
+ "schema",
+ help="the JSON Schema to validate with",
+ type=_json_file,
+)
+
+
+def parse_args(args):
+ arguments = vars(parser.parse_args(args=args or ["--help"]))
+ if arguments["validator"] is None:
+ arguments["validator"] = validator_for(arguments["schema"])
+ return arguments
+
+
+def main(args=sys.argv[1:]):
+ sys.exit(run(arguments=parse_args(args=args)))
+
+
+def run(arguments, stdout=sys.stdout, stderr=sys.stderr):
+ error_format = arguments["error_format"]
+ validator = arguments["validator"](schema=arguments["schema"])
+ errored = False
+ for instance in arguments["instances"] or ():
+ for error in validator.iter_errors(instance):
+ stderr.write(error_format.format(error=error))
+ errored = True
+ return errored
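
`run()` formats every error with the user-supplied template and reports, via its return value, whether anything failed. A sketch of driving the same machinery programmatically; the file paths here are hypothetical:

```python
import json
from jsonschema.cli import parse_args, run

# Set up a schema and a failing instance on disk first (hypothetical paths).
with open("/tmp/schema.json", "w") as f:
    json.dump({"type": "object", "required": ["name"]}, f)
with open("/tmp/instance.json", "w") as f:
    json.dump({"age": 3}, f)

arguments = parse_args(["-i", "/tmp/instance.json", "/tmp/schema.json"])
errored = run(arguments)  # prints "{'age': 3}: 'name' is a required property"
print(errored)            # True, since the instance failed validation
```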
diff --git a/lib/spack/external/jsonschema/compat.py b/lib/spack/external/jsonschema/compat.py
new file mode 100644
index 0000000000..6ca49ab6be
--- /dev/null
+++ b/lib/spack/external/jsonschema/compat.py
@@ -0,0 +1,53 @@
+from __future__ import unicode_literals
+import sys
+import operator
+
+try:
+ from collections import MutableMapping, Sequence # noqa
+except ImportError:
+ from collections.abc import MutableMapping, Sequence # noqa
+
+PY3 = sys.version_info[0] >= 3
+
+if PY3:
+ zip = zip
+ from io import StringIO
+ from urllib.parse import (
+ unquote, urljoin, urlunsplit, SplitResult, urlsplit as _urlsplit
+ )
+ from urllib.request import urlopen
+ str_types = str,
+ int_types = int,
+ iteritems = operator.methodcaller("items")
+else:
+ from itertools import izip as zip # noqa
+ from StringIO import StringIO
+ from urlparse import (
+ urljoin, urlunsplit, SplitResult, urlsplit as _urlsplit # noqa
+ )
+ from urllib import unquote # noqa
+ from urllib2 import urlopen # noqa
+ str_types = basestring
+ int_types = int, long
+ iteritems = operator.methodcaller("iteritems")
+
+
+# On python < 3.3 fragments are not handled properly with unknown schemes
+def urlsplit(url):
+ scheme, netloc, path, query, fragment = _urlsplit(url)
+ if "#" in path:
+ path, fragment = path.split("#", 1)
+ return SplitResult(scheme, netloc, path, query, fragment)
+
+
+def urldefrag(url):
+ if "#" in url:
+ s, n, p, q, frag = urlsplit(url)
+ defrag = urlunsplit((s, n, p, q, ''))
+ else:
+ defrag = url
+ frag = ''
+ return defrag, frag
+
+
+# flake8: noqa
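
The custom `urlsplit`/`urldefrag` exist because, as the comment says, older Pythons drop fragments on unknown schemes, and JSON Schema `$ref`s lean heavily on fragments. Expected behaviour, for illustration:

```python
from jsonschema.compat import urldefrag

print(urldefrag("http://example.com/schema#/definitions/foo"))
# ('http://example.com/schema', '/definitions/foo')

print(urldefrag("tag:example#frag"))  # fragment kept despite the odd scheme
# ('tag:example', 'frag')
```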
diff --git a/lib/spack/external/jsonschema/exceptions.py b/lib/spack/external/jsonschema/exceptions.py
new file mode 100644
index 0000000000..478e59c531
--- /dev/null
+++ b/lib/spack/external/jsonschema/exceptions.py
@@ -0,0 +1,264 @@
+from collections import defaultdict, deque
+import itertools
+import pprint
+import textwrap
+
+from jsonschema import _utils
+from jsonschema.compat import PY3, iteritems
+
+
+WEAK_MATCHES = frozenset(["anyOf", "oneOf"])
+STRONG_MATCHES = frozenset()
+
+_unset = _utils.Unset()
+
+
+class _Error(Exception):
+ def __init__(
+ self,
+ message,
+ validator=_unset,
+ path=(),
+ cause=None,
+ context=(),
+ validator_value=_unset,
+ instance=_unset,
+ schema=_unset,
+ schema_path=(),
+ parent=None,
+ ):
+ self.message = message
+ self.path = self.relative_path = deque(path)
+ self.schema_path = self.relative_schema_path = deque(schema_path)
+ self.context = list(context)
+ self.cause = self.__cause__ = cause
+ self.validator = validator
+ self.validator_value = validator_value
+ self.instance = instance
+ self.schema = schema
+ self.parent = parent
+
+ for error in context:
+ error.parent = self
+
+ def __repr__(self):
+ return "<%s: %r>" % (self.__class__.__name__, self.message)
+
+ def __str__(self):
+ return unicode(self).encode("utf-8")
+
+ def __unicode__(self):
+ essential_for_verbose = (
+ self.validator, self.validator_value, self.instance, self.schema,
+ )
+ if any(m is _unset for m in essential_for_verbose):
+ return self.message
+
+ pschema = pprint.pformat(self.schema, width=72)
+ pinstance = pprint.pformat(self.instance, width=72)
+ return self.message + textwrap.dedent("""
+
+ Failed validating %r in schema%s:
+ %s
+
+ On instance%s:
+ %s
+ """.rstrip()
+ ) % (
+ self.validator,
+ _utils.format_as_index(list(self.relative_schema_path)[:-1]),
+ _utils.indent(pschema),
+ _utils.format_as_index(self.relative_path),
+ _utils.indent(pinstance),
+ )
+
+ if PY3:
+ __str__ = __unicode__
+
+ @classmethod
+ def create_from(cls, other):
+ return cls(**other._contents())
+
+ @property
+ def absolute_path(self):
+ parent = self.parent
+ if parent is None:
+ return self.relative_path
+
+ path = deque(self.relative_path)
+ path.extendleft(parent.absolute_path)
+ return path
+
+ @property
+ def absolute_schema_path(self):
+ parent = self.parent
+ if parent is None:
+ return self.relative_schema_path
+
+ path = deque(self.relative_schema_path)
+ path.extendleft(parent.absolute_schema_path)
+ return path
+
+ def _set(self, **kwargs):
+ for k, v in iteritems(kwargs):
+ if getattr(self, k) is _unset:
+ setattr(self, k, v)
+
+ def _contents(self):
+ attrs = (
+ "message", "cause", "context", "validator", "validator_value",
+ "path", "schema_path", "instance", "schema", "parent",
+ )
+ return dict((attr, getattr(self, attr)) for attr in attrs)
+
+
+class ValidationError(_Error):
+ pass
+
+
+class SchemaError(_Error):
+ pass
+
+
+class RefResolutionError(Exception):
+ pass
+
+
+class UnknownType(Exception):
+ def __init__(self, type, instance, schema):
+ self.type = type
+ self.instance = instance
+ self.schema = schema
+
+ def __str__(self):
+ return unicode(self).encode("utf-8")
+
+ def __unicode__(self):
+ pschema = pprint.pformat(self.schema, width=72)
+ pinstance = pprint.pformat(self.instance, width=72)
+ return textwrap.dedent("""
+ Unknown type %r for validator with schema:
+ %s
+
+ While checking instance:
+ %s
+ """.rstrip()
+ ) % (self.type, _utils.indent(pschema), _utils.indent(pinstance))
+
+ if PY3:
+ __str__ = __unicode__
+
+
+
+class FormatError(Exception):
+ def __init__(self, message, cause=None):
+ super(FormatError, self).__init__(message, cause)
+ self.message = message
+ self.cause = self.__cause__ = cause
+
+ def __str__(self):
+ return self.message.encode("utf-8")
+
+ def __unicode__(self):
+ return self.message
+
+ if PY3:
+ __str__ = __unicode__
+
+
+class ErrorTree(object):
+ """
+ ErrorTrees make it easier to check which validations failed.
+
+ """
+
+ _instance = _unset
+
+ def __init__(self, errors=()):
+ self.errors = {}
+ self._contents = defaultdict(self.__class__)
+
+ for error in errors:
+ container = self
+ for element in error.path:
+ container = container[element]
+ container.errors[error.validator] = error
+
+ self._instance = error.instance
+
+ def __contains__(self, index):
+ """
+ Check whether ``instance[index]`` has any errors.
+
+ """
+
+ return index in self._contents
+
+ def __getitem__(self, index):
+ """
+ Retrieve the child tree one level down at the given ``index``.
+
+ If the index is not in the instance that this tree corresponds to and
+ is not known by this tree, whatever error would be raised by
+ ``instance.__getitem__`` will be propagated (usually this is some
+ subclass of :class:`LookupError`).
+
+ """
+
+ if self._instance is not _unset and index not in self:
+ self._instance[index]
+ return self._contents[index]
+
+ def __setitem__(self, index, value):
+ self._contents[index] = value
+
+ def __iter__(self):
+ """
+ Iterate (non-recursively) over the indices in the instance with errors.
+
+ """
+
+ return iter(self._contents)
+
+ def __len__(self):
+ """
+ Same as :attr:`total_errors`.
+
+ """
+
+ return self.total_errors
+
+ def __repr__(self):
+ return "<%s (%s total errors)>" % (self.__class__.__name__, len(self))
+
+ @property
+ def total_errors(self):
+ """
+ The total number of errors in the entire tree, including children.
+
+ """
+
+ child_errors = sum(len(tree) for _, tree in iteritems(self._contents))
+ return len(self.errors) + child_errors
+
+
+def by_relevance(weak=WEAK_MATCHES, strong=STRONG_MATCHES):
+ def relevance(error):
+ validator = error.validator
+ return -len(error.path), validator not in weak, validator in strong
+ return relevance
+
+
+relevance = by_relevance()
+
+
+def best_match(errors, key=relevance):
+ errors = iter(errors)
+ best = next(errors, None)
+ if best is None:
+ return
+ best = max(itertools.chain([best], errors), key=key)
+
+ while best.context:
+ best = min(best.context, key=key)
+ return best
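
`ErrorTree` indexes errors by their path into the instance, while `best_match` picks the single most relevant error out of a lazy iterator. A small sketch combining the two:

```python
from jsonschema import Draft4Validator
from jsonschema.exceptions import ErrorTree, best_match

schema = {
    "type": "object",
    "properties": {"size": {"type": "integer", "minimum": 1}},
}
validator = Draft4Validator(schema)
instance = {"size": 0}

tree = ErrorTree(validator.iter_errors(instance))
print("size" in tree)                          # True: that key has errors
print(tree["size"].errors["minimum"].message)  # 0 is less than the minimum of 1

# best_match condenses everything down to one error worth showing a user.
print(best_match(validator.iter_errors(instance)).message)
```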
diff --git a/lib/spack/external/jsonschema/schemas/draft3.json b/lib/spack/external/jsonschema/schemas/draft3.json
new file mode 100644
index 0000000000..5bcefe30d5
--- /dev/null
+++ b/lib/spack/external/jsonschema/schemas/draft3.json
@@ -0,0 +1,201 @@
+{
+ "$schema": "http://json-schema.org/draft-03/schema#",
+ "dependencies": {
+ "exclusiveMaximum": "maximum",
+ "exclusiveMinimum": "minimum"
+ },
+ "id": "http://json-schema.org/draft-03/schema#",
+ "properties": {
+ "$ref": {
+ "format": "uri",
+ "type": "string"
+ },
+ "$schema": {
+ "format": "uri",
+ "type": "string"
+ },
+ "additionalItems": {
+ "default": {},
+ "type": [
+ {
+ "$ref": "#"
+ },
+ "boolean"
+ ]
+ },
+ "additionalProperties": {
+ "default": {},
+ "type": [
+ {
+ "$ref": "#"
+ },
+ "boolean"
+ ]
+ },
+ "default": {
+ "type": "any"
+ },
+ "dependencies": {
+ "additionalProperties": {
+ "items": {
+ "type": "string"
+ },
+ "type": [
+ "string",
+ "array",
+ {
+ "$ref": "#"
+ }
+ ]
+ },
+ "default": {},
+ "type": [
+ "string",
+ "array",
+ "object"
+ ]
+ },
+ "description": {
+ "type": "string"
+ },
+ "disallow": {
+ "items": {
+ "type": [
+ "string",
+ {
+ "$ref": "#"
+ }
+ ]
+ },
+ "type": [
+ "string",
+ "array"
+ ],
+ "uniqueItems": true
+ },
+ "divisibleBy": {
+ "default": 1,
+ "exclusiveMinimum": true,
+ "minimum": 0,
+ "type": "number"
+ },
+ "enum": {
+ "minItems": 1,
+ "type": "array",
+ "uniqueItems": true
+ },
+ "exclusiveMaximum": {
+ "default": false,
+ "type": "boolean"
+ },
+ "exclusiveMinimum": {
+ "default": false,
+ "type": "boolean"
+ },
+ "extends": {
+ "default": {},
+ "items": {
+ "$ref": "#"
+ },
+ "type": [
+ {
+ "$ref": "#"
+ },
+ "array"
+ ]
+ },
+ "format": {
+ "type": "string"
+ },
+ "id": {
+ "format": "uri",
+ "type": "string"
+ },
+ "items": {
+ "default": {},
+ "items": {
+ "$ref": "#"
+ },
+ "type": [
+ {
+ "$ref": "#"
+ },
+ "array"
+ ]
+ },
+ "maxDecimal": {
+ "minimum": 0,
+ "type": "number"
+ },
+ "maxItems": {
+ "minimum": 0,
+ "type": "integer"
+ },
+ "maxLength": {
+ "type": "integer"
+ },
+ "maximum": {
+ "type": "number"
+ },
+ "minItems": {
+ "default": 0,
+ "minimum": 0,
+ "type": "integer"
+ },
+ "minLength": {
+ "default": 0,
+ "minimum": 0,
+ "type": "integer"
+ },
+ "minimum": {
+ "type": "number"
+ },
+ "pattern": {
+ "format": "regex",
+ "type": "string"
+ },
+ "patternProperties": {
+ "additionalProperties": {
+ "$ref": "#"
+ },
+ "default": {},
+ "type": "object"
+ },
+ "properties": {
+ "additionalProperties": {
+ "$ref": "#",
+ "type": "object"
+ },
+ "default": {},
+ "type": "object"
+ },
+ "required": {
+ "default": false,
+ "type": "boolean"
+ },
+ "title": {
+ "type": "string"
+ },
+ "type": {
+ "default": "any",
+ "items": {
+ "type": [
+ "string",
+ {
+ "$ref": "#"
+ }
+ ]
+ },
+ "type": [
+ "string",
+ "array"
+ ],
+ "uniqueItems": true
+ },
+ "uniqueItems": {
+ "default": false,
+ "type": "boolean"
+ }
+ },
+ "type": "object"
+}
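
The draft-3 meta-schema above is what Draft3Validator.check_schema
validates schemas against. A minimal sketch of exercising it:

    from jsonschema import Draft3Validator, SchemaError

    try:
        # "minItems" must be a non-negative integer per the meta-schema.
        Draft3Validator.check_schema({"minItems": "1"})
    except SchemaError as error:
        print(error.message)
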
diff --git a/lib/spack/external/jsonschema/schemas/draft4.json b/lib/spack/external/jsonschema/schemas/draft4.json
new file mode 100644
index 0000000000..fead5cefab
--- /dev/null
+++ b/lib/spack/external/jsonschema/schemas/draft4.json
@@ -0,0 +1,221 @@
+{
+ "$schema": "http://json-schema.org/draft-04/schema#",
+ "default": {},
+ "definitions": {
+ "positiveInteger": {
+ "minimum": 0,
+ "type": "integer"
+ },
+ "positiveIntegerDefault0": {
+ "allOf": [
+ {
+ "$ref": "#/definitions/positiveInteger"
+ },
+ {
+ "default": 0
+ }
+ ]
+ },
+ "schemaArray": {
+ "items": {
+ "$ref": "#"
+ },
+ "minItems": 1,
+ "type": "array"
+ },
+ "simpleTypes": {
+ "enum": [
+ "array",
+ "boolean",
+ "integer",
+ "null",
+ "number",
+ "object",
+ "string"
+ ]
+ },
+ "stringArray": {
+ "items": {
+ "type": "string"
+ },
+ "minItems": 1,
+ "type": "array",
+ "uniqueItems": true
+ }
+ },
+ "dependencies": {
+ "exclusiveMaximum": [
+ "maximum"
+ ],
+ "exclusiveMinimum": [
+ "minimum"
+ ]
+ },
+ "description": "Core schema meta-schema",
+ "id": "http://json-schema.org/draft-04/schema#",
+ "properties": {
+ "$schema": {
+ "format": "uri",
+ "type": "string"
+ },
+ "additionalItems": {
+ "anyOf": [
+ {
+ "type": "boolean"
+ },
+ {
+ "$ref": "#"
+ }
+ ],
+ "default": {}
+ },
+ "additionalProperties": {
+ "anyOf": [
+ {
+ "type": "boolean"
+ },
+ {
+ "$ref": "#"
+ }
+ ],
+ "default": {}
+ },
+ "allOf": {
+ "$ref": "#/definitions/schemaArray"
+ },
+ "anyOf": {
+ "$ref": "#/definitions/schemaArray"
+ },
+ "default": {},
+ "definitions": {
+ "additionalProperties": {
+ "$ref": "#"
+ },
+ "default": {},
+ "type": "object"
+ },
+ "dependencies": {
+ "additionalProperties": {
+ "anyOf": [
+ {
+ "$ref": "#"
+ },
+ {
+ "$ref": "#/definitions/stringArray"
+ }
+ ]
+ },
+ "type": "object"
+ },
+ "description": {
+ "type": "string"
+ },
+ "enum": {
+ "minItems": 1,
+ "type": "array",
+ "uniqueItems": true
+ },
+ "exclusiveMaximum": {
+ "default": false,
+ "type": "boolean"
+ },
+ "exclusiveMinimum": {
+ "default": false,
+ "type": "boolean"
+ },
+ "id": {
+ "format": "uri",
+ "type": "string"
+ },
+ "items": {
+ "anyOf": [
+ {
+ "$ref": "#"
+ },
+ {
+ "$ref": "#/definitions/schemaArray"
+ }
+ ],
+ "default": {}
+ },
+ "maxItems": {
+ "$ref": "#/definitions/positiveInteger"
+ },
+ "maxLength": {
+ "$ref": "#/definitions/positiveInteger"
+ },
+ "maxProperties": {
+ "$ref": "#/definitions/positiveInteger"
+ },
+ "maximum": {
+ "type": "number"
+ },
+ "minItems": {
+ "$ref": "#/definitions/positiveIntegerDefault0"
+ },
+ "minLength": {
+ "$ref": "#/definitions/positiveIntegerDefault0"
+ },
+ "minProperties": {
+ "$ref": "#/definitions/positiveIntegerDefault0"
+ },
+ "minimum": {
+ "type": "number"
+ },
+ "multipleOf": {
+ "exclusiveMinimum": true,
+ "minimum": 0,
+ "type": "number"
+ },
+ "not": {
+ "$ref": "#"
+ },
+ "oneOf": {
+ "$ref": "#/definitions/schemaArray"
+ },
+ "pattern": {
+ "format": "regex",
+ "type": "string"
+ },
+ "patternProperties": {
+ "additionalProperties": {
+ "$ref": "#"
+ },
+ "default": {},
+ "type": "object"
+ },
+ "properties": {
+ "additionalProperties": {
+ "$ref": "#"
+ },
+ "default": {},
+ "type": "object"
+ },
+ "required": {
+ "$ref": "#/definitions/stringArray"
+ },
+ "title": {
+ "type": "string"
+ },
+ "type": {
+ "anyOf": [
+ {
+ "$ref": "#/definitions/simpleTypes"
+ },
+ {
+ "items": {
+ "$ref": "#/definitions/simpleTypes"
+ },
+ "minItems": 1,
+ "type": "array",
+ "uniqueItems": true
+ }
+ ]
+ },
+ "uniqueItems": {
+ "default": false,
+ "type": "boolean"
+ }
+ },
+ "type": "object"
+}
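
Likewise for draft 4: validate() defaults to Draft4Validator, whose
META_SCHEMA is the document above. A small usage sketch (the schema is
invented for the example):

    from jsonschema import validate, ValidationError

    schema = {
        "type": "object",
        "properties": {"compiler": {"type": "string"}},
        "required": ["compiler"],
    }

    validate({"compiler": "gcc"}, schema)  # passes silently
    try:
        validate({}, schema)  # "compiler" is required
    except ValidationError as error:
        print(error.message)
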
diff --git a/lib/spack/external/jsonschema/tests/__init__.py b/lib/spack/external/jsonschema/tests/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/spack/external/jsonschema/tests/__init__.py
diff --git a/lib/spack/external/jsonschema/tests/compat.py b/lib/spack/external/jsonschema/tests/compat.py
new file mode 100644
index 0000000000..b37483f5dd
--- /dev/null
+++ b/lib/spack/external/jsonschema/tests/compat.py
@@ -0,0 +1,15 @@
+import sys
+
+
+if sys.version_info[:2] < (2, 7): # pragma: no cover
+ import unittest2 as unittest
+else:
+ import unittest
+
+try:
+ from unittest import mock
+except ImportError:
+ import mock
+
+
+# flake8: noqa
diff --git a/lib/spack/external/jsonschema/tests/test_cli.py b/lib/spack/external/jsonschema/tests/test_cli.py
new file mode 100644
index 0000000000..f625ca989d
--- /dev/null
+++ b/lib/spack/external/jsonschema/tests/test_cli.py
@@ -0,0 +1,110 @@
+from jsonschema import Draft4Validator, ValidationError, cli
+from jsonschema.compat import StringIO
+from jsonschema.tests.compat import mock, unittest
+
+
+def fake_validator(*errors):
+ errors = list(reversed(errors))
+
+ class FakeValidator(object):
+ def __init__(self, *args, **kwargs):
+ pass
+
+ def iter_errors(self, instance):
+ if errors:
+ return errors.pop()
+ return []
+ return FakeValidator
+
+
+class TestParser(unittest.TestCase):
+ FakeValidator = fake_validator()
+
+ def setUp(self):
+ mock_open = mock.mock_open()
+ patch_open = mock.patch.object(cli, "open", mock_open, create=True)
+ patch_open.start()
+ self.addCleanup(patch_open.stop)
+
+ mock_json_load = mock.Mock()
+ mock_json_load.return_value = {}
+ patch_json_load = mock.patch("json.load")
+ patch_json_load.start()
+ self.addCleanup(patch_json_load.stop)
+
+ def test_find_validator_by_fully_qualified_object_name(self):
+ arguments = cli.parse_args(
+ [
+ "--validator",
+ "jsonschema.tests.test_cli.TestParser.FakeValidator",
+ "--instance", "foo.json",
+ "schema.json",
+ ]
+ )
+ self.assertIs(arguments["validator"], self.FakeValidator)
+
+ def test_find_validator_in_jsonschema(self):
+ arguments = cli.parse_args(
+ [
+ "--validator", "Draft4Validator",
+ "--instance", "foo.json",
+ "schema.json",
+ ]
+ )
+ self.assertIs(arguments["validator"], Draft4Validator)
+
+
+class TestCLI(unittest.TestCase):
+ def test_successful_validation(self):
+ stdout, stderr = StringIO(), StringIO()
+ exit_code = cli.run(
+ {
+ "validator": fake_validator(),
+ "schema": {},
+ "instances": [1],
+ "error_format": "{error.message}",
+ },
+ stdout=stdout,
+ stderr=stderr,
+ )
+ self.assertFalse(stdout.getvalue())
+ self.assertFalse(stderr.getvalue())
+ self.assertEqual(exit_code, 0)
+
+ def test_unsuccessful_validation(self):
+ error = ValidationError("I am an error!", instance=1)
+ stdout, stderr = StringIO(), StringIO()
+ exit_code = cli.run(
+ {
+ "validator": fake_validator([error]),
+ "schema": {},
+ "instances": [1],
+ "error_format": "{error.instance} - {error.message}",
+ },
+ stdout=stdout,
+ stderr=stderr,
+ )
+ self.assertFalse(stdout.getvalue())
+ self.assertEqual(stderr.getvalue(), "1 - I am an error!")
+ self.assertEqual(exit_code, 1)
+
+ def test_unsuccessful_validation_multiple_instances(self):
+ first_errors = [
+ ValidationError("9", instance=1),
+ ValidationError("8", instance=1),
+ ]
+ second_errors = [ValidationError("7", instance=2)]
+ stdout, stderr = StringIO(), StringIO()
+ exit_code = cli.run(
+ {
+ "validator": fake_validator(first_errors, second_errors),
+ "schema": {},
+ "instances": [1, 2],
+ "error_format": "{error.instance} - {error.message}\t",
+ },
+ stdout=stdout,
+ stderr=stderr,
+ )
+ self.assertFalse(stdout.getvalue())
+ self.assertEqual(stderr.getvalue(), "1 - 9\t1 - 8\t2 - 7\t")
+ self.assertEqual(exit_code, 1)
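
For reference, the arguments dict these tests build is the same shape
cli.run() receives from the command line entry point. A programmatic
sketch of driving it directly (schema and instance invented):

    import sys

    from jsonschema import Draft4Validator, cli

    exit_code = cli.run(
        {
            "validator": Draft4Validator,
            "schema": {"type": "integer"},
            "instances": ["not an integer"],
            "error_format": "{error.instance} - {error.message}\n",
        },
        stdout=sys.stdout,
        stderr=sys.stderr,
    )
    sys.exit(exit_code)
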
diff --git a/lib/spack/external/jsonschema/tests/test_exceptions.py b/lib/spack/external/jsonschema/tests/test_exceptions.py
new file mode 100644
index 0000000000..9e5793c628
--- /dev/null
+++ b/lib/spack/external/jsonschema/tests/test_exceptions.py
@@ -0,0 +1,382 @@
+import textwrap
+
+from jsonschema import Draft4Validator, exceptions
+from jsonschema.compat import PY3
+from jsonschema.tests.compat import mock, unittest
+
+
+class TestBestMatch(unittest.TestCase):
+ def best_match(self, errors):
+ errors = list(errors)
+ best = exceptions.best_match(errors)
+ reversed_best = exceptions.best_match(reversed(errors))
+ self.assertEqual(
+ best,
+ reversed_best,
+ msg="Didn't return a consistent best match!\n"
+ "Got: {0}\n\nThen: {1}".format(best, reversed_best),
+ )
+ return best
+
+ def test_shallower_errors_are_better_matches(self):
+ validator = Draft4Validator(
+ {
+ "properties" : {
+ "foo" : {
+ "minProperties" : 2,
+ "properties" : {"bar" : {"type" : "object"}},
+ }
+ }
+ }
+ )
+ best = self.best_match(validator.iter_errors({"foo" : {"bar" : []}}))
+ self.assertEqual(best.validator, "minProperties")
+
+ def test_oneOf_and_anyOf_are_weak_matches(self):
+ """
+ A property you *must* match is probably better than one you have to
+ match a part of.
+
+ """
+
+ validator = Draft4Validator(
+ {
+ "minProperties" : 2,
+ "anyOf" : [{"type" : "string"}, {"type" : "number"}],
+ "oneOf" : [{"type" : "string"}, {"type" : "number"}],
+ }
+ )
+ best = self.best_match(validator.iter_errors({}))
+ self.assertEqual(best.validator, "minProperties")
+
+ def test_if_the_most_relevant_error_is_anyOf_it_is_traversed(self):
+ """
+ If the most relevant error is an anyOf, then we traverse its context
+ and select the otherwise *least* relevant error, since in this case
+ that means the most specific, deep, error inside the instance.
+
+ I.e. since only one of the schemas must match, we look for the most
+ relevant one.
+
+ """
+
+ validator = Draft4Validator(
+ {
+ "properties" : {
+ "foo" : {
+ "anyOf" : [
+ {"type" : "string"},
+ {"properties" : {"bar" : {"type" : "array"}}},
+ ],
+ },
+ },
+ },
+ )
+ best = self.best_match(validator.iter_errors({"foo" : {"bar" : 12}}))
+ self.assertEqual(best.validator_value, "array")
+
+ def test_if_the_most_relevant_error_is_oneOf_it_is_traversed(self):
+ """
+ If the most relevant error is an oneOf, then we traverse its context
+ and select the otherwise *least* relevant error, since in this case
+ that means the most specific, deep, error inside the instance.
+
+ I.e. since only one of the schemas must match, we look for the most
+ relevant one.
+
+ """
+
+ validator = Draft4Validator(
+ {
+ "properties" : {
+ "foo" : {
+ "oneOf" : [
+ {"type" : "string"},
+ {"properties" : {"bar" : {"type" : "array"}}},
+ ],
+ },
+ },
+ },
+ )
+ best = self.best_match(validator.iter_errors({"foo" : {"bar" : 12}}))
+ self.assertEqual(best.validator_value, "array")
+
+ def test_if_the_most_relevant_error_is_allOf_it_is_traversed(self):
+ """
+ Now, if the error is allOf, we traverse but select the *most* relevant
+ error from the context, because all schemas here must match anyways.
+
+ """
+
+ validator = Draft4Validator(
+ {
+ "properties" : {
+ "foo" : {
+ "allOf" : [
+ {"type" : "string"},
+ {"properties" : {"bar" : {"type" : "array"}}},
+ ],
+ },
+ },
+ },
+ )
+ best = self.best_match(validator.iter_errors({"foo" : {"bar" : 12}}))
+ self.assertEqual(best.validator_value, "string")
+
+ def test_nested_context_for_oneOf(self):
+ validator = Draft4Validator(
+ {
+ "properties" : {
+ "foo" : {
+ "oneOf" : [
+ {"type" : "string"},
+ {
+ "oneOf" : [
+ {"type" : "string"},
+ {
+ "properties" : {
+ "bar" : {"type" : "array"}
+ },
+ },
+ ],
+ },
+ ],
+ },
+ },
+ },
+ )
+ best = self.best_match(validator.iter_errors({"foo" : {"bar" : 12}}))
+ self.assertEqual(best.validator_value, "array")
+
+ def test_one_error(self):
+ validator = Draft4Validator({"minProperties" : 2})
+ error, = validator.iter_errors({})
+ self.assertEqual(
+ exceptions.best_match(validator.iter_errors({})).validator,
+ "minProperties",
+ )
+
+ def test_no_errors(self):
+ validator = Draft4Validator({})
+ self.assertIsNone(exceptions.best_match(validator.iter_errors({})))
+
+
+class TestByRelevance(unittest.TestCase):
+ def test_short_paths_are_better_matches(self):
+ shallow = exceptions.ValidationError("Oh no!", path=["baz"])
+ deep = exceptions.ValidationError("Oh yes!", path=["foo", "bar"])
+ match = max([shallow, deep], key=exceptions.relevance)
+ self.assertIs(match, shallow)
+
+ match = max([deep, shallow], key=exceptions.relevance)
+ self.assertIs(match, shallow)
+
+ def test_global_errors_are_even_better_matches(self):
+ shallow = exceptions.ValidationError("Oh no!", path=[])
+ deep = exceptions.ValidationError("Oh yes!", path=["foo"])
+
+ errors = sorted([shallow, deep], key=exceptions.relevance)
+ self.assertEqual(
+ [list(error.path) for error in errors],
+ [["foo"], []],
+ )
+
+ errors = sorted([deep, shallow], key=exceptions.relevance)
+ self.assertEqual(
+ [list(error.path) for error in errors],
+ [["foo"], []],
+ )
+
+ def test_weak_validators_are_lower_priority(self):
+ weak = exceptions.ValidationError("Oh no!", path=[], validator="a")
+ normal = exceptions.ValidationError("Oh yes!", path=[], validator="b")
+
+ best_match = exceptions.by_relevance(weak="a")
+
+ match = max([weak, normal], key=best_match)
+ self.assertIs(match, normal)
+
+ match = max([normal, weak], key=best_match)
+ self.assertIs(match, normal)
+
+ def test_strong_validators_are_higher_priority(self):
+ weak = exceptions.ValidationError("Oh no!", path=[], validator="a")
+ normal = exceptions.ValidationError("Oh yes!", path=[], validator="b")
+ strong = exceptions.ValidationError("Oh fine!", path=[], validator="c")
+
+ best_match = exceptions.by_relevance(weak="a", strong="c")
+
+ match = max([weak, normal, strong], key=best_match)
+ self.assertIs(match, strong)
+
+ match = max([strong, normal, weak], key=best_match)
+ self.assertIs(match, strong)
+
+
+class TestErrorTree(unittest.TestCase):
+ def test_it_knows_how_many_total_errors_it_contains(self):
+ errors = [mock.MagicMock() for _ in range(8)]
+ tree = exceptions.ErrorTree(errors)
+ self.assertEqual(tree.total_errors, 8)
+
+ def test_it_contains_an_item_if_the_item_had_an_error(self):
+ errors = [exceptions.ValidationError("a message", path=["bar"])]
+ tree = exceptions.ErrorTree(errors)
+ self.assertIn("bar", tree)
+
+ def test_it_does_not_contain_an_item_if_the_item_had_no_error(self):
+ errors = [exceptions.ValidationError("a message", path=["bar"])]
+ tree = exceptions.ErrorTree(errors)
+ self.assertNotIn("foo", tree)
+
+ def test_validators_that_failed_appear_in_errors_dict(self):
+ error = exceptions.ValidationError("a message", validator="foo")
+ tree = exceptions.ErrorTree([error])
+ self.assertEqual(tree.errors, {"foo" : error})
+
+ def test_it_creates_a_child_tree_for_each_nested_path(self):
+ errors = [
+ exceptions.ValidationError("a bar message", path=["bar"]),
+ exceptions.ValidationError("a bar -> 0 message", path=["bar", 0]),
+ ]
+ tree = exceptions.ErrorTree(errors)
+ self.assertIn(0, tree["bar"])
+ self.assertNotIn(1, tree["bar"])
+
+ def test_children_have_their_errors_dicts_built(self):
+ e1, e2 = (
+ exceptions.ValidationError("1", validator="foo", path=["bar", 0]),
+ exceptions.ValidationError("2", validator="quux", path=["bar", 0]),
+ )
+ tree = exceptions.ErrorTree([e1, e2])
+ self.assertEqual(tree["bar"][0].errors, {"foo" : e1, "quux" : e2})
+
+ def test_it_does_not_contain_subtrees_that_are_not_in_the_instance(self):
+ error = exceptions.ValidationError("123", validator="foo", instance=[])
+ tree = exceptions.ErrorTree([error])
+
+ with self.assertRaises(IndexError):
+ tree[0]
+
+ def test_if_its_in_the_tree_anyhow_it_does_not_raise_an_error(self):
+ """
+ If a validator is dumb (like :validator:`required` in draft 3) and
+ refers to a path that isn't in the instance, the tree still properly
+ returns a subtree for that path.
+
+ """
+
+ error = exceptions.ValidationError(
+ "a message", validator="foo", instance={}, path=["foo"],
+ )
+ tree = exceptions.ErrorTree([error])
+ self.assertIsInstance(tree["foo"], exceptions.ErrorTree)
+
+
+class TestErrorReprStr(unittest.TestCase):
+ def make_error(self, **kwargs):
+ defaults = dict(
+ message=u"hello",
+ validator=u"type",
+ validator_value=u"string",
+ instance=5,
+ schema={u"type": u"string"},
+ )
+ defaults.update(kwargs)
+ return exceptions.ValidationError(**defaults)
+
+ def assertShows(self, expected, **kwargs):
+ if PY3:
+ expected = expected.replace("u'", "'")
+ expected = textwrap.dedent(expected).rstrip("\n")
+
+ error = self.make_error(**kwargs)
+ message_line, _, rest = str(error).partition("\n")
+ self.assertEqual(message_line, error.message)
+ self.assertEqual(rest, expected)
+
+ def test_repr(self):
+ self.assertEqual(
+ repr(exceptions.ValidationError(message="Hello!")),
+ "<ValidationError: %r>" % "Hello!",
+ )
+
+ def test_unset_error(self):
+ error = exceptions.ValidationError("message")
+ self.assertEqual(str(error), "message")
+
+ kwargs = {
+ "validator": "type",
+ "validator_value": "string",
+ "instance": 5,
+ "schema": {"type": "string"}
+ }
+ # Just the message should show if any of the attributes are unset
+ for attr in kwargs:
+ k = dict(kwargs)
+ del k[attr]
+ error = exceptions.ValidationError("message", **k)
+ self.assertEqual(str(error), "message")
+
+ def test_empty_paths(self):
+ self.assertShows(
+ """
+ Failed validating u'type' in schema:
+ {u'type': u'string'}
+
+ On instance:
+ 5
+ """,
+ path=[],
+ schema_path=[],
+ )
+
+ def test_one_item_paths(self):
+ self.assertShows(
+ """
+ Failed validating u'type' in schema:
+ {u'type': u'string'}
+
+ On instance[0]:
+ 5
+ """,
+ path=[0],
+ schema_path=["items"],
+ )
+
+ def test_multiple_item_paths(self):
+ self.assertShows(
+ """
+ Failed validating u'type' in schema[u'items'][0]:
+ {u'type': u'string'}
+
+ On instance[0][u'a']:
+ 5
+ """,
+ path=[0, u"a"],
+ schema_path=[u"items", 0, 1],
+ )
+
+ def test_uses_pprint(self):
+ with mock.patch("pprint.pformat") as pformat:
+ str(self.make_error())
+ self.assertEqual(pformat.call_count, 2) # schema + instance
+
+ def test_str_works_with_instances_having_overriden_eq_operator(self):
+ """
+ Check for https://github.com/Julian/jsonschema/issues/164 which
+ rendered exceptions unusable when a `ValidationError` involved
+ instances with an `__eq__` method that returned truthy values.
+
+ """
+
+ instance = mock.MagicMock()
+ error = exceptions.ValidationError(
+ "a message",
+ validator="foo",
+ instance=instance,
+ validator_value="some",
+ schema="schema",
+ )
+ str(error)
+ self.assertFalse(instance.__eq__.called)
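
A compact illustration of the ErrorTree behavior these tests pin down
(schema and instance invented for the example):

    from jsonschema import Draft4Validator
    from jsonschema.exceptions import ErrorTree

    schema = {"items": {"type": "number"}}
    instance = [1, "spam", 3]

    tree = ErrorTree(Draft4Validator(schema).iter_errors(instance))
    print(tree.total_errors)               # 1
    print(1 in tree)                       # True: instance[1] failed
    print(tree[1].errors["type"].message)  # why instance[1] failed
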
diff --git a/lib/spack/external/jsonschema/tests/test_format.py b/lib/spack/external/jsonschema/tests/test_format.py
new file mode 100644
index 0000000000..8392ca1de3
--- /dev/null
+++ b/lib/spack/external/jsonschema/tests/test_format.py
@@ -0,0 +1,63 @@
+"""
+Tests for the parts of jsonschema related to the :validator:`format` property.
+
+"""
+
+from jsonschema.tests.compat import mock, unittest
+
+from jsonschema import FormatError, ValidationError, FormatChecker
+from jsonschema.validators import Draft4Validator
+
+
+class TestFormatChecker(unittest.TestCase):
+ def setUp(self):
+ self.fn = mock.Mock()
+
+ def test_it_can_validate_no_formats(self):
+ checker = FormatChecker(formats=())
+ self.assertFalse(checker.checkers)
+
+ def test_it_raises_a_key_error_for_unknown_formats(self):
+ with self.assertRaises(KeyError):
+ FormatChecker(formats=["o noes"])
+
+ def test_it_can_register_cls_checkers(self):
+ with mock.patch.dict(FormatChecker.checkers, clear=True):
+ FormatChecker.cls_checks("new")(self.fn)
+ self.assertEqual(FormatChecker.checkers, {"new" : (self.fn, ())})
+
+ def test_it_can_register_checkers(self):
+ checker = FormatChecker()
+ checker.checks("new")(self.fn)
+ self.assertEqual(
+ checker.checkers,
+ dict(FormatChecker.checkers, new=(self.fn, ()))
+ )
+
+ def test_it_catches_registered_errors(self):
+ checker = FormatChecker()
+ cause = self.fn.side_effect = ValueError()
+
+ checker.checks("foo", raises=ValueError)(self.fn)
+
+ with self.assertRaises(FormatError) as cm:
+ checker.check("bar", "foo")
+
+ self.assertIs(cm.exception.cause, cause)
+ self.assertIs(cm.exception.__cause__, cause)
+
+ # Unregistered errors should not be caught
+ self.fn.side_effect = AttributeError
+ with self.assertRaises(AttributeError):
+ checker.check("bar", "foo")
+
+ def test_format_error_causes_become_validation_error_causes(self):
+ checker = FormatChecker()
+ checker.checks("foo", raises=ValueError)(self.fn)
+ cause = self.fn.side_effect = ValueError()
+ validator = Draft4Validator({"format" : "foo"}, format_checker=checker)
+
+ with self.assertRaises(ValidationError) as cm:
+ validator.validate("bar")
+
+ self.assertIs(cm.exception.__cause__, cause)
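
The registration flow under test, as a short sketch with an invented
"even" format:

    from jsonschema import Draft4Validator, FormatChecker, ValidationError

    checker = FormatChecker()

    # Raised ValueErrors are wrapped in FormatError, and the validator
    # surfaces them as ValidationErrors with the original as the cause.
    @checker.checks("even", raises=ValueError)
    def is_even(value):
        return int(value) % 2 == 0

    validator = Draft4Validator({"format": "even"}, format_checker=checker)
    validator.validate("4")  # passes
    try:
        validator.validate("5")
    except ValidationError as error:
        print(error.message)
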
diff --git a/lib/spack/external/jsonschema/tests/test_jsonschema_test_suite.py b/lib/spack/external/jsonschema/tests/test_jsonschema_test_suite.py
new file mode 100644
index 0000000000..75c6857bc0
--- /dev/null
+++ b/lib/spack/external/jsonschema/tests/test_jsonschema_test_suite.py
@@ -0,0 +1,290 @@
+"""
+Test runner for the JSON Schema official test suite
+
+Tests comprehensive correctness of each draft's validator.
+
+See https://github.com/json-schema/JSON-Schema-Test-Suite for details.
+
+"""
+
+from contextlib import closing
+from decimal import Decimal
+import glob
+import json
+import io
+import itertools
+import os
+import re
+import subprocess
+import sys
+
+try:
+ from sys import pypy_version_info
+except ImportError:
+ pypy_version_info = None
+
+from jsonschema import (
+ FormatError, SchemaError, ValidationError, Draft3Validator,
+ Draft4Validator, FormatChecker, draft3_format_checker,
+ draft4_format_checker, validate,
+)
+from jsonschema.compat import PY3
+from jsonschema.tests.compat import mock, unittest
+import jsonschema
+
+
+REPO_ROOT = os.path.join(os.path.dirname(jsonschema.__file__), os.path.pardir)
+SUITE = os.getenv("JSON_SCHEMA_TEST_SUITE", os.path.join(REPO_ROOT, "json"))
+
+if not os.path.isdir(SUITE):
+ raise ValueError(
+ "Can't find the JSON-Schema-Test-Suite directory. Set the "
+ "'JSON_SCHEMA_TEST_SUITE' environment variable or run the tests from "
+ "alongside a checkout of the suite."
+ )
+
+TESTS_DIR = os.path.join(SUITE, "tests")
+JSONSCHEMA_SUITE = os.path.join(SUITE, "bin", "jsonschema_suite")
+
+remotes_stdout = subprocess.Popen(
+ ["python", JSONSCHEMA_SUITE, "remotes"], stdout=subprocess.PIPE,
+).stdout
+
+with closing(remotes_stdout):
+ if PY3:
+ remotes_stdout = io.TextIOWrapper(remotes_stdout)
+ REMOTES = json.load(remotes_stdout)
+
+
+def make_case(schema, data, valid, name):
+ if valid:
+ def test_case(self):
+ kwargs = getattr(self, "validator_kwargs", {})
+ validate(data, schema, cls=self.validator_class, **kwargs)
+ else:
+ def test_case(self):
+ kwargs = getattr(self, "validator_kwargs", {})
+ with self.assertRaises(ValidationError):
+ validate(data, schema, cls=self.validator_class, **kwargs)
+
+ if not PY3:
+ name = name.encode("utf-8")
+ test_case.__name__ = name
+
+ return test_case
+
+
+def maybe_skip(skip, test_case, case, test):
+ if skip is not None:
+ reason = skip(case, test)
+ if reason is not None:
+ test_case = unittest.skip(reason)(test_case)
+ return test_case
+
+
+def load_json_cases(tests_glob, ignore_glob="", basedir=TESTS_DIR, skip=None):
+ if ignore_glob:
+ ignore_glob = os.path.join(basedir, ignore_glob)
+
+ def add_test_methods(test_class):
+ ignored = set(glob.iglob(ignore_glob))
+
+ for filename in glob.iglob(os.path.join(basedir, tests_glob)):
+ if filename in ignored:
+ continue
+
+ validating, _ = os.path.splitext(os.path.basename(filename))
+ id = itertools.count(1)
+
+ with open(filename) as test_file:
+ for case in json.load(test_file):
+ for test in case["tests"]:
+ name = "test_%s_%s_%s" % (
+ validating,
+ next(id),
+ re.sub(r"[\W ]+", "_", test["description"]),
+ )
+ assert not hasattr(test_class, name), name
+
+ test_case = make_case(
+ data=test["data"],
+ schema=case["schema"],
+ valid=test["valid"],
+ name=name,
+ )
+ test_case = maybe_skip(skip, test_case, case, test)
+ setattr(test_class, name, test_case)
+
+ return test_class
+ return add_test_methods
+
+
+class TypesMixin(object):
+ @unittest.skipIf(PY3, "In Python 3 json.load always produces unicode")
+ def test_string_a_bytestring_is_a_string(self):
+ self.validator_class({"type" : "string"}).validate(b"foo")
+
+
+class DecimalMixin(object):
+ def test_it_can_validate_with_decimals(self):
+ schema = {"type" : "number"}
+ validator = self.validator_class(
+ schema, types={"number" : (int, float, Decimal)}
+ )
+
+ for valid in [1, 1.1, Decimal(1) / Decimal(8)]:
+ validator.validate(valid)
+
+ for invalid in ["foo", {}, [], True, None]:
+ with self.assertRaises(ValidationError):
+ validator.validate(invalid)
+
+
+def missing_format(checker):
+ def missing_format(case, test):
+ format = case["schema"].get("format")
+ if format not in checker.checkers:
+ return "Format checker {0!r} not found.".format(format)
+ elif (
+ format == "date-time" and
+ pypy_version_info is not None and
+ pypy_version_info[:2] <= (1, 9)
+ ):
+ # datetime.datetime is overzealous about typechecking in <=1.9
+ return "datetime.datetime is broken on this version of PyPy."
+ return missing_format
+
+
+class FormatMixin(object):
+ def test_it_returns_true_for_formats_it_does_not_know_about(self):
+ validator = self.validator_class(
+ {"format" : "carrot"}, format_checker=FormatChecker(),
+ )
+ validator.validate("bugs")
+
+ def test_it_does_not_validate_formats_by_default(self):
+ validator = self.validator_class({})
+ self.assertIsNone(validator.format_checker)
+
+ def test_it_validates_formats_if_a_checker_is_provided(self):
+ checker = mock.Mock(spec=FormatChecker)
+ validator = self.validator_class(
+ {"format" : "foo"}, format_checker=checker,
+ )
+
+ validator.validate("bar")
+
+ checker.check.assert_called_once_with("bar", "foo")
+
+ cause = ValueError()
+ checker.check.side_effect = FormatError('aoeu', cause=cause)
+
+ with self.assertRaises(ValidationError) as cm:
+ validator.validate("bar")
+ # Make sure original cause is attached
+ self.assertIs(cm.exception.cause, cause)
+
+ def test_it_validates_formats_of_any_type(self):
+ checker = mock.Mock(spec=FormatChecker)
+ validator = self.validator_class(
+ {"format" : "foo"}, format_checker=checker,
+ )
+
+ validator.validate([1, 2, 3])
+
+ checker.check.assert_called_once_with([1, 2, 3], "foo")
+
+ cause = ValueError()
+ checker.check.side_effect = FormatError('aoeu', cause=cause)
+
+ with self.assertRaises(ValidationError) as cm:
+ validator.validate([1, 2, 3])
+ # Make sure original cause is attached
+ self.assertIs(cm.exception.cause, cause)
+
+
+if sys.maxunicode == 2 ** 16 - 1: # This is a narrow build.
+ def narrow_unicode_build(case, test):
+ if "supplementary Unicode" in test["description"]:
+ return "Not running surrogate Unicode case, this Python is narrow."
+else:
+ def narrow_unicode_build(case, test): # This isn't, skip nothing.
+ return
+
+
+@load_json_cases(
+ "draft3/*.json",
+ skip=narrow_unicode_build,
+ ignore_glob="draft3/refRemote.json",
+)
+@load_json_cases(
+ "draft3/optional/format.json", skip=missing_format(draft3_format_checker)
+)
+@load_json_cases("draft3/optional/bignum.json")
+@load_json_cases("draft3/optional/zeroTerminatedFloats.json")
+class TestDraft3(unittest.TestCase, TypesMixin, DecimalMixin, FormatMixin):
+ validator_class = Draft3Validator
+ validator_kwargs = {"format_checker" : draft3_format_checker}
+
+ def test_any_type_is_valid_for_type_any(self):
+ validator = self.validator_class({"type" : "any"})
+ validator.validate(mock.Mock())
+
+ # TODO: we're in need of more meta schema tests
+ def test_invalid_properties(self):
+ with self.assertRaises(SchemaError):
+ validate({}, {"properties": {"test": True}},
+ cls=self.validator_class)
+
+ def test_minItems_invalid_string(self):
+ with self.assertRaises(SchemaError):
+ # needs to be an integer
+ validate([1], {"minItems" : "1"}, cls=self.validator_class)
+
+
+@load_json_cases(
+ "draft4/*.json",
+ skip=narrow_unicode_build,
+ ignore_glob="draft4/refRemote.json",
+)
+@load_json_cases(
+ "draft4/optional/format.json", skip=missing_format(draft4_format_checker)
+)
+@load_json_cases("draft4/optional/bignum.json")
+@load_json_cases("draft4/optional/zeroTerminatedFloats.json")
+class TestDraft4(unittest.TestCase, TypesMixin, DecimalMixin, FormatMixin):
+ validator_class = Draft4Validator
+ validator_kwargs = {"format_checker" : draft4_format_checker}
+
+ # TODO: we're in need of more meta schema tests
+ def test_invalid_properties(self):
+ with self.assertRaises(SchemaError):
+ validate({}, {"properties": {"test": True}},
+ cls=self.validator_class)
+
+ def test_minItems_invalid_string(self):
+ with self.assertRaises(SchemaError):
+ # needs to be an integer
+ validate([1], {"minItems" : "1"}, cls=self.validator_class)
+
+
+class RemoteRefResolutionMixin(object):
+ def setUp(self):
+ patch = mock.patch("jsonschema.validators.requests")
+ requests = patch.start()
+ requests.get.side_effect = self.resolve
+ self.addCleanup(patch.stop)
+
+ def resolve(self, reference):
+ _, _, reference = reference.partition("http://localhost:1234/")
+ return mock.Mock(**{"json.return_value" : REMOTES.get(reference)})
+
+
+@load_json_cases("draft3/refRemote.json")
+class Draft3RemoteResolution(RemoteRefResolutionMixin, unittest.TestCase):
+ validator_class = Draft3Validator
+
+
+@load_json_cases("draft4/refRemote.json")
+class Draft4RemoteResolution(RemoteRefResolutionMixin, unittest.TestCase):
+ validator_class = Draft4Validator
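
These tests only run next to a checkout of the official suite. One
hypothetical way to point them at it from Python (the path is an
example placeholder):

    import os
    import subprocess

    env = dict(
        os.environ,
        JSON_SCHEMA_TEST_SUITE="/path/to/JSON-Schema-Test-Suite",
    )
    subprocess.check_call(
        ["python", "-m", "unittest",
         "jsonschema.tests.test_jsonschema_test_suite"],
        env=env,
    )
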
diff --git a/lib/spack/external/jsonschema/tests/test_validators.py b/lib/spack/external/jsonschema/tests/test_validators.py
new file mode 100644
index 0000000000..f8692388ea
--- /dev/null
+++ b/lib/spack/external/jsonschema/tests/test_validators.py
@@ -0,0 +1,786 @@
+from collections import deque
+from contextlib import contextmanager
+import json
+
+from jsonschema import FormatChecker, ValidationError
+from jsonschema.tests.compat import mock, unittest
+from jsonschema.validators import (
+ RefResolutionError, UnknownType, Draft3Validator,
+ Draft4Validator, RefResolver, create, extend, validator_for, validate,
+)
+
+
+class TestCreateAndExtend(unittest.TestCase):
+ def setUp(self):
+ self.meta_schema = {u"properties" : {u"smelly" : {}}}
+ self.smelly = mock.MagicMock()
+ self.validators = {u"smelly" : self.smelly}
+ self.types = {u"dict" : dict}
+ self.Validator = create(
+ meta_schema=self.meta_schema,
+ validators=self.validators,
+ default_types=self.types,
+ )
+
+ self.validator_value = 12
+ self.schema = {u"smelly" : self.validator_value}
+ self.validator = self.Validator(self.schema)
+
+ def test_attrs(self):
+ self.assertEqual(self.Validator.VALIDATORS, self.validators)
+ self.assertEqual(self.Validator.META_SCHEMA, self.meta_schema)
+ self.assertEqual(self.Validator.DEFAULT_TYPES, self.types)
+
+ def test_init(self):
+ self.assertEqual(self.validator.schema, self.schema)
+
+ def test_iter_errors(self):
+ instance = "hello"
+
+ self.smelly.return_value = []
+ self.assertEqual(list(self.validator.iter_errors(instance)), [])
+
+ error = mock.Mock()
+ self.smelly.return_value = [error]
+ self.assertEqual(list(self.validator.iter_errors(instance)), [error])
+
+ self.smelly.assert_called_with(
+ self.validator, self.validator_value, instance, self.schema,
+ )
+
+ def test_if_a_version_is_provided_it_is_registered(self):
+ with mock.patch("jsonschema.validators.validates") as validates:
+ validates.side_effect = lambda version : lambda cls : cls
+ Validator = create(meta_schema={u"id" : ""}, version="my version")
+ validates.assert_called_once_with("my version")
+ self.assertEqual(Validator.__name__, "MyVersionValidator")
+
+ def test_if_a_version_is_not_provided_it_is_not_registered(self):
+ with mock.patch("jsonschema.validators.validates") as validates:
+ create(meta_schema={u"id" : "id"})
+ self.assertFalse(validates.called)
+
+ def test_extend(self):
+ validators = dict(self.Validator.VALIDATORS)
+ new = mock.Mock()
+
+ Extended = extend(self.Validator, validators={u"a new one" : new})
+
+ validators.update([(u"a new one", new)])
+ self.assertEqual(Extended.VALIDATORS, validators)
+ self.assertNotIn(u"a new one", self.Validator.VALIDATORS)
+
+ self.assertEqual(Extended.META_SCHEMA, self.Validator.META_SCHEMA)
+ self.assertEqual(Extended.DEFAULT_TYPES, self.Validator.DEFAULT_TYPES)
+
+
+class TestIterErrors(unittest.TestCase):
+ def setUp(self):
+ self.validator = Draft3Validator({})
+
+ def test_iter_errors(self):
+ instance = [1, 2]
+ schema = {
+ u"disallow" : u"array",
+ u"enum" : [["a", "b", "c"], ["d", "e", "f"]],
+ u"minItems" : 3
+ }
+
+ got = (e.message for e in self.validator.iter_errors(instance, schema))
+ expected = [
+ "%r is disallowed for [1, 2]" % (schema["disallow"],),
+ "[1, 2] is too short",
+ "[1, 2] is not one of %r" % (schema["enum"],),
+ ]
+ self.assertEqual(sorted(got), sorted(expected))
+
+ def test_iter_errors_multiple_failures_one_validator(self):
+ instance = {"foo" : 2, "bar" : [1], "baz" : 15, "quux" : "spam"}
+ schema = {
+ u"properties" : {
+ "foo" : {u"type" : "string"},
+ "bar" : {u"minItems" : 2},
+ "baz" : {u"maximum" : 10, u"enum" : [2, 4, 6, 8]},
+ }
+ }
+
+ errors = list(self.validator.iter_errors(instance, schema))
+ self.assertEqual(len(errors), 4)
+
+
+class TestValidationErrorMessages(unittest.TestCase):
+ def message_for(self, instance, schema, *args, **kwargs):
+ kwargs.setdefault("cls", Draft3Validator)
+ with self.assertRaises(ValidationError) as e:
+ validate(instance, schema, *args, **kwargs)
+ return e.exception.message
+
+ def test_single_type_failure(self):
+ message = self.message_for(instance=1, schema={u"type" : u"string"})
+ self.assertEqual(message, "1 is not of type %r" % u"string")
+
+ def test_single_type_list_failure(self):
+ message = self.message_for(instance=1, schema={u"type" : [u"string"]})
+ self.assertEqual(message, "1 is not of type %r" % u"string")
+
+ def test_multiple_type_failure(self):
+ types = u"string", u"object"
+ message = self.message_for(instance=1, schema={u"type" : list(types)})
+ self.assertEqual(message, "1 is not of type %r, %r" % types)
+
+ def test_object_without_title_type_failure(self):
+ type = {u"type" : [{u"minimum" : 3}]}
+ message = self.message_for(instance=1, schema={u"type" : [type]})
+ self.assertEqual(message, "1 is not of type %r" % (type,))
+
+ def test_object_with_name_type_failure(self):
+ name = "Foo"
+ schema = {u"type" : [{u"name" : name, u"minimum" : 3}]}
+ message = self.message_for(instance=1, schema=schema)
+ self.assertEqual(message, "1 is not of type %r" % (name,))
+
+ def test_minimum(self):
+ message = self.message_for(instance=1, schema={"minimum" : 2})
+ self.assertEqual(message, "1 is less than the minimum of 2")
+
+ def test_maximum(self):
+ message = self.message_for(instance=1, schema={"maximum" : 0})
+ self.assertEqual(message, "1 is greater than the maximum of 0")
+
+ def test_dependencies_failure_has_single_element_not_list(self):
+ depend, on = "bar", "foo"
+ schema = {u"dependencies" : {depend : on}}
+ message = self.message_for({"bar" : 2}, schema)
+ self.assertEqual(message, "%r is a dependency of %r" % (on, depend))
+
+ def test_additionalItems_single_failure(self):
+ message = self.message_for(
+ [2], {u"items" : [], u"additionalItems" : False},
+ )
+ self.assertIn("(2 was unexpected)", message)
+
+ def test_additionalItems_multiple_failures(self):
+ message = self.message_for(
+ [1, 2, 3], {u"items" : [], u"additionalItems" : False}
+ )
+ self.assertIn("(1, 2, 3 were unexpected)", message)
+
+ def test_additionalProperties_single_failure(self):
+ additional = "foo"
+ schema = {u"additionalProperties" : False}
+ message = self.message_for({additional : 2}, schema)
+ self.assertIn("(%r was unexpected)" % (additional,), message)
+
+ def test_additionalProperties_multiple_failures(self):
+ schema = {u"additionalProperties" : False}
+ message = self.message_for(dict.fromkeys(["foo", "bar"]), schema)
+
+ self.assertIn(repr("foo"), message)
+ self.assertIn(repr("bar"), message)
+ self.assertIn("were unexpected)", message)
+
+ def test_invalid_format_default_message(self):
+ checker = FormatChecker(formats=())
+ check_fn = mock.Mock(return_value=False)
+ checker.checks(u"thing")(check_fn)
+
+ schema = {u"format" : u"thing"}
+ message = self.message_for("bla", schema, format_checker=checker)
+
+ self.assertIn(repr("bla"), message)
+ self.assertIn(repr("thing"), message)
+ self.assertIn("is not a", message)
+
+
+class TestValidationErrorDetails(unittest.TestCase):
+ # TODO: These really need unit tests for each individual validator, rather
+ # than just these higher level tests.
+ def test_anyOf(self):
+ instance = 5
+ schema = {
+ "anyOf": [
+ {"minimum": 20},
+ {"type": "string"}
+ ]
+ }
+
+ validator = Draft4Validator(schema)
+ errors = list(validator.iter_errors(instance))
+ self.assertEqual(len(errors), 1)
+ e = errors[0]
+
+ self.assertEqual(e.validator, "anyOf")
+ self.assertEqual(e.validator_value, schema["anyOf"])
+ self.assertEqual(e.instance, instance)
+ self.assertEqual(e.schema, schema)
+ self.assertIsNone(e.parent)
+
+ self.assertEqual(e.path, deque([]))
+ self.assertEqual(e.relative_path, deque([]))
+ self.assertEqual(e.absolute_path, deque([]))
+
+ self.assertEqual(e.schema_path, deque(["anyOf"]))
+ self.assertEqual(e.relative_schema_path, deque(["anyOf"]))
+ self.assertEqual(e.absolute_schema_path, deque(["anyOf"]))
+
+ self.assertEqual(len(e.context), 2)
+
+ e1, e2 = sorted_errors(e.context)
+
+ self.assertEqual(e1.validator, "minimum")
+ self.assertEqual(e1.validator_value, schema["anyOf"][0]["minimum"])
+ self.assertEqual(e1.instance, instance)
+ self.assertEqual(e1.schema, schema["anyOf"][0])
+ self.assertIs(e1.parent, e)
+
+ self.assertEqual(e1.path, deque([]))
+ self.assertEqual(e1.absolute_path, deque([]))
+ self.assertEqual(e1.relative_path, deque([]))
+
+ self.assertEqual(e1.schema_path, deque([0, "minimum"]))
+ self.assertEqual(e1.relative_schema_path, deque([0, "minimum"]))
+ self.assertEqual(
+ e1.absolute_schema_path, deque(["anyOf", 0, "minimum"]),
+ )
+
+ self.assertFalse(e1.context)
+
+ self.assertEqual(e2.validator, "type")
+ self.assertEqual(e2.validator_value, schema["anyOf"][1]["type"])
+ self.assertEqual(e2.instance, instance)
+ self.assertEqual(e2.schema, schema["anyOf"][1])
+ self.assertIs(e2.parent, e)
+
+ self.assertEqual(e2.path, deque([]))
+ self.assertEqual(e2.relative_path, deque([]))
+ self.assertEqual(e2.absolute_path, deque([]))
+
+ self.assertEqual(e2.schema_path, deque([1, "type"]))
+ self.assertEqual(e2.relative_schema_path, deque([1, "type"]))
+ self.assertEqual(e2.absolute_schema_path, deque(["anyOf", 1, "type"]))
+
+ self.assertEqual(len(e2.context), 0)
+
+ def test_type(self):
+ instance = {"foo": 1}
+ schema = {
+ "type": [
+ {"type": "integer"},
+ {
+ "type": "object",
+ "properties": {
+ "foo": {"enum": [2]}
+ }
+ }
+ ]
+ }
+
+ validator = Draft3Validator(schema)
+ errors = list(validator.iter_errors(instance))
+ self.assertEqual(len(errors), 1)
+ e = errors[0]
+
+ self.assertEqual(e.validator, "type")
+ self.assertEqual(e.validator_value, schema["type"])
+ self.assertEqual(e.instance, instance)
+ self.assertEqual(e.schema, schema)
+ self.assertIsNone(e.parent)
+
+ self.assertEqual(e.path, deque([]))
+ self.assertEqual(e.relative_path, deque([]))
+ self.assertEqual(e.absolute_path, deque([]))
+
+ self.assertEqual(e.schema_path, deque(["type"]))
+ self.assertEqual(e.relative_schema_path, deque(["type"]))
+ self.assertEqual(e.absolute_schema_path, deque(["type"]))
+
+ self.assertEqual(len(e.context), 2)
+
+ e1, e2 = sorted_errors(e.context)
+
+ self.assertEqual(e1.validator, "type")
+ self.assertEqual(e1.validator_value, schema["type"][0]["type"])
+ self.assertEqual(e1.instance, instance)
+ self.assertEqual(e1.schema, schema["type"][0])
+ self.assertIs(e1.parent, e)
+
+ self.assertEqual(e1.path, deque([]))
+ self.assertEqual(e1.relative_path, deque([]))
+ self.assertEqual(e1.absolute_path, deque([]))
+
+ self.assertEqual(e1.schema_path, deque([0, "type"]))
+ self.assertEqual(e1.relative_schema_path, deque([0, "type"]))
+ self.assertEqual(e1.absolute_schema_path, deque(["type", 0, "type"]))
+
+ self.assertFalse(e1.context)
+
+ self.assertEqual(e2.validator, "enum")
+ self.assertEqual(e2.validator_value, [2])
+ self.assertEqual(e2.instance, 1)
+ self.assertEqual(e2.schema, {u"enum" : [2]})
+ self.assertIs(e2.parent, e)
+
+ self.assertEqual(e2.path, deque(["foo"]))
+ self.assertEqual(e2.relative_path, deque(["foo"]))
+ self.assertEqual(e2.absolute_path, deque(["foo"]))
+
+ self.assertEqual(
+ e2.schema_path, deque([1, "properties", "foo", "enum"]),
+ )
+ self.assertEqual(
+ e2.relative_schema_path, deque([1, "properties", "foo", "enum"]),
+ )
+ self.assertEqual(
+ e2.absolute_schema_path,
+ deque(["type", 1, "properties", "foo", "enum"]),
+ )
+
+ self.assertFalse(e2.context)
+
+ def test_single_nesting(self):
+ instance = {"foo" : 2, "bar" : [1], "baz" : 15, "quux" : "spam"}
+ schema = {
+ "properties" : {
+ "foo" : {"type" : "string"},
+ "bar" : {"minItems" : 2},
+ "baz" : {"maximum" : 10, "enum" : [2, 4, 6, 8]},
+ }
+ }
+
+ validator = Draft3Validator(schema)
+ errors = validator.iter_errors(instance)
+ e1, e2, e3, e4 = sorted_errors(errors)
+
+ self.assertEqual(e1.path, deque(["bar"]))
+ self.assertEqual(e2.path, deque(["baz"]))
+ self.assertEqual(e3.path, deque(["baz"]))
+ self.assertEqual(e4.path, deque(["foo"]))
+
+ self.assertEqual(e1.relative_path, deque(["bar"]))
+ self.assertEqual(e2.relative_path, deque(["baz"]))
+ self.assertEqual(e3.relative_path, deque(["baz"]))
+ self.assertEqual(e4.relative_path, deque(["foo"]))
+
+ self.assertEqual(e1.absolute_path, deque(["bar"]))
+ self.assertEqual(e2.absolute_path, deque(["baz"]))
+ self.assertEqual(e3.absolute_path, deque(["baz"]))
+ self.assertEqual(e4.absolute_path, deque(["foo"]))
+
+ self.assertEqual(e1.validator, "minItems")
+ self.assertEqual(e2.validator, "enum")
+ self.assertEqual(e3.validator, "maximum")
+ self.assertEqual(e4.validator, "type")
+
+ def test_multiple_nesting(self):
+ instance = [1, {"foo" : 2, "bar" : {"baz" : [1]}}, "quux"]
+ schema = {
+ "type" : "string",
+ "items" : {
+ "type" : ["string", "object"],
+ "properties" : {
+ "foo" : {"enum" : [1, 3]},
+ "bar" : {
+ "type" : "array",
+ "properties" : {
+ "bar" : {"required" : True},
+ "baz" : {"minItems" : 2},
+ }
+ }
+ }
+ }
+ }
+
+ validator = Draft3Validator(schema)
+ errors = validator.iter_errors(instance)
+ e1, e2, e3, e4, e5, e6 = sorted_errors(errors)
+
+ self.assertEqual(e1.path, deque([]))
+ self.assertEqual(e2.path, deque([0]))
+ self.assertEqual(e3.path, deque([1, "bar"]))
+ self.assertEqual(e4.path, deque([1, "bar", "bar"]))
+ self.assertEqual(e5.path, deque([1, "bar", "baz"]))
+ self.assertEqual(e6.path, deque([1, "foo"]))
+
+ self.assertEqual(e1.schema_path, deque(["type"]))
+ self.assertEqual(e2.schema_path, deque(["items", "type"]))
+ self.assertEqual(
+ list(e3.schema_path), ["items", "properties", "bar", "type"],
+ )
+ self.assertEqual(
+ list(e4.schema_path),
+ ["items", "properties", "bar", "properties", "bar", "required"],
+ )
+ self.assertEqual(
+ list(e5.schema_path),
+ ["items", "properties", "bar", "properties", "baz", "minItems"]
+ )
+ self.assertEqual(
+ list(e6.schema_path), ["items", "properties", "foo", "enum"],
+ )
+
+ self.assertEqual(e1.validator, "type")
+ self.assertEqual(e2.validator, "type")
+ self.assertEqual(e3.validator, "type")
+ self.assertEqual(e4.validator, "required")
+ self.assertEqual(e5.validator, "minItems")
+ self.assertEqual(e6.validator, "enum")
+
+ def test_additionalProperties(self):
+ instance = {"bar": "bar", "foo": 2}
+ schema = {
+ "additionalProperties" : {"type": "integer", "minimum": 5}
+ }
+
+ validator = Draft3Validator(schema)
+ errors = validator.iter_errors(instance)
+ e1, e2 = sorted_errors(errors)
+
+ self.assertEqual(e1.path, deque(["bar"]))
+ self.assertEqual(e2.path, deque(["foo"]))
+
+ self.assertEqual(e1.validator, "type")
+ self.assertEqual(e2.validator, "minimum")
+
+ def test_patternProperties(self):
+ instance = {"bar": 1, "foo": 2}
+ schema = {
+ "patternProperties" : {
+ "bar": {"type": "string"},
+ "foo": {"minimum": 5}
+ }
+ }
+
+ validator = Draft3Validator(schema)
+ errors = validator.iter_errors(instance)
+ e1, e2 = sorted_errors(errors)
+
+ self.assertEqual(e1.path, deque(["bar"]))
+ self.assertEqual(e2.path, deque(["foo"]))
+
+ self.assertEqual(e1.validator, "type")
+ self.assertEqual(e2.validator, "minimum")
+
+ def test_additionalItems(self):
+ instance = ["foo", 1]
+ schema = {
+ "items": [],
+ "additionalItems" : {"type": "integer", "minimum": 5}
+ }
+
+ validator = Draft3Validator(schema)
+ errors = validator.iter_errors(instance)
+ e1, e2 = sorted_errors(errors)
+
+ self.assertEqual(e1.path, deque([0]))
+ self.assertEqual(e2.path, deque([1]))
+
+ self.assertEqual(e1.validator, "type")
+ self.assertEqual(e2.validator, "minimum")
+
+ def test_additionalItems_with_items(self):
+ instance = ["foo", "bar", 1]
+ schema = {
+ "items": [{}],
+ "additionalItems" : {"type": "integer", "minimum": 5}
+ }
+
+ validator = Draft3Validator(schema)
+ errors = validator.iter_errors(instance)
+ e1, e2 = sorted_errors(errors)
+
+ self.assertEqual(e1.path, deque([1]))
+ self.assertEqual(e2.path, deque([2]))
+
+ self.assertEqual(e1.validator, "type")
+ self.assertEqual(e2.validator, "minimum")
+
+
+class ValidatorTestMixin(object):
+ def setUp(self):
+ self.instance = mock.Mock()
+ self.schema = {}
+ self.resolver = mock.Mock()
+ self.validator = self.validator_class(self.schema)
+
+ def test_valid_instances_are_valid(self):
+ errors = iter([])
+
+ with mock.patch.object(
+ self.validator, "iter_errors", return_value=errors,
+ ):
+ self.assertTrue(
+ self.validator.is_valid(self.instance, self.schema)
+ )
+
+ def test_invalid_instances_are_not_valid(self):
+ errors = iter([mock.Mock()])
+
+ with mock.patch.object(
+ self.validator, "iter_errors", return_value=errors,
+ ):
+ self.assertFalse(
+ self.validator.is_valid(self.instance, self.schema)
+ )
+
+ def test_non_existent_properties_are_ignored(self):
+ instance, my_property, my_value = mock.Mock(), mock.Mock(), mock.Mock()
+ validate(instance=instance, schema={my_property : my_value})
+
+ def test_it_creates_a_ref_resolver_if_not_provided(self):
+ self.assertIsInstance(self.validator.resolver, RefResolver)
+
+ def test_it_delegates_to_a_ref_resolver(self):
+ resolver = RefResolver("", {})
+ schema = {"$ref" : mock.Mock()}
+
+ @contextmanager
+ def resolving():
+ yield {"type": "integer"}
+
+ with mock.patch.object(resolver, "resolving") as resolve:
+ resolve.return_value = resolving()
+ with self.assertRaises(ValidationError):
+ self.validator_class(schema, resolver=resolver).validate(None)
+
+ resolve.assert_called_once_with(schema["$ref"])
+
+ def test_is_type_is_true_for_valid_type(self):
+ self.assertTrue(self.validator.is_type("foo", "string"))
+
+ def test_is_type_is_false_for_invalid_type(self):
+ self.assertFalse(self.validator.is_type("foo", "array"))
+
+ def test_is_type_evades_bool_inheriting_from_int(self):
+ self.assertFalse(self.validator.is_type(True, "integer"))
+ self.assertFalse(self.validator.is_type(True, "number"))
+
+ def test_is_type_raises_exception_for_unknown_type(self):
+ with self.assertRaises(UnknownType):
+ self.validator.is_type("foo", object())
+
+
+class TestDraft3Validator(ValidatorTestMixin, unittest.TestCase):
+ validator_class = Draft3Validator
+
+ def test_is_type_is_true_for_any_type(self):
+ self.assertTrue(self.validator.is_valid(mock.Mock(), {"type": "any"}))
+
+ def test_is_type_does_not_evade_bool_if_it_is_being_tested(self):
+ self.assertTrue(self.validator.is_type(True, "boolean"))
+ self.assertTrue(self.validator.is_valid(True, {"type": "any"}))
+
+ def test_non_string_custom_types(self):
+ schema = {'type': [None]}
+ cls = self.validator_class(schema, types={None: type(None)})
+ cls.validate(None, schema)
+
+
+class TestDraft4Validator(ValidatorTestMixin, unittest.TestCase):
+ validator_class = Draft4Validator
+
+
+class TestBuiltinFormats(unittest.TestCase):
+ """
+ The built-in (specification-defined) formats do not raise type errors.
+
+ If an instance or value is not a string, it should be ignored.
+
+ """
+
+
+for format in FormatChecker.checkers:
+ def test(self, format=format):
+ v = Draft4Validator({"format": format}, format_checker=FormatChecker())
+ v.validate(123)
+
+ name = "test_{0}_ignores_non_strings".format(format)
+ test.__name__ = name
+ setattr(TestBuiltinFormats, name, test)
+ del test # Ugh py.test. Stop discovering top level tests.
+
+
+class TestValidatorFor(unittest.TestCase):
+ def test_draft_3(self):
+ schema = {"$schema" : "http://json-schema.org/draft-03/schema"}
+ self.assertIs(validator_for(schema), Draft3Validator)
+
+ schema = {"$schema" : "http://json-schema.org/draft-03/schema#"}
+ self.assertIs(validator_for(schema), Draft3Validator)
+
+ def test_draft_4(self):
+ schema = {"$schema" : "http://json-schema.org/draft-04/schema"}
+ self.assertIs(validator_for(schema), Draft4Validator)
+
+ schema = {"$schema" : "http://json-schema.org/draft-04/schema#"}
+ self.assertIs(validator_for(schema), Draft4Validator)
+
+ def test_custom_validator(self):
+ Validator = create(meta_schema={"id" : "meta schema id"}, version="12")
+ schema = {"$schema" : "meta schema id"}
+ self.assertIs(validator_for(schema), Validator)
+
+ def test_validator_for_jsonschema_default(self):
+ self.assertIs(validator_for({}), Draft4Validator)
+
+ def test_validator_for_custom_default(self):
+ self.assertIs(validator_for({}, default=None), None)
+
+
+class TestValidate(unittest.TestCase):
+ def test_draft3_validator_is_chosen(self):
+ schema = {"$schema" : "http://json-schema.org/draft-03/schema#"}
+ with mock.patch.object(Draft3Validator, "check_schema") as chk_schema:
+ validate({}, schema)
+ chk_schema.assert_called_once_with(schema)
+ # Make sure it works without the empty fragment
+ schema = {"$schema" : "http://json-schema.org/draft-03/schema"}
+ with mock.patch.object(Draft3Validator, "check_schema") as chk_schema:
+ validate({}, schema)
+ chk_schema.assert_called_once_with(schema)
+
+ def test_draft4_validator_is_chosen(self):
+ schema = {"$schema" : "http://json-schema.org/draft-04/schema#"}
+ with mock.patch.object(Draft4Validator, "check_schema") as chk_schema:
+ validate({}, schema)
+ chk_schema.assert_called_once_with(schema)
+
+ def test_draft4_validator_is_the_default(self):
+ with mock.patch.object(Draft4Validator, "check_schema") as chk_schema:
+ validate({}, {})
+ chk_schema.assert_called_once_with({})
+
+
+class TestRefResolver(unittest.TestCase):
+
+ base_uri = ""
+ stored_uri = "foo://stored"
+ stored_schema = {"stored" : "schema"}
+
+ def setUp(self):
+ self.referrer = {}
+ self.store = {self.stored_uri : self.stored_schema}
+ self.resolver = RefResolver(self.base_uri, self.referrer, self.store)
+
+ def test_it_does_not_retrieve_schema_urls_from_the_network(self):
+ ref = Draft3Validator.META_SCHEMA["id"]
+ with mock.patch.object(self.resolver, "resolve_remote") as remote:
+ with self.resolver.resolving(ref) as resolved:
+ self.assertEqual(resolved, Draft3Validator.META_SCHEMA)
+ self.assertFalse(remote.called)
+
+ def test_it_resolves_local_refs(self):
+ ref = "#/properties/foo"
+ self.referrer["properties"] = {"foo" : object()}
+ with self.resolver.resolving(ref) as resolved:
+ self.assertEqual(resolved, self.referrer["properties"]["foo"])
+
+ def test_it_resolves_local_refs_with_id(self):
+ schema = {"id": "foo://bar/schema#", "a": {"foo": "bar"}}
+ resolver = RefResolver.from_schema(schema)
+ with resolver.resolving("#/a") as resolved:
+ self.assertEqual(resolved, schema["a"])
+ with resolver.resolving("foo://bar/schema#/a") as resolved:
+ self.assertEqual(resolved, schema["a"])
+
+ def test_it_retrieves_stored_refs(self):
+ with self.resolver.resolving(self.stored_uri) as resolved:
+ self.assertIs(resolved, self.stored_schema)
+
+ self.resolver.store["cached_ref"] = {"foo" : 12}
+ with self.resolver.resolving("cached_ref#/foo") as resolved:
+ self.assertEqual(resolved, 12)
+
+ def test_it_retrieves_unstored_refs_via_requests(self):
+ ref = "http://bar#baz"
+ schema = {"baz" : 12}
+
+ with mock.patch("jsonschema.validators.requests") as requests:
+ requests.get.return_value.json.return_value = schema
+ with self.resolver.resolving(ref) as resolved:
+ self.assertEqual(resolved, 12)
+ requests.get.assert_called_once_with("http://bar")
+
+ def test_it_retrieves_unstored_refs_via_urlopen(self):
+ ref = "http://bar#baz"
+ schema = {"baz" : 12}
+
+ with mock.patch("jsonschema.validators.requests", None):
+ with mock.patch("jsonschema.validators.urlopen") as urlopen:
+ urlopen.return_value.read.return_value = (
+ json.dumps(schema).encode("utf8"))
+ with self.resolver.resolving(ref) as resolved:
+ self.assertEqual(resolved, 12)
+ urlopen.assert_called_once_with("http://bar")
+
+ def test_it_can_construct_a_base_uri_from_a_schema(self):
+ schema = {"id" : "foo"}
+ resolver = RefResolver.from_schema(schema)
+ self.assertEqual(resolver.base_uri, "foo")
+ with resolver.resolving("") as resolved:
+ self.assertEqual(resolved, schema)
+ with resolver.resolving("#") as resolved:
+ self.assertEqual(resolved, schema)
+ with resolver.resolving("foo") as resolved:
+ self.assertEqual(resolved, schema)
+ with resolver.resolving("foo#") as resolved:
+ self.assertEqual(resolved, schema)
+
+ def test_it_can_construct_a_base_uri_from_a_schema_without_id(self):
+ schema = {}
+ resolver = RefResolver.from_schema(schema)
+ self.assertEqual(resolver.base_uri, "")
+ with resolver.resolving("") as resolved:
+ self.assertEqual(resolved, schema)
+ with resolver.resolving("#") as resolved:
+ self.assertEqual(resolved, schema)
+
+ def test_custom_uri_scheme_handlers(self):
+ schema = {"foo": "bar"}
+ ref = "foo://bar"
+ foo_handler = mock.Mock(return_value=schema)
+ resolver = RefResolver("", {}, handlers={"foo": foo_handler})
+ with resolver.resolving(ref) as resolved:
+ self.assertEqual(resolved, schema)
+ foo_handler.assert_called_once_with(ref)
+
+ def test_cache_remote_on(self):
+ ref = "foo://bar"
+ foo_handler = mock.Mock()
+ resolver = RefResolver(
+ "", {}, cache_remote=True, handlers={"foo" : foo_handler},
+ )
+ with resolver.resolving(ref):
+ pass
+ with resolver.resolving(ref):
+ pass
+ foo_handler.assert_called_once_with(ref)
+
+ def test_cache_remote_off(self):
+ ref = "foo://bar"
+ foo_handler = mock.Mock()
+ resolver = RefResolver(
+ "", {}, cache_remote=False, handlers={"foo" : foo_handler},
+ )
+ with resolver.resolving(ref):
+ pass
+ with resolver.resolving(ref):
+ pass
+ self.assertEqual(foo_handler.call_count, 2)
+
+ def test_if_you_give_it_junk_you_get_a_resolution_error(self):
+ ref = "foo://bar"
+ foo_handler = mock.Mock(side_effect=ValueError("Oh no! What's this?"))
+ resolver = RefResolver("", {}, handlers={"foo" : foo_handler})
+ with self.assertRaises(RefResolutionError) as err:
+ with resolver.resolving(ref):
+ pass
+ self.assertEqual(str(err.exception), "Oh no! What's this?")
+
+
+def sorted_errors(errors):
+ def key(error):
+ return (
+ [str(e) for e in error.path],
+ [str(e) for e in error.schema_path]
+ )
+ return sorted(errors, key=key)
diff --git a/lib/spack/external/jsonschema/validators.py b/lib/spack/external/jsonschema/validators.py
new file mode 100644
index 0000000000..3e326844f4
--- /dev/null
+++ b/lib/spack/external/jsonschema/validators.py
@@ -0,0 +1,428 @@
+from __future__ import division
+
+import contextlib
+import json
+import numbers
+
+try:
+ import requests
+except ImportError:
+ requests = None
+
+from jsonschema import _utils, _validators
+from jsonschema.compat import (
+ Sequence, urljoin, urlsplit, urldefrag, unquote, urlopen,
+ str_types, int_types, iteritems,
+)
+from jsonschema.exceptions import ErrorTree # Backwards compatibility # noqa
+from jsonschema.exceptions import RefResolutionError, SchemaError, UnknownType
+
+
+_unset = _utils.Unset()
+
+validators = {}
+meta_schemas = _utils.URIDict()
+
+
+def validates(version):
+ """
+ Register the decorated validator for a ``version`` of the specification.
+
+ Registered validators and their meta schemas will be considered when
+ parsing ``$schema`` properties' URIs.
+
+ :argument str version: an identifier to use as the version's name
+ :returns: a class decorator to decorate the validator with the version
+
+ """
+
+ def _validates(cls):
+ validators[version] = cls
+ if u"id" in cls.META_SCHEMA:
+ meta_schemas[cls.META_SCHEMA[u"id"]] = cls
+ return cls
+ return _validates
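+
+# Editor's sketch (not part of the vendored file): passing ``version`` to
+# ``create`` below applies this decorator automatically; the explicit form
+# is shown here.  The version string "draft-custom" is hypothetical.
+def _example_register_validator():  # pragma: no cover (illustration only)
+    CustomValidator = validates(u"draft-custom")(create(meta_schema={}))
+    assert validators[u"draft-custom"] is CustomValidator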
+
+
+def create(meta_schema, validators=(), version=None, default_types=None): # noqa
+ if default_types is None:
+ default_types = {
+ u"array" : list, u"boolean" : bool, u"integer" : int_types,
+ u"null" : type(None), u"number" : numbers.Number, u"object" : dict,
+ u"string" : str_types,
+ }
+
+ class Validator(object):
+ VALIDATORS = dict(validators)
+ META_SCHEMA = dict(meta_schema)
+ DEFAULT_TYPES = dict(default_types)
+
+ def __init__(
+ self, schema, types=(), resolver=None, format_checker=None,
+ ):
+ self._types = dict(self.DEFAULT_TYPES)
+ self._types.update(types)
+
+ if resolver is None:
+ resolver = RefResolver.from_schema(schema)
+
+ self.resolver = resolver
+ self.format_checker = format_checker
+ self.schema = schema
+
+ @classmethod
+ def check_schema(cls, schema):
+ for error in cls(cls.META_SCHEMA).iter_errors(schema):
+ raise SchemaError.create_from(error)
+
+ def iter_errors(self, instance, _schema=None):
+ if _schema is None:
+ _schema = self.schema
+
+ with self.resolver.in_scope(_schema.get(u"id", u"")):
+ ref = _schema.get(u"$ref")
+ if ref is not None:
+ validators = [(u"$ref", ref)]
+ else:
+ validators = iteritems(_schema)
+
+ for k, v in validators:
+ validator = self.VALIDATORS.get(k)
+ if validator is None:
+ continue
+
+ errors = validator(self, v, instance, _schema) or ()
+ for error in errors:
+ # set details if they were not already set by the called function
+ error._set(
+ validator=k,
+ validator_value=v,
+ instance=instance,
+ schema=_schema,
+ )
+ if k != u"$ref":
+ error.schema_path.appendleft(k)
+ yield error
+
+ def descend(self, instance, schema, path=None, schema_path=None):
+ for error in self.iter_errors(instance, schema):
+ if path is not None:
+ error.path.appendleft(path)
+ if schema_path is not None:
+ error.schema_path.appendleft(schema_path)
+ yield error
+
+ def validate(self, *args, **kwargs):
+ for error in self.iter_errors(*args, **kwargs):
+ raise error
+
+ def is_type(self, instance, type):
+ if type not in self._types:
+ raise UnknownType(type, instance, self.schema)
+ pytypes = self._types[type]
+
+ # bool inherits from int, so ensure bools aren't reported as ints
+ if isinstance(instance, bool):
+ pytypes = _utils.flatten(pytypes)
+ is_number = any(
+ issubclass(pytype, numbers.Number) for pytype in pytypes
+ )
+ if is_number and bool not in pytypes:
+ return False
+ return isinstance(instance, pytypes)
+
+ def is_valid(self, instance, _schema=None):
+ error = next(self.iter_errors(instance, _schema), None)
+ return error is None
+
+ if version is not None:
+ Validator = validates(version)(Validator)
+ Validator.__name__ = version.title().replace(" ", "") + "Validator"
+
+ return Validator
+
+
+def extend(validator, validators, version=None):
+ all_validators = dict(validator.VALIDATORS)
+ all_validators.update(validators)
+ return create(
+ meta_schema=validator.META_SCHEMA,
+ validators=all_validators,
+ version=version,
+ default_types=validator.DEFAULT_TYPES,
+ )
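+
+# Editor's sketch (not part of the vendored file): ``extend`` overlays new
+# entries onto a copy of a validator's VALIDATORS, so a custom keyword can
+# be added without touching Draft4Validator itself.  The keyword "isEven"
+# is hypothetical.
+def _example_extend_usage():  # pragma: no cover (illustration only)
+    from jsonschema.exceptions import ValidationError
+
+    def is_even(validator, value, instance, schema):
+        # Validator callables yield ValidationErrors; yielding nothing
+        # means the instance passed this keyword.
+        if value and isinstance(instance, int) and instance % 2:
+            yield ValidationError("%r is not even" % (instance,))
+
+    EvenValidator = extend(Draft4Validator, {u"isEven": is_even})
+    assert EvenValidator({u"isEven": True}).is_valid(2)
+    assert not EvenValidator({u"isEven": True}).is_valid(3)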
+
+
+Draft3Validator = create(
+ meta_schema=_utils.load_schema("draft3"),
+ validators={
+ u"$ref" : _validators.ref,
+ u"additionalItems" : _validators.additionalItems,
+ u"additionalProperties" : _validators.additionalProperties,
+ u"dependencies" : _validators.dependencies,
+ u"disallow" : _validators.disallow_draft3,
+ u"divisibleBy" : _validators.multipleOf,
+ u"enum" : _validators.enum,
+ u"extends" : _validators.extends_draft3,
+ u"format" : _validators.format,
+ u"items" : _validators.items,
+ u"maxItems" : _validators.maxItems,
+ u"maxLength" : _validators.maxLength,
+ u"maximum" : _validators.maximum,
+ u"minItems" : _validators.minItems,
+ u"minLength" : _validators.minLength,
+ u"minimum" : _validators.minimum,
+ u"multipleOf" : _validators.multipleOf,
+ u"pattern" : _validators.pattern,
+ u"patternProperties" : _validators.patternProperties,
+ u"properties" : _validators.properties_draft3,
+ u"type" : _validators.type_draft3,
+ u"uniqueItems" : _validators.uniqueItems,
+ },
+ version="draft3",
+)
+
+Draft4Validator = create(
+ meta_schema=_utils.load_schema("draft4"),
+ validators={
+ u"$ref" : _validators.ref,
+ u"additionalItems" : _validators.additionalItems,
+ u"additionalProperties" : _validators.additionalProperties,
+ u"allOf" : _validators.allOf_draft4,
+ u"anyOf" : _validators.anyOf_draft4,
+ u"dependencies" : _validators.dependencies,
+ u"enum" : _validators.enum,
+ u"format" : _validators.format,
+ u"items" : _validators.items,
+ u"maxItems" : _validators.maxItems,
+ u"maxLength" : _validators.maxLength,
+ u"maxProperties" : _validators.maxProperties_draft4,
+ u"maximum" : _validators.maximum,
+ u"minItems" : _validators.minItems,
+ u"minLength" : _validators.minLength,
+ u"minProperties" : _validators.minProperties_draft4,
+ u"minimum" : _validators.minimum,
+ u"multipleOf" : _validators.multipleOf,
+ u"not" : _validators.not_draft4,
+ u"oneOf" : _validators.oneOf_draft4,
+ u"pattern" : _validators.pattern,
+ u"patternProperties" : _validators.patternProperties,
+ u"properties" : _validators.properties_draft4,
+ u"required" : _validators.required_draft4,
+ u"type" : _validators.type_draft4,
+ u"uniqueItems" : _validators.uniqueItems,
+ },
+ version="draft4",
+)
+
+
+class RefResolver(object):
+ """
+ Resolve JSON References.
+
+ :argument str base_uri: URI of the referring document
+ :argument referrer: the actual referring document
+ :argument dict store: a mapping from URIs to documents to cache
+ :argument bool cache_remote: whether remote refs should be cached after
+ first resolution
+ :argument dict handlers: a mapping from URI schemes to functions that
+ should be used to retrieve them
+
+ """
+
+ def __init__(
+ self, base_uri, referrer, store=(), cache_remote=True, handlers=(),
+ ):
+ self.base_uri = base_uri
+ self.resolution_scope = base_uri
+ # This attribute is not used; it is kept for backwards compatibility
+ self.referrer = referrer
+ self.cache_remote = cache_remote
+ self.handlers = dict(handlers)
+
+ self.store = _utils.URIDict(
+ (id, validator.META_SCHEMA)
+ for id, validator in iteritems(meta_schemas)
+ )
+ self.store.update(store)
+ self.store[base_uri] = referrer
+
+ @classmethod
+ def from_schema(cls, schema, *args, **kwargs):
+ """
+ Construct a resolver from a JSON schema object.
+
+ :argument schema schema: the referring schema
+ :rtype: :class:`RefResolver`
+
+ """
+
+ return cls(schema.get(u"id", u""), schema, *args, **kwargs)
+
+ @contextlib.contextmanager
+ def in_scope(self, scope):
+ old_scope = self.resolution_scope
+ self.resolution_scope = urljoin(old_scope, scope)
+ try:
+ yield
+ finally:
+ self.resolution_scope = old_scope
+
+ @contextlib.contextmanager
+ def resolving(self, ref):
+ """
+ Context manager which resolves a JSON ``ref`` and enters the
+ resolution scope of this ref.
+
+ :argument str ref: reference to resolve
+
+ """
+
+ full_uri = urljoin(self.resolution_scope, ref)
+ uri, fragment = urldefrag(full_uri)
+ if not uri:
+ uri = self.base_uri
+
+ if uri in self.store:
+ document = self.store[uri]
+ else:
+ try:
+ document = self.resolve_remote(uri)
+ except Exception as exc:
+ raise RefResolutionError(exc)
+
+ old_base_uri, self.base_uri = self.base_uri, uri
+ try:
+ with self.in_scope(uri):
+ yield self.resolve_fragment(document, fragment)
+ finally:
+ self.base_uri = old_base_uri
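+
+    # Editor's note (illustration, not part of the vendored file): calls to
+    # ``resolving`` nest, and relative refs are joined against the scope
+    # pushed by the enclosing resolution.  The URI below is hypothetical:
+    #
+    #   with resolver.resolving(u"http://example/schema#") as schema:
+    #       with resolver.resolving(u"#/properties/foo") as sub:
+    #           pass  # sub is schema["properties"]["foo"]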
+
+ def resolve_fragment(self, document, fragment):
+ """
+ Resolve a ``fragment`` within the referenced ``document``.
+
+ :argument document: the referent document
+ :argument str fragment: a URI fragment to resolve within it
+
+ """
+
+ fragment = fragment.lstrip(u"/")
+ parts = unquote(fragment).split(u"/") if fragment else []
+
+ for part in parts:
+ part = part.replace(u"~1", u"/").replace(u"~0", u"~")
+
+ if isinstance(document, Sequence):
+ # Array indexes should be turned into integers
+ try:
+ part = int(part)
+ except ValueError:
+ pass
+ try:
+ document = document[part]
+ except (TypeError, LookupError):
+ raise RefResolutionError(
+ "Unresolvable JSON pointer: %r" % fragment
+ )
+
+ return document
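+
+    # Editor's note (illustration, not part of the vendored file): per the
+    # JSON-pointer escaping above, "~1" decodes to "/" and "~0" to "~",
+    # and numeric parts index into arrays, e.g. for any resolver instance:
+    #
+    #   resolver.resolve_fragment({"a": [{"b~c": 1}]}, u"/a/0/b~0c") == 1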
+
+ def resolve_remote(self, uri):
+ """
+ Resolve a remote ``uri``.
+
+ Does not check the store first, but stores the retrieved document in
+ the store if :attr:`RefResolver.cache_remote` is True.
+
+ .. note::
+
+ If the requests_ library is present, ``jsonschema`` will use it to
+ request the remote ``uri``, so that the correct encoding is
+ detected and used.
+
+ If it isn't, or if the scheme of the ``uri`` is not ``http`` or
+ ``https``, UTF-8 is assumed.
+
+ :argument str uri: the URI to resolve
+ :returns: the retrieved document
+
+ .. _requests: http://pypi.python.org/pypi/requests/
+
+ """
+
+ scheme = urlsplit(uri).scheme
+
+ if scheme in self.handlers:
+ result = self.handlers[scheme](uri)
+ elif (
+ scheme in [u"http", u"https"] and
+ requests and
+ getattr(requests.Response, "json", None) is not None
+ ):
+ # Requests has support for detecting the correct encoding of
+ # json over http
+ if callable(requests.Response.json):
+ result = requests.get(uri).json()
+ else:
+ result = requests.get(uri).json
+ else:
+ # Otherwise, pass off to urllib and assume utf-8
+ result = json.loads(urlopen(uri).read().decode("utf-8"))
+
+ if self.cache_remote:
+ self.store[uri] = result
+ return result
+
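+# Editor's sketch (not part of the vendored file): a handler keyed by URI
+# scheme takes precedence over requests/urlopen in ``resolve_remote``.  The
+# ``file`` handler and the path below are hypothetical.
+def _example_file_handler():  # pragma: no cover (illustration only)
+    def load_file_uri(uri):
+        with open(urlsplit(uri).path) as f:
+            return json.load(f)
+    resolver = RefResolver(
+        base_uri=u"", referrer={}, handlers={u"file": load_file_uri},
+    )
+    # resolver.resolving(u"file:///tmp/schema.json") would now call
+    # load_file_uri and, with cache_remote left on, store the result.
+    return resolver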
+
+def validator_for(schema, default=_unset):
+ if default is _unset:
+ default = Draft4Validator
+ return meta_schemas.get(schema.get(u"$schema", u""), default)
+
+
+def validate(instance, schema, cls=None, *args, **kwargs):
+ """
+ Validate an instance under the given schema.
+
+ >>> validate([2, 3, 4], {"maxItems" : 2})
+ Traceback (most recent call last):
+ ...
+ ValidationError: [2, 3, 4] is too long
+
+ :func:`validate` will first verify that the provided schema is itself
+ valid, since not doing so can produce less obvious error messages and
+ cause validation to fail in less obvious or inconsistent ways. If you know
+ you have a valid schema already or don't care, you might prefer using the
+ :meth:`~IValidator.validate` method directly on a specific validator
+ (e.g. :meth:`Draft4Validator.validate`).
+
+
+ :argument instance: the instance to validate
+ :argument schema: the schema to validate with
+ :argument cls: an :class:`IValidator` class that will be used to validate
+ the instance.
+
+ If the ``cls`` argument is not provided, two things will happen in
+ accordance with the specification. First, if the schema has a
+ :validator:`$schema` property containing a known meta-schema [#]_ then the
+ proper validator will be used. The specification recommends that all
+ schemas contain :validator:`$schema` properties for this reason. If no
+ :validator:`$schema` property is found, the default validator class is
+ :class:`Draft4Validator`.
+
+ Any other provided positional and keyword arguments will be passed on when
+ instantiating the ``cls``.
+
+ :raises:
+ :exc:`ValidationError` if the instance is invalid
+
+ :exc:`SchemaError` if the schema itself is invalid
+
+ .. rubric:: Footnotes
+ .. [#] known by a validator registered with :func:`validates`
+ """
+ if cls is None:
+ cls = validator_for(schema)
+ cls.check_schema(schema)
+ cls(schema, *args, **kwargs).validate(instance)
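+
+
+# Editor's sketch (not part of the vendored file): how the module-level
+# entry points above compose.  The schema is a made-up example.
+def _example_validate_usage():  # pragma: no cover (illustration only)
+    schema = {u"type": u"object", u"required": [u"name"]}
+    # No $schema property, so validator_for falls back to Draft4Validator.
+    cls = validator_for(schema)
+    cls.check_schema(schema)  # raises SchemaError if the schema were bad
+    assert cls(schema).is_valid({u"name": u"spack"})
+    assert not cls(schema).is_valid({})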
diff --git a/lib/spack/external/nose/LICENSE b/lib/spack/external/nose/LICENSE
new file mode 100644
index 0000000000..8add30ad59
--- /dev/null
+++ b/lib/spack/external/nose/LICENSE
@@ -0,0 +1,504 @@
+ GNU LESSER GENERAL PUBLIC LICENSE
+ Version 2.1, February 1999
+
+ Copyright (C) 1991, 1999 Free Software Foundation, Inc.
+ 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+[This is the first released version of the Lesser GPL. It also counts
+ as the successor of the GNU Library Public License, version 2, hence
+ the version number 2.1.]
+
+ Preamble
+
+ The licenses for most software are designed to take away your
+freedom to share and change it. By contrast, the GNU General Public
+Licenses are intended to guarantee your freedom to share and change
+free software--to make sure the software is free for all its users.
+
+ This license, the Lesser General Public License, applies to some
+specially designated software packages--typically libraries--of the
+Free Software Foundation and other authors who decide to use it. You
+can use it too, but we suggest you first think carefully about whether
+this license or the ordinary General Public License is the better
+strategy to use in any particular case, based on the explanations below.
+
+ When we speak of free software, we are referring to freedom of use,
+not price. Our General Public Licenses are designed to make sure that
+you have the freedom to distribute copies of free software (and charge
+for this service if you wish); that you receive source code or can get
+it if you want it; that you can change the software and use pieces of
+it in new free programs; and that you are informed that you can do
+these things.
+
+ To protect your rights, we need to make restrictions that forbid
+distributors to deny you these rights or to ask you to surrender these
+rights. These restrictions translate to certain responsibilities for
+you if you distribute copies of the library or if you modify it.
+
+ For example, if you distribute copies of the library, whether gratis
+or for a fee, you must give the recipients all the rights that we gave
+you. You must make sure that they, too, receive or can get the source
+code. If you link other code with the library, you must provide
+complete object files to the recipients, so that they can relink them
+with the library after making changes to the library and recompiling
+it. And you must show them these terms so they know their rights.
+
+ We protect your rights with a two-step method: (1) we copyright the
+library, and (2) we offer you this license, which gives you legal
+permission to copy, distribute and/or modify the library.
+
+ To protect each distributor, we want to make it very clear that
+there is no warranty for the free library. Also, if the library is
+modified by someone else and passed on, the recipients should know
+that what they have is not the original version, so that the original
+author's reputation will not be affected by problems that might be
+introduced by others.
+
+ Finally, software patents pose a constant threat to the existence of
+any free program. We wish to make sure that a company cannot
+effectively restrict the users of a free program by obtaining a
+restrictive license from a patent holder. Therefore, we insist that
+any patent license obtained for a version of the library must be
+consistent with the full freedom of use specified in this license.
+
+ Most GNU software, including some libraries, is covered by the
+ordinary GNU General Public License. This license, the GNU Lesser
+General Public License, applies to certain designated libraries, and
+is quite different from the ordinary General Public License. We use
+this license for certain libraries in order to permit linking those
+libraries into non-free programs.
+
+ When a program is linked with a library, whether statically or using
+a shared library, the combination of the two is legally speaking a
+combined work, a derivative of the original library. The ordinary
+General Public License therefore permits such linking only if the
+entire combination fits its criteria of freedom. The Lesser General
+Public License permits more lax criteria for linking other code with
+the library.
+
+ We call this license the "Lesser" General Public License because it
+does Less to protect the user's freedom than the ordinary General
+Public License. It also provides other free software developers Less
+of an advantage over competing non-free programs. These disadvantages
+are the reason we use the ordinary General Public License for many
+libraries. However, the Lesser license provides advantages in certain
+special circumstances.
+
+ For example, on rare occasions, there may be a special need to
+encourage the widest possible use of a certain library, so that it becomes
+a de-facto standard. To achieve this, non-free programs must be
+allowed to use the library. A more frequent case is that a free
+library does the same job as widely used non-free libraries. In this
+case, there is little to gain by limiting the free library to free
+software only, so we use the Lesser General Public License.
+
+ In other cases, permission to use a particular library in non-free
+programs enables a greater number of people to use a large body of
+free software. For example, permission to use the GNU C Library in
+non-free programs enables many more people to use the whole GNU
+operating system, as well as its variant, the GNU/Linux operating
+system.
+
+ Although the Lesser General Public License is Less protective of the
+users' freedom, it does ensure that the user of a program that is
+linked with the Library has the freedom and the wherewithal to run
+that program using a modified version of the Library.
+
+ The precise terms and conditions for copying, distribution and
+modification follow. Pay close attention to the difference between a
+"work based on the library" and a "work that uses the library". The
+former contains code derived from the library, whereas the latter must
+be combined with the library in order to run.
+
+ GNU LESSER GENERAL PUBLIC LICENSE
+ TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+ 0. This License Agreement applies to any software library or other
+program which contains a notice placed by the copyright holder or
+other authorized party saying it may be distributed under the terms of
+this Lesser General Public License (also called "this License").
+Each licensee is addressed as "you".
+
+ A "library" means a collection of software functions and/or data
+prepared so as to be conveniently linked with application programs
+(which use some of those functions and data) to form executables.
+
+ The "Library", below, refers to any such software library or work
+which has been distributed under these terms. A "work based on the
+Library" means either the Library or any derivative work under
+copyright law: that is to say, a work containing the Library or a
+portion of it, either verbatim or with modifications and/or translated
+straightforwardly into another language. (Hereinafter, translation is
+included without limitation in the term "modification".)
+
+ "Source code" for a work means the preferred form of the work for
+making modifications to it. For a library, complete source code means
+all the source code for all modules it contains, plus any associated
+interface definition files, plus the scripts used to control compilation
+and installation of the library.
+
+ Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope. The act of
+running a program using the Library is not restricted, and output from
+such a program is covered only if its contents constitute a work based
+on the Library (independent of the use of the Library in a tool for
+writing it). Whether that is true depends on what the Library does
+and what the program that uses the Library does.
+
+ 1. You may copy and distribute verbatim copies of the Library's
+complete source code as you receive it, in any medium, provided that
+you conspicuously and appropriately publish on each copy an
+appropriate copyright notice and disclaimer of warranty; keep intact
+all the notices that refer to this License and to the absence of any
+warranty; and distribute a copy of this License along with the
+Library.
+
+ You may charge a fee for the physical act of transferring a copy,
+and you may at your option offer warranty protection in exchange for a
+fee.
+
+ 2. You may modify your copy or copies of the Library or any portion
+of it, thus forming a work based on the Library, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+ a) The modified work must itself be a software library.
+
+ b) You must cause the files modified to carry prominent notices
+ stating that you changed the files and the date of any change.
+
+ c) You must cause the whole of the work to be licensed at no
+ charge to all third parties under the terms of this License.
+
+ d) If a facility in the modified Library refers to a function or a
+ table of data to be supplied by an application program that uses
+ the facility, other than as an argument passed when the facility
+ is invoked, then you must make a good faith effort to ensure that,
+ in the event an application does not supply such function or
+ table, the facility still operates, and performs whatever part of
+ its purpose remains meaningful.
+
+ (For example, a function in a library to compute square roots has
+ a purpose that is entirely well-defined independent of the
+ application. Therefore, Subsection 2d requires that any
+ application-supplied function or table used by this function must
+ be optional: if the application does not supply it, the square
+ root function must still compute square roots.)
+
+These requirements apply to the modified work as a whole. If
+identifiable sections of that work are not derived from the Library,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works. But when you
+distribute the same sections as part of a whole which is a work based
+on the Library, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote
+it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Library.
+
+In addition, mere aggregation of another work not based on the Library
+with the Library (or with a work based on the Library) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+ 3. You may opt to apply the terms of the ordinary GNU General Public
+License instead of this License to a given copy of the Library. To do
+this, you must alter all the notices that refer to this License, so
+that they refer to the ordinary GNU General Public License, version 2,
+instead of to this License. (If a newer version than version 2 of the
+ordinary GNU General Public License has appeared, then you can specify
+that version instead if you wish.) Do not make any other change in
+these notices.
+
+ Once this change is made in a given copy, it is irreversible for
+that copy, so the ordinary GNU General Public License applies to all
+subsequent copies and derivative works made from that copy.
+
+ This option is useful when you wish to copy part of the code of
+the Library into a program that is not a library.
+
+ 4. You may copy and distribute the Library (or a portion or
+derivative of it, under Section 2) in object code or executable form
+under the terms of Sections 1 and 2 above provided that you accompany
+it with the complete corresponding machine-readable source code, which
+must be distributed under the terms of Sections 1 and 2 above on a
+medium customarily used for software interchange.
+
+ If distribution of object code is made by offering access to copy
+from a designated place, then offering equivalent access to copy the
+source code from the same place satisfies the requirement to
+distribute the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+ 5. A program that contains no derivative of any portion of the
+Library, but is designed to work with the Library by being compiled or
+linked with it, is called a "work that uses the Library". Such a
+work, in isolation, is not a derivative work of the Library, and
+therefore falls outside the scope of this License.
+
+ However, linking a "work that uses the Library" with the Library
+creates an executable that is a derivative of the Library (because it
+contains portions of the Library), rather than a "work that uses the
+library". The executable is therefore covered by this License.
+Section 6 states terms for distribution of such executables.
+
+ When a "work that uses the Library" uses material from a header file
+that is part of the Library, the object code for the work may be a
+derivative work of the Library even though the source code is not.
+Whether this is true is especially significant if the work can be
+linked without the Library, or if the work is itself a library. The
+threshold for this to be true is not precisely defined by law.
+
+ If such an object file uses only numerical parameters, data
+structure layouts and accessors, and small macros and small inline
+functions (ten lines or less in length), then the use of the object
+file is unrestricted, regardless of whether it is legally a derivative
+work. (Executables containing this object code plus portions of the
+Library will still fall under Section 6.)
+
+ Otherwise, if the work is a derivative of the Library, you may
+distribute the object code for the work under the terms of Section 6.
+Any executables containing that work also fall under Section 6,
+whether or not they are linked directly with the Library itself.
+
+ 6. As an exception to the Sections above, you may also combine or
+link a "work that uses the Library" with the Library to produce a
+work containing portions of the Library, and distribute that work
+under terms of your choice, provided that the terms permit
+modification of the work for the customer's own use and reverse
+engineering for debugging such modifications.
+
+ You must give prominent notice with each copy of the work that the
+Library is used in it and that the Library and its use are covered by
+this License. You must supply a copy of this License. If the work
+during execution displays copyright notices, you must include the
+copyright notice for the Library among them, as well as a reference
+directing the user to the copy of this License. Also, you must do one
+of these things:
+
+ a) Accompany the work with the complete corresponding
+ machine-readable source code for the Library including whatever
+ changes were used in the work (which must be distributed under
+ Sections 1 and 2 above); and, if the work is an executable linked
+ with the Library, with the complete machine-readable "work that
+ uses the Library", as object code and/or source code, so that the
+ user can modify the Library and then relink to produce a modified
+ executable containing the modified Library. (It is understood
+ that the user who changes the contents of definitions files in the
+ Library will not necessarily be able to recompile the application
+ to use the modified definitions.)
+
+ b) Use a suitable shared library mechanism for linking with the
+ Library. A suitable mechanism is one that (1) uses at run time a
+ copy of the library already present on the user's computer system,
+ rather than copying library functions into the executable, and (2)
+ will operate properly with a modified version of the library, if
+ the user installs one, as long as the modified version is
+ interface-compatible with the version that the work was made with.
+
+ c) Accompany the work with a written offer, valid for at
+ least three years, to give the same user the materials
+ specified in Subsection 6a, above, for a charge no more
+ than the cost of performing this distribution.
+
+ d) If distribution of the work is made by offering access to copy
+ from a designated place, offer equivalent access to copy the above
+ specified materials from the same place.
+
+ e) Verify that the user has already received a copy of these
+ materials or that you have already sent this user a copy.
+
+ For an executable, the required form of the "work that uses the
+Library" must include any data and utility programs needed for
+reproducing the executable from it. However, as a special exception,
+the materials to be distributed need not include anything that is
+normally distributed (in either source or binary form) with the major
+components (compiler, kernel, and so on) of the operating system on
+which the executable runs, unless that component itself accompanies
+the executable.
+
+ It may happen that this requirement contradicts the license
+restrictions of other proprietary libraries that do not normally
+accompany the operating system. Such a contradiction means you cannot
+use both them and the Library together in an executable that you
+distribute.
+
+ 7. You may place library facilities that are a work based on the
+Library side-by-side in a single library together with other library
+facilities not covered by this License, and distribute such a combined
+library, provided that the separate distribution of the work based on
+the Library and of the other library facilities is otherwise
+permitted, and provided that you do these two things:
+
+ a) Accompany the combined library with a copy of the same work
+ based on the Library, uncombined with any other library
+ facilities. This must be distributed under the terms of the
+ Sections above.
+
+ b) Give prominent notice with the combined library of the fact
+ that part of it is a work based on the Library, and explaining
+ where to find the accompanying uncombined form of the same work.
+
+ 8. You may not copy, modify, sublicense, link with, or distribute
+the Library except as expressly provided under this License. Any
+attempt otherwise to copy, modify, sublicense, link with, or
+distribute the Library is void, and will automatically terminate your
+rights under this License. However, parties who have received copies,
+or rights, from you under this License will not have their licenses
+terminated so long as such parties remain in full compliance.
+
+ 9. You are not required to accept this License, since you have not
+signed it. However, nothing else grants you permission to modify or
+distribute the Library or its derivative works. These actions are
+prohibited by law if you do not accept this License. Therefore, by
+modifying or distributing the Library (or any work based on the
+Library), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Library or works based on it.
+
+ 10. Each time you redistribute the Library (or any work based on the
+Library), the recipient automatically receives a license from the
+original licensor to copy, distribute, link with or modify the Library
+subject to these terms and conditions. You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties with
+this License.
+
+ 11. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License. If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Library at all. For example, if a patent
+license would not permit royalty-free redistribution of the Library by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Library.
+
+If any portion of this section is held invalid or unenforceable under any
+particular circumstance, the balance of the section is intended to apply,
+and the section as a whole is intended to apply in other circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system which is
+implemented by public license practices. Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+ 12. If the distribution and/or use of the Library is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Library under this License may add
+an explicit geographical distribution limitation excluding those countries,
+so that distribution is permitted only in or among countries not thus
+excluded. In such case, this License incorporates the limitation as if
+written in the body of this License.
+
+ 13. The Free Software Foundation may publish revised and/or new
+versions of the Lesser General Public License from time to time.
+Such new versions will be similar in spirit to the present version,
+but may differ in detail to address new problems or concerns.
+
+Each version is given a distinguishing version number. If the Library
+specifies a version number of this License which applies to it and
+"any later version", you have the option of following the terms and
+conditions either of that version or of any later version published by
+the Free Software Foundation. If the Library does not specify a
+license version number, you may choose any version ever published by
+the Free Software Foundation.
+
+ 14. If you wish to incorporate parts of the Library into other free
+programs whose distribution conditions are incompatible with these,
+write to the author to ask for permission. For software which is
+copyrighted by the Free Software Foundation, write to the Free
+Software Foundation; we sometimes make exceptions for this. Our
+decision will be guided by the two goals of preserving the free status
+of all derivatives of our free software and of promoting the sharing
+and reuse of software generally.
+
+ NO WARRANTY
+
+ 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
+WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
+EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
+OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
+KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
+PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
+LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
+THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
+
+ 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
+WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
+AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
+FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
+CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
+LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
+RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
+FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
+SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+DAMAGES.
+
+ END OF TERMS AND CONDITIONS
+
+ How to Apply These Terms to Your New Libraries
+
+ If you develop a new library, and you want it to be of the greatest
+possible use to the public, we recommend making it free software that
+everyone can redistribute and change. You can do so by permitting
+redistribution under these terms (or, alternatively, under the terms of the
+ordinary General Public License).
+
+ To apply these terms, attach the following notices to the library. It is
+safest to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least the
+"copyright" line and a pointer to where the full notice is found.
+
+ <one line to give the library's name and a brief idea of what it does.>
+ Copyright (C) <year> <name of author>
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
+
+Also add information on how to contact you by electronic and paper mail.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the library, if
+necessary. Here is a sample; alter the names:
+
+ Yoyodyne, Inc., hereby disclaims all copyright interest in the
+ library `Frob' (a library for tweaking knobs) written by James Random Hacker.
+
+ <signature of Ty Coon>, 1 April 1990
+ Ty Coon, President of Vice
+
+That's all there is to it!
+
+
diff --git a/lib/spack/external/nose/__init__.py b/lib/spack/external/nose/__init__.py
new file mode 100644
index 0000000000..1ae1362b7a
--- /dev/null
+++ b/lib/spack/external/nose/__init__.py
@@ -0,0 +1,15 @@
+from nose.core import collector, main, run, run_exit, runmodule
+# backwards compatibility
+from nose.exc import SkipTest, DeprecatedTest
+from nose.tools import with_setup
+
+__author__ = 'Jason Pellerin'
+__versioninfo__ = (1, 3, 7)
+__version__ = '.'.join(map(str, __versioninfo__))
+
+__all__ = [
+ 'main', 'run', 'run_exit', 'runmodule', 'with_setup',
+ 'SkipTest', 'DeprecatedTest', 'collector'
+ ]
+
+
diff --git a/lib/spack/external/nose/__main__.py b/lib/spack/external/nose/__main__.py
new file mode 100644
index 0000000000..b402d9df12
--- /dev/null
+++ b/lib/spack/external/nose/__main__.py
@@ -0,0 +1,8 @@
+import sys
+
+from nose.core import run_exit
+
+if sys.argv[0].endswith('__main__.py'):
+ sys.argv[0] = '%s -m nose' % sys.executable
+
+run_exit()
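+
+# Editor's note: with this shim ``python -m nose`` behaves like the
+# ``nosetests`` script; the argv[0] rewrite above only changes how option
+# parsing renders the program name in usage messages.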
diff --git a/lib/spack/external/nose/case.py b/lib/spack/external/nose/case.py
new file mode 100644
index 0000000000..cffa4ab4c9
--- /dev/null
+++ b/lib/spack/external/nose/case.py
@@ -0,0 +1,397 @@
+"""nose unittest.TestCase subclasses. It is not necessary to subclass these
+classes when writing tests; they are used internally by nose.loader.TestLoader
+to create test cases from test functions and methods in test classes.
+"""
+import logging
+import sys
+import unittest
+from inspect import isfunction
+from nose.config import Config
+from nose.failure import Failure # for backwards compatibility
+from nose.util import resolve_name, test_address, try_run
+
+log = logging.getLogger(__name__)
+
+
+__all__ = ['Test']
+
+
+class Test(unittest.TestCase):
+ """The universal test case wrapper.
+
+ When a plugin sees a test, it will always see an instance of this
+ class. To access the actual test case that will be run, access the
+ test property of the nose.case.Test instance.
+ """
+ __test__ = False # do not collect
+ def __init__(self, test, config=None, resultProxy=None):
+ # sanity check
+ if not callable(test):
+ raise TypeError("nose.case.Test called with argument %r that "
+ "is not callable. A callable is required."
+ % test)
+ self.test = test
+ if config is None:
+ config = Config()
+ self.config = config
+ self.tbinfo = None
+ self.capturedOutput = None
+ self.resultProxy = resultProxy
+ self.plugins = config.plugins
+ self.passed = None
+ unittest.TestCase.__init__(self)
+
+ def __call__(self, *arg, **kwarg):
+ return self.run(*arg, **kwarg)
+
+ def __str__(self):
+ name = self.plugins.testName(self)
+ if name is not None:
+ return name
+ return str(self.test)
+
+ def __repr__(self):
+ return "Test(%r)" % self.test
+
+ def afterTest(self, result):
+ """Called after test is complete (after result.stopTest)
+ """
+ try:
+ afterTest = result.afterTest
+ except AttributeError:
+ pass
+ else:
+ afterTest(self.test)
+
+ def beforeTest(self, result):
+ """Called before test is run (before result.startTest)
+ """
+ try:
+ beforeTest = result.beforeTest
+ except AttributeError:
+ pass
+ else:
+ beforeTest(self.test)
+
+ def exc_info(self):
+ """Extract exception info.
+ """
+ exc, exv, tb = sys.exc_info()
+ return (exc, exv, tb)
+
+ def id(self):
+ """Get a short(er) description of the test
+ """
+ return self.test.id()
+
+ def address(self):
+ """Return a round-trip name for this test, a name that can be
+ fed back as input to loadTestByName and (assuming the same
+ plugin configuration) result in the loading of this test.
+ """
+ if hasattr(self.test, 'address'):
+ return self.test.address()
+ else:
+ # not a nose case
+ return test_address(self.test)
+
+ def _context(self):
+ try:
+ return self.test.context
+ except AttributeError:
+ pass
+ try:
+ return self.test.__class__
+ except AttributeError:
+ pass
+ try:
+ return resolve_name(self.test.__module__)
+ except AttributeError:
+ pass
+ return None
+ context = property(_context, None, None,
+ """Get the context object of this test (if any).""")
+
+ def run(self, result):
+ """Modified run for the test wrapper.
+
+ From here we don't call result.startTest or stopTest or
+ addSuccess. The wrapper calls addError/addFailure only if its
+ own setup or teardown fails, or running the wrapped test fails
+ (e.g., if the wrapped "test" is not callable).
+
+ Two additional methods are called, beforeTest and
+ afterTest. These give plugins a chance to modify the wrapped
+ test before it is called and do cleanup after it is
+ called. They are called unconditionally.
+ """
+ if self.resultProxy:
+ result = self.resultProxy(result, self)
+ try:
+ try:
+ self.beforeTest(result)
+ self.runTest(result)
+ except KeyboardInterrupt:
+ raise
+ except:
+ err = sys.exc_info()
+ result.addError(self, err)
+ finally:
+ self.afterTest(result)
+
+ def runTest(self, result):
+ """Run the test. Plugins may alter the test by returning a
+ value from prepareTestCase. The value must be callable and
+ must accept one argument, the result instance.
+ """
+ test = self.test
+ plug_test = self.config.plugins.prepareTestCase(self)
+ if plug_test is not None:
+ test = plug_test
+ test(result)
+
+ def shortDescription(self):
+ desc = self.plugins.describeTest(self)
+ if desc is not None:
+ return desc
+ # work around bug in unittest.TestCase.shortDescription
+ # with multiline docstrings.
+ test = self.test
+ try:
+ test._testMethodDoc = test._testMethodDoc.strip()  # 2.5
+ except AttributeError:
+ try:
+ # 2.4 and earlier
+ test._TestCase__testMethodDoc = \
+ test._TestCase__testMethodDoc.strip()
+ except AttributeError:
+ pass
+ # 2.7 compat: shortDescription() always returns something
+ # which is a change from 2.6 and below, and breaks the
+ # testName plugin call.
+ try:
+ desc = self.test.shortDescription()
+ except Exception:
+ # this is probably caused by a problem in test.__str__() and is
+ # only triggered by python 3.1's unittest!
+ pass
+ try:
+ if desc == str(self.test):
+ return
+ except Exception:
+ # If str() triggers an exception then ignore it.
+ # see issue 422
+ pass
+ return desc
+
+
+class TestBase(unittest.TestCase):
+ """Common functionality for FunctionTestCase and MethodTestCase.
+ """
+ __test__ = False # do not collect
+
+ def id(self):
+ return str(self)
+
+ def runTest(self):
+ self.test(*self.arg)
+
+ def shortDescription(self):
+ if hasattr(self.test, 'description'):
+ return self.test.description
+ func, arg = self._descriptors()
+ doc = getattr(func, '__doc__', None)
+ if not doc:
+ doc = str(self)
+ return doc.strip().split("\n")[0].strip()
+
+
+class FunctionTestCase(TestBase):
+ """TestCase wrapper for test functions.
+
+ Don't use this class directly; it is used internally in nose to
+ create test cases for test functions.
+ """
+ __test__ = False # do not collect
+
+ def __init__(self, test, setUp=None, tearDown=None, arg=tuple(),
+ descriptor=None):
+ """Initialize the MethodTestCase.
+
+ Required argument:
+
+ * test -- the test function to call.
+
+ Optional arguments:
+
+ * setUp -- function to run at setup.
+
+ * tearDown -- function to run at teardown.
+
+ * arg -- arguments to pass to the test function. This is to support
+ generator functions that yield arguments.
+
+ * descriptor -- the function, other than the test, that should be used
+ to construct the test name. This is to support generator functions.
+ """
+
+ self.test = test
+ self.setUpFunc = setUp
+ self.tearDownFunc = tearDown
+ self.arg = arg
+ self.descriptor = descriptor
+ TestBase.__init__(self)
+
+ def address(self):
+ """Return a round-trip name for this test, a name that can be
+ fed back as input to loadTestByName and (assuming the same
+ plugin configuration) result in the loading of this test.
+ """
+ if self.descriptor is not None:
+ return test_address(self.descriptor)
+ else:
+ return test_address(self.test)
+
+ def _context(self):
+ return resolve_name(self.test.__module__)
+ context = property(_context, None, None,
+ """Get context (module) of this test""")
+
+ def setUp(self):
+ """Run any setup function attached to the test function
+ """
+ if self.setUpFunc:
+ self.setUpFunc()
+ else:
+ names = ('setup', 'setUp', 'setUpFunc')
+ try_run(self.test, names)
+
+ def tearDown(self):
+ """Run any teardown function attached to the test function
+ """
+ if self.tearDownFunc:
+ self.tearDownFunc()
+ else:
+ names = ('teardown', 'tearDown', 'tearDownFunc')
+ try_run(self.test, names)
+
+ def __str__(self):
+ func, arg = self._descriptors()
+ if hasattr(func, 'compat_func_name'):
+ name = func.compat_func_name
+ else:
+ name = func.__name__
+ name = "%s.%s" % (func.__module__, name)
+ if arg:
+ name = "%s%s" % (name, arg)
+ # FIXME need to include the full dir path to disambiguate
+ # in cases where test module of the same name was seen in
+ # another directory (old fromDirectory)
+ return name
+ __repr__ = __str__
+
+ def _descriptors(self):
+ """Get the descriptors of the test function: the function and
+ arguments that will be used to construct the test name. In
+ most cases, this is the function itself and no arguments. For
+ tests generated by generator functions, the original
+ (generator) function and args passed to the generated function
+ are returned.
+ """
+ if self.descriptor:
+ return self.descriptor, self.arg
+ else:
+ return self.test, self.arg
+
+
+class MethodTestCase(TestBase):
+ """Test case wrapper for test methods.
+
+ Don't use this class directly; it is used internally in nose to
+ create test cases for test methods.
+ """
+ __test__ = False # do not collect
+
+ def __init__(self, method, test=None, arg=tuple(), descriptor=None):
+ """Initialize the MethodTestCase.
+
+ Required argument:
+
+ * method -- the method to call, may be bound or unbound. In either
+ case, a new instance of the method's class will be instantiated to
+ make the call. Note: In Python 3.x, if using an unbound method, you
+ must wrap it using pyversion.unbound_method.
+
+ Optional arguments:
+
+ * test -- the test function to call. If this is passed, it will be
+ called instead of getting a new bound method of the same name as the
+ desired method from the test instance. This is to support generator
+ methods that yield inline functions.
+
+ * arg -- arguments to pass to the test function. This is to support
+ generator methods that yield arguments.
+
+ * descriptor -- the function, other than the test, that should be used
+ to construct the test name. This is to support generator methods.
+ """
+ self.method = method
+ self.test = test
+ self.arg = arg
+ self.descriptor = descriptor
+ if isfunction(method):
+ raise ValueError(
+     "Unbound methods must be wrapped using "
+     "pyversion.unbound_method before passing to MethodTestCase")
+ self.cls = method.im_class
+ self.inst = self.cls()
+ if self.test is None:
+ method_name = self.method.__name__
+ self.test = getattr(self.inst, method_name)
+ TestBase.__init__(self)
+
+ def __str__(self):
+ func, arg = self._descriptors()
+ if hasattr(func, 'compat_func_name'):
+ name = func.compat_func_name
+ else:
+ name = func.__name__
+ name = "%s.%s.%s" % (self.cls.__module__,
+ self.cls.__name__,
+ name)
+ if arg:
+ name = "%s%s" % (name, arg)
+ return name
+ __repr__ = __str__
+
+ def address(self):
+ """Return a round-trip name for this test, a name that can be
+ fed back as input to loadTestByName and (assuming the same
+ plugin configuration) result in the loading of this test.
+ """
+ if self.descriptor is not None:
+ return test_address(self.descriptor)
+ else:
+ return test_address(self.method)
+
+ def _context(self):
+ return self.cls
+ context = property(_context, None, None,
+ """Get context (class) of this test""")
+
+ def setUp(self):
+ try_run(self.inst, ('setup', 'setUp'))
+
+ def tearDown(self):
+ try_run(self.inst, ('teardown', 'tearDown'))
+
+ def _descriptors(self):
+ """Get the descriptors of the test method: the method and
+ arguments that will be used to construct the test name. In
+ most cases, this is the method itself and no arguments. For
+ tests generated by generator methods, the original
+ (generator) method and args passed to the generated method
+ or function are returned.
+ """
+ if self.descriptor:
+ return self.descriptor, self.arg
+ else:
+ return self.method, self.arg
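+
+
+# Editor's sketch (not part of the vendored file): how nose's loader
+# conceptually wraps a bare test function.  ``check_addition`` is a
+# hypothetical test.
+def _example_wrap_function():  # pragma: no cover (illustration only)
+    def check_addition():
+        assert 1 + 1 == 2
+    case = FunctionTestCase(check_addition)
+    # Test is the universal wrapper that every plugin sees.
+    return Test(case)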
diff --git a/lib/spack/external/nose/commands.py b/lib/spack/external/nose/commands.py
new file mode 100644
index 0000000000..ef0e9caed4
--- /dev/null
+++ b/lib/spack/external/nose/commands.py
@@ -0,0 +1,172 @@
+"""
+nosetests setuptools command
+----------------------------
+
+The easiest way to run tests with nose is to use the `nosetests` setuptools
+command::
+
+ python setup.py nosetests
+
+This command has one *major* benefit over the standard `test` command: *all
+nose plugins are supported*.
+
+To configure the `nosetests` command, add a [nosetests] section to your
+setup.cfg. The [nosetests] section can contain any command line arguments that
+nosetests supports. The differences between issuing an option on the command
+line and adding it to setup.cfg are:
+
+* In setup.cfg, the -- prefix must be excluded
+* In setup.cfg, command line flags that take no arguments must be given an
+ argument flag (1, T or TRUE for active, 0, F or FALSE for inactive)
+
+Here's an example [nosetests] setup.cfg section::
+
+ [nosetests]
+ verbosity=1
+ detailed-errors=1
+ with-coverage=1
+ cover-package=nose
+ debug=nose.loader
+ pdb=1
+ pdb-failures=1
+
+If you commonly run nosetests with a large number of options, using
+the nosetests setuptools command and configuring with setup.cfg can
+make running your tests much less tedious. (Note that the same options
+and format supported in setup.cfg are supported in all other config
+files, and the nosetests script will also load config files.)
+
+Another reason to run tests with the command is that the command will
+install packages listed in your `tests_require`, as well as doing a
+complete build of your package before running tests. For packages with
+dependencies or that build C extensions, using the setuptools command
+can be more convenient than building by hand and running the nosetests
+script.
+
+Bootstrapping
+-------------
+
+If you are distributing your project and want users to be able to run tests
+without having to install nose themselves, add nose to the setup_requires
+section of your setup()::
+
+ setup(
+ # ...
+ setup_requires=['nose>=1.0']
+ )
+
+This will direct setuptools to download and activate nose during the setup
+process, making the ``nosetests`` command available.
+
+"""
+try:
+ from setuptools import Command
+except ImportError:
+ Command = nosetests = None
+else:
+ from nose.config import Config, option_blacklist, user_config_files, \
+ flag, _bool
+ from nose.core import TestProgram
+ from nose.plugins import DefaultPluginManager
+
+
+ def get_user_options(parser):
+ """convert a optparse option list into a distutils option tuple list"""
+ opt_list = []
+ for opt in parser.option_list:
+ if opt._long_opts[0][2:] in option_blacklist:
+ continue
+ long_name = opt._long_opts[0][2:]
+ if opt.action not in ('store_true', 'store_false'):
+ long_name = long_name + "="
+ short_name = None
+ if opt._short_opts:
+ short_name = opt._short_opts[0][1:]
+ opt_list.append((long_name, short_name, opt.help or ""))
+ return opt_list
+
+
+ class nosetests(Command):
+ description = "Run unit tests using nosetests"
+ __config = Config(files=user_config_files(),
+ plugins=DefaultPluginManager())
+ __parser = __config.getParser()
+ user_options = get_user_options(__parser)
+
+ def initialize_options(self):
+ """create the member variables, but change hyphens to
+ underscores
+ """
+
+ self.option_to_cmds = {}
+ for opt in self.__parser.option_list:
+ cmd_name = opt._long_opts[0][2:]
+ option_name = cmd_name.replace('-', '_')
+ self.option_to_cmds[option_name] = cmd_name
+ setattr(self, option_name, None)
+ self.attr = None
+
+ def finalize_options(self):
+ """nothing to do here"""
+ pass
+
+ def run(self):
+ """ensure tests are capable of being run, then
+ run nose.main with a reconstructed argument list"""
+ if getattr(self.distribution, 'use_2to3', False):
+ # If we run 2to3 we cannot do this in place:
+
+ # Ensure metadata is up-to-date
+ build_py = self.get_finalized_command('build_py')
+ build_py.inplace = 0
+ build_py.run()
+ bpy_cmd = self.get_finalized_command("build_py")
+ build_path = bpy_cmd.build_lib
+
+ # Build extensions
+ egg_info = self.get_finalized_command('egg_info')
+ egg_info.egg_base = build_path
+ egg_info.run()
+
+ build_ext = self.get_finalized_command('build_ext')
+ build_ext.inplace = 0
+ build_ext.run()
+ else:
+ self.run_command('egg_info')
+
+ # Build extensions in-place
+ build_ext = self.get_finalized_command('build_ext')
+ build_ext.inplace = 1
+ build_ext.run()
+
+ if self.distribution.install_requires:
+ self.distribution.fetch_build_eggs(
+ self.distribution.install_requires)
+ if self.distribution.tests_require:
+ self.distribution.fetch_build_eggs(
+ self.distribution.tests_require)
+
+ ei_cmd = self.get_finalized_command("egg_info")
+ argv = ['nosetests', '--where', ei_cmd.egg_base]
+ for (option_name, cmd_name) in self.option_to_cmds.items():
+ if option_name in option_blacklist:
+ continue
+ value = getattr(self, option_name)
+ if value is not None:
+ argv.extend(
+ self.cfgToArg(option_name.replace('_', '-'), value))
+ TestProgram(argv=argv, config=self.__config)
+
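+        # Converts one config-file setting into command-line form, e.g.
+        # cfgToArg('verbosity', '2') -> ['--verbosity', '2'], while a
+        # flag option yields just the switch: cfgToArg('stop', '1') ->
+        # ['--stop'].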
+ def cfgToArg(self, optname, value):
+ argv = []
+ long_optname = '--' + optname
+ opt = self.__parser.get_option(long_optname)
+ if opt.action in ('store_true', 'store_false'):
+ if not flag(value):
+ raise ValueError("Invalid value '%s' for '%s'" % (
+ value, optname))
+ if _bool(value):
+ argv.append(long_optname)
+ else:
+ argv.extend([long_optname, value])
+ return argv
diff --git a/lib/spack/external/nose/config.py b/lib/spack/external/nose/config.py
new file mode 100644
index 0000000000..125eb5579d
--- /dev/null
+++ b/lib/spack/external/nose/config.py
@@ -0,0 +1,661 @@
+import logging
+import optparse
+import os
+import re
+import sys
+import ConfigParser
+from optparse import OptionParser
+from nose.util import absdir, tolist
+from nose.plugins.manager import NoPlugins
+from warnings import warn, filterwarnings
+
+log = logging.getLogger(__name__)
+
+# not allowed in config files
+option_blacklist = ['help', 'verbose']
+
+config_files = [
+ # Linux users will prefer this
+ "~/.noserc",
+ # Windows users will prefer this
+ "~/nose.cfg"
+ ]
+
+# platforms on which the exe check defaults to off
+# Windows and IronPython
+exe_allowed_platforms = ('win32', 'cli')
+
+filterwarnings("always", category=DeprecationWarning,
+ module=r'(.*\.)?nose\.config')
+
+class NoSuchOptionError(Exception):
+ def __init__(self, name):
+ Exception.__init__(self, name)
+ self.name = name
+
+
+class ConfigError(Exception):
+ pass
+
+
+class ConfiguredDefaultsOptionParser(object):
+ """
+    Handler for options from the command line and config files.
+ """
+ def __init__(self, parser, config_section, error=None, file_error=None):
+ self._parser = parser
+ self._config_section = config_section
+ if error is None:
+ error = self._parser.error
+ self._error = error
+ if file_error is None:
+ file_error = lambda msg, **kw: error(msg)
+ self._file_error = file_error
+
+ def _configTuples(self, cfg, filename):
+ config = []
+ if self._config_section in cfg.sections():
+ for name, value in cfg.items(self._config_section):
+ config.append((name, value, filename))
+ return config
+
+ def _readFromFilenames(self, filenames):
+ config = []
+ for filename in filenames:
+ cfg = ConfigParser.RawConfigParser()
+ try:
+ cfg.read(filename)
+ except ConfigParser.Error, exc:
+ raise ConfigError("Error reading config file %r: %s" %
+ (filename, str(exc)))
+ config.extend(self._configTuples(cfg, filename))
+ return config
+
+ def _readFromFileObject(self, fh):
+ cfg = ConfigParser.RawConfigParser()
+ try:
+ filename = fh.name
+ except AttributeError:
+ filename = '<???>'
+ try:
+ cfg.readfp(fh)
+ except ConfigParser.Error, exc:
+ raise ConfigError("Error reading config file %r: %s" %
+ (filename, str(exc)))
+ return self._configTuples(cfg, filename)
+
+ def _readConfiguration(self, config_files):
+ try:
+ config_files.readline
+ except AttributeError:
+ filename_or_filenames = config_files
+ if isinstance(filename_or_filenames, basestring):
+ filenames = [filename_or_filenames]
+ else:
+ filenames = filename_or_filenames
+ config = self._readFromFilenames(filenames)
+ else:
+ fh = config_files
+ config = self._readFromFileObject(fh)
+ return config
+
+ def _processConfigValue(self, name, value, values, parser):
+ opt_str = '--' + name
+ option = parser.get_option(opt_str)
+ if option is None:
+ raise NoSuchOptionError(name)
+ else:
+ option.process(opt_str, value, values, parser)
+
+ def _applyConfigurationToValues(self, parser, config, values):
+ for name, value, filename in config:
+ if name in option_blacklist:
+ continue
+ try:
+ self._processConfigValue(name, value, values, parser)
+ except NoSuchOptionError, exc:
+ self._file_error(
+ "Error reading config file %r: "
+ "no such option %r" % (filename, exc.name),
+ name=name, filename=filename)
+ except optparse.OptionValueError, exc:
+ msg = str(exc).replace('--' + name, repr(name), 1)
+ self._file_error("Error reading config file %r: "
+ "%s" % (filename, msg),
+ name=name, filename=filename)
+
+ def parseArgsAndConfigFiles(self, args, config_files):
+ values = self._parser.get_default_values()
+ try:
+ config = self._readConfiguration(config_files)
+ except ConfigError, exc:
+ self._error(str(exc))
+ else:
+ try:
+ self._applyConfigurationToValues(self._parser, config, values)
+ except ConfigError, exc:
+ self._error(str(exc))
+ return self._parser.parse_args(args, values)
+
+
+class Config(object):
+ """nose configuration.
+
+ Instances of Config are used throughout nose to configure
+ behavior, including plugin lists. Here are the default values for
+ all config keys::
+
+ self.env = env = kw.pop('env', {})
+ self.args = ()
+ self.testMatch = re.compile(r'(?:^|[\\b_\\.%s-])[Tt]est' % os.sep)
+ self.addPaths = not env.get('NOSE_NOPATH', False)
+ self.configSection = 'nosetests'
+ self.debug = env.get('NOSE_DEBUG')
+ self.debugLog = env.get('NOSE_DEBUG_LOG')
+ self.exclude = None
+ self.getTestCaseNamesCompat = False
+ self.includeExe = env.get('NOSE_INCLUDE_EXE',
+ sys.platform in exe_allowed_platforms)
+ self.ignoreFiles = (re.compile(r'^\.'),
+ re.compile(r'^_'),
+ re.compile(r'^setup\.py$')
+ )
+ self.include = None
+ self.loggingConfig = None
+ self.logStream = sys.stderr
+ self.options = NoOptions()
+ self.parser = None
+ self.plugins = NoPlugins()
+ self.srcDirs = ('lib', 'src')
+ self.runOnInit = True
+ self.stopOnError = env.get('NOSE_STOP', False)
+ self.stream = sys.stderr
+ self.testNames = ()
+ self.verbosity = int(env.get('NOSE_VERBOSE', 1))
+ self.where = ()
+ self.py3where = ()
+ self.workingDir = None
+ """
+
+ def __init__(self, **kw):
+ self.env = env = kw.pop('env', {})
+ self.args = ()
+ self.testMatchPat = env.get('NOSE_TESTMATCH',
+ r'(?:^|[\b_\.%s-])[Tt]est' % os.sep)
+ self.testMatch = re.compile(self.testMatchPat)
+ self.addPaths = not env.get('NOSE_NOPATH', False)
+ self.configSection = 'nosetests'
+ self.debug = env.get('NOSE_DEBUG')
+ self.debugLog = env.get('NOSE_DEBUG_LOG')
+ self.exclude = None
+ self.getTestCaseNamesCompat = False
+ self.includeExe = env.get('NOSE_INCLUDE_EXE',
+ sys.platform in exe_allowed_platforms)
+ self.ignoreFilesDefaultStrings = [r'^\.',
+ r'^_',
+ r'^setup\.py$',
+ ]
+ self.ignoreFiles = map(re.compile, self.ignoreFilesDefaultStrings)
+ self.include = None
+ self.loggingConfig = None
+ self.logStream = sys.stderr
+ self.options = NoOptions()
+ self.parser = None
+ self.plugins = NoPlugins()
+ self.srcDirs = ('lib', 'src')
+ self.runOnInit = True
+ self.stopOnError = env.get('NOSE_STOP', False)
+ self.stream = sys.stderr
+ self.testNames = []
+ self.verbosity = int(env.get('NOSE_VERBOSE', 1))
+ self.where = ()
+ self.py3where = ()
+ self.workingDir = os.getcwd()
+ self.traverseNamespace = False
+ self.firstPackageWins = False
+ self.parserClass = OptionParser
+ self.worker = False
+
+ self._default = self.__dict__.copy()
+ self.update(kw)
+ self._orig = self.__dict__.copy()
+
+ def __getstate__(self):
+ state = self.__dict__.copy()
+ del state['stream']
+ del state['_orig']
+ del state['_default']
+ del state['env']
+ del state['logStream']
+ # FIXME remove plugins, have only plugin manager class
+ state['plugins'] = self.plugins.__class__
+ return state
+
+ def __setstate__(self, state):
+ plugincls = state.pop('plugins')
+ self.update(state)
+ self.worker = True
+ # FIXME won't work for static plugin lists
+ self.plugins = plugincls()
+ self.plugins.loadPlugins()
+ # needed so .can_configure gets set appropriately
+ dummy_parser = self.parserClass()
+ self.plugins.addOptions(dummy_parser, {})
+ self.plugins.configure(self.options, self)
+
+ def __repr__(self):
+ d = self.__dict__.copy()
+ # don't expose env, could include sensitive info
+ d['env'] = {}
+ keys = [ k for k in d.keys()
+ if not k.startswith('_') ]
+ keys.sort()
+ return "Config(%s)" % ', '.join([ '%s=%r' % (k, d[k])
+ for k in keys ])
+ __str__ = __repr__
+
+ def _parseArgs(self, argv, cfg_files):
+ def warn_sometimes(msg, name=None, filename=None):
+ if (hasattr(self.plugins, 'excludedOption') and
+ self.plugins.excludedOption(name)):
+ msg = ("Option %r in config file %r ignored: "
+ "excluded by runtime environment" %
+ (name, filename))
+ warn(msg, RuntimeWarning)
+ else:
+ raise ConfigError(msg)
+ parser = ConfiguredDefaultsOptionParser(
+ self.getParser(), self.configSection, file_error=warn_sometimes)
+ return parser.parseArgsAndConfigFiles(argv[1:], cfg_files)
+
+ def configure(self, argv=None, doc=None):
+ """Configure the nose running environment. Execute configure before
+ collecting tests with nose.TestCollector to enable output capture and
+ other features.
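+
+        A minimal sketch of standalone use (nose.core.collector drives
+        this in much the same way)::
+
+            from nose.plugins.manager import DefaultPluginManager
+            conf = Config(files=all_config_files(),
+                          plugins=DefaultPluginManager())
+            conf.configure(argv=['nosetests', '-v'])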
+ """
+ env = self.env
+ if argv is None:
+ argv = sys.argv
+
+ cfg_files = getattr(self, 'files', [])
+ options, args = self._parseArgs(argv, cfg_files)
+ # If -c --config has been specified on command line,
+ # load those config files and reparse
+ if getattr(options, 'files', []):
+ options, args = self._parseArgs(argv, options.files)
+
+ self.options = options
+ if args:
+ self.testNames = args
+ if options.testNames is not None:
+ self.testNames.extend(tolist(options.testNames))
+
+ if options.py3where is not None:
+ if sys.version_info >= (3,):
+ options.where = options.py3where
+
+ # `where` is an append action, so it can't have a default value
+ # in the parser, or that default will always be in the list
+ if not options.where:
+ options.where = env.get('NOSE_WHERE', None)
+
+ # include and exclude also
+ if not options.ignoreFiles:
+ options.ignoreFiles = env.get('NOSE_IGNORE_FILES', [])
+ if not options.include:
+ options.include = env.get('NOSE_INCLUDE', [])
+ if not options.exclude:
+ options.exclude = env.get('NOSE_EXCLUDE', [])
+
+ self.addPaths = options.addPaths
+ self.stopOnError = options.stopOnError
+ self.verbosity = options.verbosity
+ self.includeExe = options.includeExe
+ self.traverseNamespace = options.traverseNamespace
+ self.debug = options.debug
+ self.debugLog = options.debugLog
+ self.loggingConfig = options.loggingConfig
+ self.firstPackageWins = options.firstPackageWins
+ self.configureLogging()
+
+ if not options.byteCompile:
+ sys.dont_write_bytecode = True
+
+ if options.where is not None:
+ self.configureWhere(options.where)
+
+ if options.testMatch:
+ self.testMatch = re.compile(options.testMatch)
+
+ if options.ignoreFiles:
+ self.ignoreFiles = map(re.compile, tolist(options.ignoreFiles))
+ log.info("Ignoring files matching %s", options.ignoreFiles)
+ else:
+ log.info("Ignoring files matching %s", self.ignoreFilesDefaultStrings)
+
+ if options.include:
+ self.include = map(re.compile, tolist(options.include))
+ log.info("Including tests matching %s", options.include)
+
+ if options.exclude:
+ self.exclude = map(re.compile, tolist(options.exclude))
+ log.info("Excluding tests matching %s", options.exclude)
+
+ # When listing plugins we don't want to run them
+ if not options.showPlugins:
+ self.plugins.configure(options, self)
+ self.plugins.begin()
+
+ def configureLogging(self):
+ """Configure logging for nose, or optionally other packages. Any logger
+ name may be set with the debug option, and that logger will be set to
+ debug level and be assigned the same handler as the nose loggers, unless
+ it already has a handler.
+ """
+ if self.loggingConfig:
+ from logging.config import fileConfig
+ fileConfig(self.loggingConfig)
+ return
+
+ format = logging.Formatter('%(name)s: %(levelname)s: %(message)s')
+ if self.debugLog:
+ handler = logging.FileHandler(self.debugLog)
+ else:
+ handler = logging.StreamHandler(self.logStream)
+ handler.setFormatter(format)
+
+ logger = logging.getLogger('nose')
+ logger.propagate = 0
+
+ # only add our default handler if there isn't already one there
+ # this avoids annoying duplicate log messages.
+ found = False
+ if self.debugLog:
+ debugLogAbsPath = os.path.abspath(self.debugLog)
+ for h in logger.handlers:
+ if type(h) == logging.FileHandler and \
+ h.baseFilename == debugLogAbsPath:
+ found = True
+ else:
+ for h in logger.handlers:
+ if type(h) == logging.StreamHandler and \
+ h.stream == self.logStream:
+ found = True
+ if not found:
+ logger.addHandler(handler)
+
+ # default level
+ lvl = logging.WARNING
+ if self.verbosity >= 5:
+ lvl = 0
+ elif self.verbosity >= 4:
+ lvl = logging.DEBUG
+ elif self.verbosity >= 3:
+ lvl = logging.INFO
+ logger.setLevel(lvl)
+
+ # individual overrides
+ if self.debug:
+            # drop blank names from the comma-separated list
+ debug_loggers = [ name for name in self.debug.split(',')
+ if name ]
+ for logger_name in debug_loggers:
+ l = logging.getLogger(logger_name)
+ l.setLevel(logging.DEBUG)
+ if not l.handlers and not logger_name.startswith('nose'):
+ l.addHandler(handler)
+
+ def configureWhere(self, where):
+ """Configure the working directory or directories for the test run.
+ """
+ from nose.importer import add_path
+ self.workingDir = None
+ where = tolist(where)
+ warned = False
+ for path in where:
+ if not self.workingDir:
+ abs_path = absdir(path)
+ if abs_path is None:
+ raise ValueError("Working directory '%s' not found, or "
+ "not a directory" % path)
+ log.info("Set working dir to %s", abs_path)
+ self.workingDir = abs_path
+ if self.addPaths and \
+ os.path.exists(os.path.join(abs_path, '__init__.py')):
+ log.info("Working directory %s is a package; "
+ "adding to sys.path" % abs_path)
+ add_path(abs_path)
+ continue
+ if not warned:
+ warn("Use of multiple -w arguments is deprecated and "
+ "support may be removed in a future release. You can "
+ "get the same behavior by passing directories without "
+ "the -w argument on the command line, or by using the "
+ "--tests argument in a configuration file.",
+ DeprecationWarning)
+ warned = True
+ self.testNames.append(path)
+
+ def default(self):
+ """Reset all config values to defaults.
+ """
+ self.__dict__.update(self._default)
+
+ def getParser(self, doc=None):
+ """Get the command line option parser.
+ """
+ if self.parser:
+ return self.parser
+ env = self.env
+ parser = self.parserClass(doc)
+ parser.add_option(
+ "-V","--version", action="store_true",
+ dest="version", default=False,
+ help="Output nose version and exit")
+ parser.add_option(
+ "-p", "--plugins", action="store_true",
+ dest="showPlugins", default=False,
+ help="Output list of available plugins and exit. Combine with "
+ "higher verbosity for greater detail")
+ parser.add_option(
+ "-v", "--verbose",
+ action="count", dest="verbosity",
+ default=self.verbosity,
+ help="Be more verbose. [NOSE_VERBOSE]")
+ parser.add_option(
+ "--verbosity", action="store", dest="verbosity",
+ metavar='VERBOSITY',
+ type="int", help="Set verbosity; --verbosity=2 is "
+ "the same as -v")
+ parser.add_option(
+ "-q", "--quiet", action="store_const", const=0, dest="verbosity",
+ help="Be less verbose")
+ parser.add_option(
+ "-c", "--config", action="append", dest="files",
+ metavar="FILES",
+ help="Load configuration from config file(s). May be specified "
+ "multiple times; in that case, all config files will be "
+ "loaded and combined")
+ parser.add_option(
+ "-w", "--where", action="append", dest="where",
+ metavar="WHERE",
+ help="Look for tests in this directory. "
+ "May be specified multiple times. The first directory passed "
+ "will be used as the working directory, in place of the current "
+ "working directory, which is the default. Others will be added "
+ "to the list of tests to execute. [NOSE_WHERE]"
+ )
+ parser.add_option(
+ "--py3where", action="append", dest="py3where",
+ metavar="PY3WHERE",
+ help="Look for tests in this directory under Python 3.x. "
+ "Functions the same as 'where', but only applies if running under "
+ "Python 3.x or above. Note that, if present under 3.x, this "
+ "option completely replaces any directories specified with "
+ "'where', so the 'where' option becomes ineffective. "
+ "[NOSE_PY3WHERE]"
+ )
+ parser.add_option(
+ "-m", "--match", "--testmatch", action="store",
+ dest="testMatch", metavar="REGEX",
+ help="Files, directories, function names, and class names "
+ "that match this regular expression are considered tests. "
+ "Default: %s [NOSE_TESTMATCH]" % self.testMatchPat,
+ default=self.testMatchPat)
+ parser.add_option(
+ "--tests", action="store", dest="testNames", default=None,
+ metavar='NAMES',
+ help="Run these tests (comma-separated list). This argument is "
+ "useful mainly from configuration files; on the command line, "
+ "just pass the tests to run as additional arguments with no "
+ "switch.")
+ parser.add_option(
+ "-l", "--debug", action="store",
+ dest="debug", default=self.debug,
+ help="Activate debug logging for one or more systems. "
+ "Available debug loggers: nose, nose.importer, "
+ "nose.inspector, nose.plugins, nose.result and "
+ "nose.selector. Separate multiple names with a comma.")
+ parser.add_option(
+ "--debug-log", dest="debugLog", action="store",
+ default=self.debugLog, metavar="FILE",
+ help="Log debug messages to this file "
+ "(default: sys.stderr)")
+ parser.add_option(
+ "--logging-config", "--log-config",
+ dest="loggingConfig", action="store",
+ default=self.loggingConfig, metavar="FILE",
+ help="Load logging config from this file -- bypasses all other"
+ " logging config settings.")
+ parser.add_option(
+ "-I", "--ignore-files", action="append", dest="ignoreFiles",
+ metavar="REGEX",
+ help="Completely ignore any file that matches this regular "
+ "expression. Takes precedence over any other settings or "
+ "plugins. "
+ "Specifying this option will replace the default setting. "
+ "Specify this option multiple times "
+ "to add more regular expressions [NOSE_IGNORE_FILES]")
+ parser.add_option(
+ "-e", "--exclude", action="append", dest="exclude",
+ metavar="REGEX",
+ help="Don't run tests that match regular "
+ "expression [NOSE_EXCLUDE]")
+ parser.add_option(
+ "-i", "--include", action="append", dest="include",
+ metavar="REGEX",
+ help="This regular expression will be applied to files, "
+ "directories, function names, and class names for a chance "
+ "to include additional tests that do not match TESTMATCH. "
+ "Specify this option multiple times "
+ "to add more regular expressions [NOSE_INCLUDE]")
+ parser.add_option(
+ "-x", "--stop", action="store_true", dest="stopOnError",
+ default=self.stopOnError,
+ help="Stop running tests after the first error or failure")
+ parser.add_option(
+ "-P", "--no-path-adjustment", action="store_false",
+ dest="addPaths",
+ default=self.addPaths,
+ help="Don't make any changes to sys.path when "
+ "loading tests [NOSE_NOPATH]")
+ parser.add_option(
+ "--exe", action="store_true", dest="includeExe",
+ default=self.includeExe,
+ help="Look for tests in python modules that are "
+ "executable. Normal behavior is to exclude executable "
+ "modules, since they may not be import-safe "
+ "[NOSE_INCLUDE_EXE]")
+ parser.add_option(
+ "--noexe", action="store_false", dest="includeExe",
+ help="DO NOT look for tests in python modules that are "
+ "executable. (The default on the windows platform is to "
+ "do so.)")
+ parser.add_option(
+ "--traverse-namespace", action="store_true",
+ default=self.traverseNamespace, dest="traverseNamespace",
+ help="Traverse through all path entries of a namespace package")
+ parser.add_option(
+ "--first-package-wins", "--first-pkg-wins", "--1st-pkg-wins",
+ action="store_true", default=False, dest="firstPackageWins",
+ help="nose's importer will normally evict a package from sys."
+ "modules if it sees a package with the same name in a different "
+ "location. Set this option to disable that behavior.")
+ parser.add_option(
+ "--no-byte-compile",
+ action="store_false", default=True, dest="byteCompile",
+ help="Prevent nose from byte-compiling the source into .pyc files "
+ "while nose is scanning for and running tests.")
+
+ self.plugins.loadPlugins()
+ self.pluginOpts(parser)
+
+ self.parser = parser
+ return parser
+
+ def help(self, doc=None):
+ """Return the generated help message
+ """
+ return self.getParser(doc).format_help()
+
+ def pluginOpts(self, parser):
+ self.plugins.addOptions(parser, self.env)
+
+ def reset(self):
+ self.__dict__.update(self._orig)
+
+ def todict(self):
+ return self.__dict__.copy()
+
+ def update(self, d):
+ self.__dict__.update(d)
+
+
+class NoOptions(object):
+ """Options container that returns None for all options.
+ """
+ def __getstate__(self):
+ return {}
+
+ def __setstate__(self, state):
+ pass
+
+ def __getnewargs__(self):
+ return ()
+
+ def __nonzero__(self):
+ return False
+
+
+def user_config_files():
+ """Return path to any existing user config files
+ """
+ return filter(os.path.exists,
+ map(os.path.expanduser, config_files))
+
+
+def all_config_files():
+ """Return path to any existing user config files, plus any setup.cfg
+ in the current working directory.
+ """
+ user = user_config_files()
+ if os.path.exists('setup.cfg'):
+ return user + ['setup.cfg']
+ return user
+
+
+# used when parsing config files
+def flag(val):
+ """Does the value look like an on/off flag?"""
+ if val == 1:
+ return True
+ elif val == 0:
+ return False
+ val = str(val)
+ if len(val) > 5:
+ return False
+ return val.upper() in ('1', '0', 'F', 'T', 'TRUE', 'FALSE', 'ON', 'OFF')
+
+
+def _bool(val):
+ return str(val).upper() in ('1', 'T', 'TRUE', 'ON')
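+
+# Illustrative: flag('1'), flag('off') and flag('True') all return True
+# (each looks like an on/off value) while flag('verbose') returns
+# False; _bool maps '1'/'T'/'TRUE'/'ON' (any case) to True.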
diff --git a/lib/spack/external/nose/core.py b/lib/spack/external/nose/core.py
new file mode 100644
index 0000000000..49e7939b98
--- /dev/null
+++ b/lib/spack/external/nose/core.py
@@ -0,0 +1,341 @@
+"""Implements nose test program and collector.
+"""
+from __future__ import generators
+
+import logging
+import os
+import sys
+import time
+import unittest
+
+from nose.config import Config, all_config_files
+from nose.loader import defaultTestLoader
+from nose.plugins.manager import PluginManager, DefaultPluginManager, \
+ RestrictedPluginManager
+from nose.result import TextTestResult
+from nose.suite import FinalizingSuiteWrapper
+from nose.util import isclass, tolist
+
+
+log = logging.getLogger('nose.core')
+compat_24 = sys.version_info >= (2, 4)
+
+__all__ = ['TestProgram', 'main', 'run', 'run_exit', 'runmodule', 'collector',
+ 'TextTestRunner']
+
+
+class TextTestRunner(unittest.TextTestRunner):
+ """Test runner that uses nose's TextTestResult to enable errorClasses,
+ as well as providing hooks for plugins to override or replace the test
+ output stream, results, and the test case itself.
+ """
+ def __init__(self, stream=sys.stderr, descriptions=1, verbosity=1,
+ config=None):
+ if config is None:
+ config = Config()
+ self.config = config
+ unittest.TextTestRunner.__init__(self, stream, descriptions, verbosity)
+
+
+ def _makeResult(self):
+ return TextTestResult(self.stream,
+ self.descriptions,
+ self.verbosity,
+ self.config)
+
+ def run(self, test):
+ """Overrides to provide plugin hooks and defer all output to
+ the test result class.
+ """
+ wrapper = self.config.plugins.prepareTest(test)
+ if wrapper is not None:
+ test = wrapper
+
+ # plugins can decorate or capture the output stream
+ wrapped = self.config.plugins.setOutputStream(self.stream)
+ if wrapped is not None:
+ self.stream = wrapped
+
+ result = self._makeResult()
+ start = time.time()
+ try:
+ test(result)
+ except KeyboardInterrupt:
+ pass
+ stop = time.time()
+ result.printErrors()
+ result.printSummary(start, stop)
+ self.config.plugins.finalize(result)
+ return result
+
+
+class TestProgram(unittest.TestProgram):
+ """Collect and run tests, returning success or failure.
+
+ The arguments to TestProgram() are the same as to
+ :func:`main()` and :func:`run()`:
+
+ * module: All tests are in this module (default: None)
+ * defaultTest: Tests to load (default: '.')
+ * argv: Command line arguments (default: None; sys.argv is read)
+ * testRunner: Test runner instance (default: None)
+ * testLoader: Test loader instance (default: None)
+ * env: Environment; ignored if config is provided (default: None;
+ os.environ is read)
+ * config: :class:`nose.config.Config` instance (default: None)
+ * suite: Suite or list of tests to run (default: None). Passing a
+ suite or lists of tests will bypass all test discovery and
+ loading. *ALSO NOTE* that if you pass a unittest.TestSuite
+ instance as the suite, context fixtures at the class, module and
+ package level will not be used, and many plugin hooks will not
+ be called. If you want normal nose behavior, either pass a list
+ of tests, or a fully-configured :class:`nose.suite.ContextSuite`.
+ * exit: Exit after running tests and printing report (default: True)
+ * plugins: List of plugins to use; ignored if config is provided
+ (default: load plugins with DefaultPluginManager)
+ * addplugins: List of **extra** plugins to use. Pass a list of plugin
+ instances in this argument to make custom plugins available while
+ still using the DefaultPluginManager.
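+
+    For example, to run tests without exiting the interpreter
+    (a sketch)::
+
+        TestProgram(argv=['nosetests', '-v'], exit=False)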
+ """
+ verbosity = 1
+
+ def __init__(self, module=None, defaultTest='.', argv=None,
+ testRunner=None, testLoader=None, env=None, config=None,
+ suite=None, exit=True, plugins=None, addplugins=None):
+ if env is None:
+ env = os.environ
+ if config is None:
+ config = self.makeConfig(env, plugins)
+ if addplugins:
+ config.plugins.addPlugins(extraplugins=addplugins)
+ self.config = config
+ self.suite = suite
+ self.exit = exit
+ extra_args = {}
+ version = sys.version_info[0:2]
+ if version >= (2,7) and version != (3,0):
+ extra_args['exit'] = exit
+ unittest.TestProgram.__init__(
+ self, module=module, defaultTest=defaultTest,
+ argv=argv, testRunner=testRunner, testLoader=testLoader,
+ **extra_args)
+
+ def getAllConfigFiles(self, env=None):
+ env = env or {}
+ if env.get('NOSE_IGNORE_CONFIG_FILES', False):
+ return []
+ else:
+ return all_config_files()
+
+ def makeConfig(self, env, plugins=None):
+ """Load a Config, pre-filled with user config files if any are
+ found.
+ """
+ cfg_files = self.getAllConfigFiles(env)
+ if plugins:
+ manager = PluginManager(plugins=plugins)
+ else:
+ manager = DefaultPluginManager()
+ return Config(
+ env=env, files=cfg_files, plugins=manager)
+
+ def parseArgs(self, argv):
+ """Parse argv and env and configure running environment.
+ """
+ self.config.configure(argv, doc=self.usage())
+ log.debug("configured %s", self.config)
+
+ # quick outs: version, plugins (optparse would have already
+ # caught and exited on help)
+ if self.config.options.version:
+ from nose import __version__
+ sys.stdout = sys.__stdout__
+ print "%s version %s" % (os.path.basename(sys.argv[0]), __version__)
+ sys.exit(0)
+
+ if self.config.options.showPlugins:
+ self.showPlugins()
+ sys.exit(0)
+
+ if self.testLoader is None:
+ self.testLoader = defaultTestLoader(config=self.config)
+ elif isclass(self.testLoader):
+ self.testLoader = self.testLoader(config=self.config)
+ plug_loader = self.config.plugins.prepareTestLoader(self.testLoader)
+ if plug_loader is not None:
+ self.testLoader = plug_loader
+ log.debug("test loader is %s", self.testLoader)
+
+ # FIXME if self.module is a string, add it to self.testNames? not sure
+
+ if self.config.testNames:
+ self.testNames = self.config.testNames
+ else:
+ self.testNames = tolist(self.defaultTest)
+ log.debug('defaultTest %s', self.defaultTest)
+ log.debug('Test names are %s', self.testNames)
+ if self.config.workingDir is not None:
+ os.chdir(self.config.workingDir)
+ self.createTests()
+
+ def createTests(self):
+ """Create the tests to run. If a self.suite
+ is set, then that suite will be used. Otherwise, tests will be
+ loaded from the given test names (self.testNames) using the
+ test loader.
+ """
+ log.debug("createTests called with %s", self.suite)
+ if self.suite is not None:
+ # We were given an explicit suite to run. Make sure it's
+ # loaded and wrapped correctly.
+ self.test = self.testLoader.suiteClass(self.suite)
+ else:
+ self.test = self.testLoader.loadTestsFromNames(self.testNames)
+
+ def runTests(self):
+ """Run Tests. Returns true on success, false on failure, and sets
+ self.success to the same value.
+ """
+ log.debug("runTests called")
+ if self.testRunner is None:
+ self.testRunner = TextTestRunner(stream=self.config.stream,
+ verbosity=self.config.verbosity,
+ config=self.config)
+ plug_runner = self.config.plugins.prepareTestRunner(self.testRunner)
+ if plug_runner is not None:
+ self.testRunner = plug_runner
+ result = self.testRunner.run(self.test)
+ self.success = result.wasSuccessful()
+ if self.exit:
+ sys.exit(not self.success)
+ return self.success
+
+ def showPlugins(self):
+ """Print list of available plugins.
+ """
+ import textwrap
+
+ class DummyParser:
+ def __init__(self):
+ self.options = []
+ def add_option(self, *arg, **kw):
+ self.options.append((arg, kw.pop('help', '')))
+
+ v = self.config.verbosity
+ self.config.plugins.sort()
+ for p in self.config.plugins:
+ print "Plugin %s" % p.name
+ if v >= 2:
+ print " score: %s" % p.score
+ print '\n'.join(textwrap.wrap(p.help().strip(),
+ initial_indent=' ',
+ subsequent_indent=' '))
+ if v >= 3:
+ parser = DummyParser()
+ p.addOptions(parser)
+ if len(parser.options):
+ print
+ print " Options:"
+ for opts, help in parser.options:
+ print ' %s' % (', '.join(opts))
+ if help:
+ print '\n'.join(
+ textwrap.wrap(help.strip(),
+ initial_indent=' ',
+ subsequent_indent=' '))
+ print
+
+ def usage(cls):
+ import nose
+ try:
+ ld = nose.__loader__
+ text = ld.get_data(os.path.join(
+ os.path.dirname(__file__), 'usage.txt'))
+ except AttributeError:
+ f = open(os.path.join(
+ os.path.dirname(__file__), 'usage.txt'), 'r')
+ try:
+ text = f.read()
+ finally:
+ f.close()
+ # Ensure that we return str, not bytes.
+ if not isinstance(text, str):
+ text = text.decode('utf-8')
+ return text
+ usage = classmethod(usage)
+
+# backwards compatibility
+run_exit = main = TestProgram
+
+
+def run(*arg, **kw):
+ """Collect and run tests, returning success or failure.
+
+ The arguments to `run()` are the same as to `main()`:
+
+ * module: All tests are in this module (default: None)
+ * defaultTest: Tests to load (default: '.')
+ * argv: Command line arguments (default: None; sys.argv is read)
+ * testRunner: Test runner instance (default: None)
+ * testLoader: Test loader instance (default: None)
+ * env: Environment; ignored if config is provided (default: None;
+ os.environ is read)
+ * config: :class:`nose.config.Config` instance (default: None)
+ * suite: Suite or list of tests to run (default: None). Passing a
+ suite or lists of tests will bypass all test discovery and
+ loading. *ALSO NOTE* that if you pass a unittest.TestSuite
+ instance as the suite, context fixtures at the class, module and
+ package level will not be used, and many plugin hooks will not
+ be called. If you want normal nose behavior, either pass a list
+ of tests, or a fully-configured :class:`nose.suite.ContextSuite`.
+ * plugins: List of plugins to use; ignored if config is provided
+ (default: load plugins with DefaultPluginManager)
+ * addplugins: List of **extra** plugins to use. Pass a list of plugin
+ instances in this argument to make custom plugins available while
+ still using the DefaultPluginManager.
+
+ With the exception that the ``exit`` argument is always set
+ to False.
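+
+    For example (a minimal sketch; the test name is a placeholder)::
+
+        import nose
+        success = nose.run(argv=['nosetests', 'mypackage.tests'])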
+ """
+ kw['exit'] = False
+ return TestProgram(*arg, **kw).success
+
+
+def runmodule(name='__main__', **kw):
+ """Collect and run tests in a single module only. Defaults to running
+ tests in __main__. Additional arguments to TestProgram may be passed
+ as keyword arguments.
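+
+    For example, at the bottom of a test module (a sketch)::
+
+        if __name__ == '__main__':
+            import nose
+            nose.runmodule()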
+ """
+ main(defaultTest=name, **kw)
+
+
+def collector():
+ """TestSuite replacement entry point. Use anywhere you might use a
+ unittest.TestSuite. The collector will, by default, load options from
+ all config files and execute loader.loadTestsFromNames() on the
+ configured testNames, or '.' if no testNames are configured.
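+
+    For example, in a setup.py (a sketch)::
+
+        setup(
+            # ...
+            test_suite='nose.collector'
+        )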
+ """
+ # plugins that implement any of these methods are disabled, since
+ # we don't control the test runner and won't be able to run them
+ # finalize() is also not called, but plugins that use it aren't disabled,
+ # because capture needs it.
+ setuptools_incompat = ('report', 'prepareTest',
+ 'prepareTestLoader', 'prepareTestRunner',
+ 'setOutputStream')
+
+ plugins = RestrictedPluginManager(exclude=setuptools_incompat)
+ conf = Config(files=all_config_files(),
+ plugins=plugins)
+ conf.configure(argv=['collector'])
+ loader = defaultTestLoader(conf)
+
+ if conf.testNames:
+ suite = loader.loadTestsFromNames(conf.testNames)
+ else:
+ suite = loader.loadTestsFromNames(('.',))
+ return FinalizingSuiteWrapper(suite, plugins.finalize)
+
+
+
+if __name__ == '__main__':
+ main()
diff --git a/lib/spack/external/nose/exc.py b/lib/spack/external/nose/exc.py
new file mode 100644
index 0000000000..8b780db0d4
--- /dev/null
+++ b/lib/spack/external/nose/exc.py
@@ -0,0 +1,9 @@
+"""Exceptions for marking tests as skipped or deprecated.
+
+This module exists to provide backwards compatibility with previous
+versions of nose where skipped and deprecated tests were core
+functionality, rather than being provided by plugins. It may be
+removed in a future release.
+"""
+from nose.plugins.skip import SkipTest
+from nose.plugins.deprecated import DeprecatedTest
diff --git a/lib/spack/external/nose/ext/__init__.py b/lib/spack/external/nose/ext/__init__.py
new file mode 100644
index 0000000000..5fd1516a09
--- /dev/null
+++ b/lib/spack/external/nose/ext/__init__.py
@@ -0,0 +1,3 @@
+"""
+External or vendor files
+"""
diff --git a/lib/spack/external/nose/ext/dtcompat.py b/lib/spack/external/nose/ext/dtcompat.py
new file mode 100644
index 0000000000..332cf08c12
--- /dev/null
+++ b/lib/spack/external/nose/ext/dtcompat.py
@@ -0,0 +1,2272 @@
+# Module doctest.
+# Released to the public domain 16-Jan-2001, by Tim Peters (tim@python.org).
+# Major enhancements and refactoring by:
+# Jim Fulton
+# Edward Loper
+
+# Provided as-is; use at your own risk; no warranty; no promises; enjoy!
+#
+# Modified for inclusion in nose to provide support for DocFileTest in
+# python 2.3:
+#
+# - all doctests removed from module (they fail under 2.3 and 2.5)
+# - now handles the $py.class extension when ran under Jython
+
+r"""Module doctest -- a framework for running examples in docstrings.
+
+In simplest use, end each module M to be tested with:
+
+def _test():
+ import doctest
+ doctest.testmod()
+
+if __name__ == "__main__":
+ _test()
+
+Then running the module as a script will cause the examples in the
+docstrings to get executed and verified:
+
+python M.py
+
+This won't display anything unless an example fails, in which case the
+failing example(s) and the cause(s) of the failure(s) are printed to stdout
+(why not stderr? because stderr is a lame hack <0.2 wink>), and the final
+line of output is "Test failed.".
+
+Run it with the -v switch instead:
+
+python M.py -v
+
+and a detailed report of all examples tried is printed to stdout, along
+with assorted summaries at the end.
+
+You can force verbose mode by passing "verbose=True" to testmod, or prohibit
+it by passing "verbose=False". In either of those cases, sys.argv is not
+examined by testmod.
+
+There are a variety of other ways to run doctests, including integration
+with the unittest framework, and support for running non-Python text
+files containing doctests. There are also many ways to override parts
+of doctest's default behaviors. See the Library Reference Manual for
+details.
+"""
+
+__docformat__ = 'reStructuredText en'
+
+__all__ = [
+ # 0, Option Flags
+ 'register_optionflag',
+ 'DONT_ACCEPT_TRUE_FOR_1',
+ 'DONT_ACCEPT_BLANKLINE',
+ 'NORMALIZE_WHITESPACE',
+ 'ELLIPSIS',
+ 'IGNORE_EXCEPTION_DETAIL',
+ 'COMPARISON_FLAGS',
+ 'REPORT_UDIFF',
+ 'REPORT_CDIFF',
+ 'REPORT_NDIFF',
+ 'REPORT_ONLY_FIRST_FAILURE',
+ 'REPORTING_FLAGS',
+ # 1. Utility Functions
+ 'is_private',
+ # 2. Example & DocTest
+ 'Example',
+ 'DocTest',
+ # 3. Doctest Parser
+ 'DocTestParser',
+ # 4. Doctest Finder
+ 'DocTestFinder',
+ # 5. Doctest Runner
+ 'DocTestRunner',
+ 'OutputChecker',
+ 'DocTestFailure',
+ 'UnexpectedException',
+ 'DebugRunner',
+ # 6. Test Functions
+ 'testmod',
+ 'testfile',
+ 'run_docstring_examples',
+ # 7. Tester
+ 'Tester',
+ # 8. Unittest Support
+ 'DocTestSuite',
+ 'DocFileSuite',
+ 'set_unittest_reportflags',
+ # 9. Debugging Support
+ 'script_from_examples',
+ 'testsource',
+ 'debug_src',
+ 'debug',
+]
+
+import __future__
+
+import sys, traceback, inspect, linecache, os, re
+import unittest, difflib, pdb, tempfile
+import warnings
+from StringIO import StringIO
+
+# Don't whine about the deprecated is_private function in this
+# module's tests.
+warnings.filterwarnings("ignore", "is_private", DeprecationWarning,
+ __name__, 0)
+
+# There are 4 basic classes:
+# - Example: a <source, want> pair, plus an intra-docstring line number.
+# - DocTest: a collection of examples, parsed from a docstring, plus
+# info about where the docstring came from (name, filename, lineno).
+# - DocTestFinder: extracts DocTests from a given object's docstring and
+# its contained objects' docstrings.
+# - DocTestRunner: runs DocTest cases, and accumulates statistics.
+#
+# So the basic picture is:
+#
+# list of:
+# +------+ +---------+ +-------+
+# |object| --DocTestFinder-> | DocTest | --DocTestRunner-> |results|
+# +------+ +---------+ +-------+
+# | Example |
+# | ... |
+# | Example |
+# +---------+
+
+# Option constants.
+
+OPTIONFLAGS_BY_NAME = {}
+def register_optionflag(name):
+ # Create a new flag unless `name` is already known.
+ return OPTIONFLAGS_BY_NAME.setdefault(name, 1 << len(OPTIONFLAGS_BY_NAME))
+
+DONT_ACCEPT_TRUE_FOR_1 = register_optionflag('DONT_ACCEPT_TRUE_FOR_1')
+DONT_ACCEPT_BLANKLINE = register_optionflag('DONT_ACCEPT_BLANKLINE')
+NORMALIZE_WHITESPACE = register_optionflag('NORMALIZE_WHITESPACE')
+ELLIPSIS = register_optionflag('ELLIPSIS')
+IGNORE_EXCEPTION_DETAIL = register_optionflag('IGNORE_EXCEPTION_DETAIL')
+
+COMPARISON_FLAGS = (DONT_ACCEPT_TRUE_FOR_1 |
+ DONT_ACCEPT_BLANKLINE |
+ NORMALIZE_WHITESPACE |
+ ELLIPSIS |
+ IGNORE_EXCEPTION_DETAIL)
+
+REPORT_UDIFF = register_optionflag('REPORT_UDIFF')
+REPORT_CDIFF = register_optionflag('REPORT_CDIFF')
+REPORT_NDIFF = register_optionflag('REPORT_NDIFF')
+REPORT_ONLY_FIRST_FAILURE = register_optionflag('REPORT_ONLY_FIRST_FAILURE')
+
+REPORTING_FLAGS = (REPORT_UDIFF |
+ REPORT_CDIFF |
+ REPORT_NDIFF |
+ REPORT_ONLY_FIRST_FAILURE)
+
+# Special string markers for use in `want` strings:
+BLANKLINE_MARKER = '<BLANKLINE>'
+ELLIPSIS_MARKER = '...'
+
+######################################################################
+## Table of Contents
+######################################################################
+# 1. Utility Functions
+# 2. Example & DocTest -- store test cases
+# 3. DocTest Parser -- extracts examples from strings
+# 4. DocTest Finder -- extracts test cases from objects
+# 5. DocTest Runner -- runs test cases
+# 6. Test Functions -- convenient wrappers for testing
+# 7. Tester Class -- for backwards compatibility
+# 8. Unittest Support
+# 9. Debugging Support
+# 10. Example Usage
+
+######################################################################
+## 1. Utility Functions
+######################################################################
+
+def is_private(prefix, base):
+ """prefix, base -> true iff name prefix + "." + base is "private".
+
+ Prefix may be an empty string, and base does not contain a period.
+ Prefix is ignored (although functions you write conforming to this
+ protocol may make use of it).
+    Return true iff base begins with at least one underscore, but
+ does not both begin and end with (at least) two underscores.
+ """
+ warnings.warn("is_private is deprecated; it wasn't useful; "
+ "examine DocTestFinder.find() lists instead",
+ DeprecationWarning, stacklevel=2)
+ return base[:1] == "_" and not base[:2] == "__" == base[-2:]
+
+def _extract_future_flags(globs):
+ """
+ Return the compiler-flags associated with the future features that
+ have been imported into the given namespace (globs).
+ """
+ flags = 0
+ for fname in __future__.all_feature_names:
+ feature = globs.get(fname, None)
+ if feature is getattr(__future__, fname):
+ flags |= feature.compiler_flag
+ return flags
+
+def _normalize_module(module, depth=2):
+ """
+ Return the module specified by `module`. In particular:
+ - If `module` is a module, then return module.
+ - If `module` is a string, then import and return the
+ module with that name.
+ - If `module` is None, then return the calling module.
+ The calling module is assumed to be the module of
+ the stack frame at the given depth in the call stack.
+ """
+ if inspect.ismodule(module):
+ return module
+ elif isinstance(module, (str, unicode)):
+ return __import__(module, globals(), locals(), ["*"])
+ elif module is None:
+ return sys.modules[sys._getframe(depth).f_globals['__name__']]
+ else:
+ raise TypeError("Expected a module, string, or None")
+
+def _indent(s, indent=4):
+ """
+    Add the given number of space characters to the beginning of every
+ non-blank line in `s`, and return the result.
+ """
+ # This regexp matches the start of non-blank lines:
+ return re.sub('(?m)^(?!$)', indent*' ', s)
+
+def _exception_traceback(exc_info):
+ """
+ Return a string containing a traceback message for the given
+ exc_info tuple (as returned by sys.exc_info()).
+ """
+ # Get a traceback message.
+ excout = StringIO()
+ exc_type, exc_val, exc_tb = exc_info
+ traceback.print_exception(exc_type, exc_val, exc_tb, file=excout)
+ return excout.getvalue()
+
+# Override some StringIO methods.
+class _SpoofOut(StringIO):
+ def getvalue(self):
+ result = StringIO.getvalue(self)
+ # If anything at all was written, make sure there's a trailing
+ # newline. There's no way for the expected output to indicate
+ # that a trailing newline is missing.
+ if result and not result.endswith("\n"):
+ result += "\n"
+ # Prevent softspace from screwing up the next test case, in
+ # case they used print with a trailing comma in an example.
+ if hasattr(self, "softspace"):
+ del self.softspace
+ return result
+
+ def truncate(self, size=None):
+ StringIO.truncate(self, size)
+ if hasattr(self, "softspace"):
+ del self.softspace
+
+# Worst-case linear-time ellipsis matching.
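+# For example (illustrative): _ellipsis_match('a...c', 'abc') is True,
+# while _ellipsis_match('a...c', 'abd') is False.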
+def _ellipsis_match(want, got):
+ if ELLIPSIS_MARKER not in want:
+ return want == got
+
+ # Find "the real" strings.
+ ws = want.split(ELLIPSIS_MARKER)
+ assert len(ws) >= 2
+
+ # Deal with exact matches possibly needed at one or both ends.
+ startpos, endpos = 0, len(got)
+ w = ws[0]
+ if w: # starts with exact match
+ if got.startswith(w):
+ startpos = len(w)
+ del ws[0]
+ else:
+ return False
+ w = ws[-1]
+ if w: # ends with exact match
+ if got.endswith(w):
+ endpos -= len(w)
+ del ws[-1]
+ else:
+ return False
+
+ if startpos > endpos:
+ # Exact end matches required more characters than we have, as in
+ # _ellipsis_match('aa...aa', 'aaa')
+ return False
+
+ # For the rest, we only need to find the leftmost non-overlapping
+ # match for each piece. If there's no overall match that way alone,
+ # there's no overall match period.
+ for w in ws:
+ # w may be '' at times, if there are consecutive ellipses, or
+ # due to an ellipsis at the start or end of `want`. That's OK.
+ # Search for an empty string succeeds, and doesn't change startpos.
+ startpos = got.find(w, startpos, endpos)
+ if startpos < 0:
+ return False
+ startpos += len(w)
+
+ return True
+
+def _comment_line(line):
+ "Return a commented form of the given line"
+ line = line.rstrip()
+ if line:
+ return '# '+line
+ else:
+ return '#'
+
+class _OutputRedirectingPdb(pdb.Pdb):
+ """
+ A specialized version of the python debugger that redirects stdout
+ to a given stream when interacting with the user. Stdout is *not*
+ redirected when traced code is executed.
+ """
+ def __init__(self, out):
+ self.__out = out
+ pdb.Pdb.__init__(self)
+
+ def trace_dispatch(self, *args):
+ # Redirect stdout to the given stream.
+ save_stdout = sys.stdout
+ sys.stdout = self.__out
+ # Call Pdb's trace dispatch method.
+ try:
+ return pdb.Pdb.trace_dispatch(self, *args)
+ finally:
+ sys.stdout = save_stdout
+
+# [XX] Normalize with respect to os.path.pardir?
+def _module_relative_path(module, path):
+ if not inspect.ismodule(module):
+ raise TypeError, 'Expected a module: %r' % module
+ if path.startswith('/'):
+ raise ValueError, 'Module-relative files may not have absolute paths'
+
+ # Find the base directory for the path.
+ if hasattr(module, '__file__'):
+ # A normal module/package
+ basedir = os.path.split(module.__file__)[0]
+ elif module.__name__ == '__main__':
+ # An interactive session.
+ if len(sys.argv)>0 and sys.argv[0] != '':
+ basedir = os.path.split(sys.argv[0])[0]
+ else:
+ basedir = os.curdir
+ else:
+ # A module w/o __file__ (this includes builtins)
+ raise ValueError("Can't resolve paths relative to the module " +
+ module + " (it has no __file__)")
+
+ # Combine the base directory and the path.
+ return os.path.join(basedir, *(path.split('/')))
+
+######################################################################
+## 2. Example & DocTest
+######################################################################
+## - An "example" is a <source, want> pair, where "source" is a
+## fragment of source code, and "want" is the expected output for
+## "source." The Example class also includes information about
+## where the example was extracted from.
+##
+## - A "doctest" is a collection of examples, typically extracted from
+## a string (such as an object's docstring). The DocTest class also
+## includes information about where the string was extracted from.
+
+class Example:
+ """
+ A single doctest example, consisting of source code and expected
+ output. `Example` defines the following attributes:
+
+ - source: A single Python statement, always ending with a newline.
+ The constructor adds a newline if needed.
+
+ - want: The expected output from running the source code (either
+ from stdout, or a traceback in case of exception). `want` ends
+ with a newline unless it's empty, in which case it's an empty
+ string. The constructor adds a newline if needed.
+
+ - exc_msg: The exception message generated by the example, if
+ the example is expected to generate an exception; or `None` if
+ it is not expected to generate an exception. This exception
+ message is compared against the return value of
+ `traceback.format_exception_only()`. `exc_msg` ends with a
+ newline unless it's `None`. The constructor adds a newline
+ if needed.
+
+ - lineno: The line number within the DocTest string containing
+ this Example where the Example begins. This line number is
+ zero-based, with respect to the beginning of the DocTest.
+
+ - indent: The example's indentation in the DocTest string.
+      I.e., the number of space characters that precede the
+ example's first prompt.
+
+ - options: A dictionary mapping from option flags to True or
+ False, which is used to override default options for this
+ example. Any option flags not contained in this dictionary
+ are left at their default value (as specified by the
+ DocTestRunner's optionflags). By default, no options are set.
+ """
+ def __init__(self, source, want, exc_msg=None, lineno=0, indent=0,
+ options=None):
+ # Normalize inputs.
+ if not source.endswith('\n'):
+ source += '\n'
+ if want and not want.endswith('\n'):
+ want += '\n'
+ if exc_msg is not None and not exc_msg.endswith('\n'):
+ exc_msg += '\n'
+ # Store properties.
+ self.source = source
+ self.want = want
+ self.lineno = lineno
+ self.indent = indent
+ if options is None: options = {}
+ self.options = options
+ self.exc_msg = exc_msg
+
+class DocTest:
+ """
+ A collection of doctest examples that should be run in a single
+ namespace. Each `DocTest` defines the following attributes:
+
+ - examples: the list of examples.
+
+ - globs: The namespace (aka globals) that the examples should
+ be run in.
+
+ - name: A name identifying the DocTest (typically, the name of
+ the object whose docstring this DocTest was extracted from).
+
+ - filename: The name of the file that this DocTest was extracted
+ from, or `None` if the filename is unknown.
+
+ - lineno: The line number within filename where this DocTest
+ begins, or `None` if the line number is unavailable. This
+ line number is zero-based, with respect to the beginning of
+ the file.
+
+ - docstring: The string that the examples were extracted from,
+ or `None` if the string is unavailable.
+ """
+ def __init__(self, examples, globs, name, filename, lineno, docstring):
+ """
+ Create a new DocTest containing the given examples. The
+ DocTest's globals are initialized with a copy of `globs`.
+ """
+ assert not isinstance(examples, basestring), \
+ "DocTest no longer accepts str; use DocTestParser instead"
+ self.examples = examples
+ self.docstring = docstring
+ self.globs = globs.copy()
+ self.name = name
+ self.filename = filename
+ self.lineno = lineno
+
+ def __repr__(self):
+ if len(self.examples) == 0:
+ examples = 'no examples'
+ elif len(self.examples) == 1:
+ examples = '1 example'
+ else:
+ examples = '%d examples' % len(self.examples)
+ return ('<DocTest %s from %s:%s (%s)>' %
+ (self.name, self.filename, self.lineno, examples))
+
+
+ # This lets us sort tests by name:
+ def __cmp__(self, other):
+ if not isinstance(other, DocTest):
+ return -1
+ return cmp((self.name, self.filename, self.lineno, id(self)),
+ (other.name, other.filename, other.lineno, id(other)))
+
+######################################################################
+## 3. DocTestParser
+######################################################################
+
+class DocTestParser:
+ """
+ A class used to parse strings containing doctest examples.
+ """
+ # This regular expression is used to find doctest examples in a
+ # string. It defines three groups: `source` is the source code
+ # (including leading indentation and prompts); `indent` is the
+ # indentation of the first (PS1) line of the source code; and
+ # `want` is the expected output (including leading indentation).
+ _EXAMPLE_RE = re.compile(r'''
+ # Source consists of a PS1 line followed by zero or more PS2 lines.
+ (?P<source>
+ (?:^(?P<indent> [ ]*) >>> .*) # PS1 line
+ (?:\n [ ]* \.\.\. .*)*) # PS2 lines
+ \n?
+ # Want consists of any non-blank lines that do not start with PS1.
+ (?P<want> (?:(?![ ]*$) # Not a blank line
+ (?![ ]*>>>) # Not a line starting with PS1
+ .*$\n? # But any other line
+ )*)
+ ''', re.MULTILINE | re.VERBOSE)
+
+ # A regular expression for handling `want` strings that contain
+ # expected exceptions. It divides `want` into three pieces:
+ # - the traceback header line (`hdr`)
+ # - the traceback stack (`stack`)
+ # - the exception message (`msg`), as generated by
+ # traceback.format_exception_only()
+ # `msg` may have multiple lines. We assume/require that the
+ # exception message is the first non-indented line starting with a word
+ # character following the traceback header line.
+ _EXCEPTION_RE = re.compile(r"""
+ # Grab the traceback header. Different versions of Python have
+ # said different things on the first traceback line.
+ ^(?P<hdr> Traceback\ \(
+ (?: most\ recent\ call\ last
+ | innermost\ last
+ ) \) :
+ )
+ \s* $ # toss trailing whitespace on the header.
+ (?P<stack> .*?) # don't blink: absorb stuff until...
+ ^ (?P<msg> \w+ .*) # a line *starts* with alphanum.
+ """, re.VERBOSE | re.MULTILINE | re.DOTALL)
+
+ # A callable returning a true value iff its argument is a blank line
+ # or contains a single comment.
+ _IS_BLANK_OR_COMMENT = re.compile(r'^[ ]*(#.*)?$').match
+
+ def parse(self, string, name='<string>'):
+ """
+ Divide the given string into examples and intervening text,
+ and return them as a list of alternating Examples and strings.
+ Line numbers for the Examples are 0-based. The optional
+ argument `name` is a name identifying this string, and is only
+ used for error messages.
+ """
+ string = string.expandtabs()
+ # If all lines begin with the same indentation, then strip it.
+ min_indent = self._min_indent(string)
+ if min_indent > 0:
+ string = '\n'.join([l[min_indent:] for l in string.split('\n')])
+
+ output = []
+ charno, lineno = 0, 0
+ # Find all doctest examples in the string:
+ for m in self._EXAMPLE_RE.finditer(string):
+ # Add the pre-example text to `output`.
+ output.append(string[charno:m.start()])
+ # Update lineno (lines before this example)
+ lineno += string.count('\n', charno, m.start())
+ # Extract info from the regexp match.
+ (source, options, want, exc_msg) = \
+ self._parse_example(m, name, lineno)
+ # Create an Example, and add it to the list.
+ if not self._IS_BLANK_OR_COMMENT(source):
+ output.append( Example(source, want, exc_msg,
+ lineno=lineno,
+ indent=min_indent+len(m.group('indent')),
+ options=options) )
+ # Update lineno (lines inside this example)
+ lineno += string.count('\n', m.start(), m.end())
+ # Update charno.
+ charno = m.end()
+ # Add any remaining post-example text to `output`.
+ output.append(string[charno:])
+ return output
+
+ def get_doctest(self, string, globs, name, filename, lineno):
+ """
+ Extract all doctest examples from the given string, and
+ collect them into a `DocTest` object.
+
+ `globs`, `name`, `filename`, and `lineno` are attributes for
+ the new `DocTest` object. See the documentation for `DocTest`
+ for more information.
+ """
+ return DocTest(self.get_examples(string, name), globs,
+ name, filename, lineno, string)
+
+ def get_examples(self, string, name='<string>'):
+ """
+ Extract all doctest examples from the given string, and return
+ them as a list of `Example` objects. Line numbers are
+ 0-based, because it's most common in doctests that nothing
+        interesting appears on the same line as the opening triple-quote,
+ and so the first interesting line is called \"line 1\" then.
+
+ The optional argument `name` is a name identifying this
+ string, and is only used for error messages.
+ """
+ return [x for x in self.parse(string, name)
+ if isinstance(x, Example)]
+
+ def _parse_example(self, m, name, lineno):
+ """
+ Given a regular expression match from `_EXAMPLE_RE` (`m`),
+ return a pair `(source, want)`, where `source` is the matched
+ example's source code (with prompts and indentation stripped);
+ and `want` is the example's expected output (with indentation
+ stripped).
+
+ `name` is the string's name, and `lineno` is the line number
+ where the example starts; both are used for error messages.
+ """
+ # Get the example's indentation level.
+ indent = len(m.group('indent'))
+
+ # Divide source into lines; check that they're properly
+ # indented; and then strip their indentation & prompts.
+ source_lines = m.group('source').split('\n')
+ self._check_prompt_blank(source_lines, indent, name, lineno)
+ self._check_prefix(source_lines[1:], ' '*indent + '.', name, lineno)
+ source = '\n'.join([sl[indent+4:] for sl in source_lines])
+
+ # Divide want into lines; check that it's properly indented; and
+ # then strip the indentation. Spaces before the last newline should
+ # be preserved, so plain rstrip() isn't good enough.
+ want = m.group('want')
+ want_lines = want.split('\n')
+ if len(want_lines) > 1 and re.match(r' *$', want_lines[-1]):
+ del want_lines[-1] # forget final newline & spaces after it
+ self._check_prefix(want_lines, ' '*indent, name,
+ lineno + len(source_lines))
+ want = '\n'.join([wl[indent:] for wl in want_lines])
+
+ # If `want` contains a traceback message, then extract it.
+ m = self._EXCEPTION_RE.match(want)
+ if m:
+ exc_msg = m.group('msg')
+ else:
+ exc_msg = None
+
+ # Extract options from the source.
+ options = self._find_options(source, name, lineno)
+
+ return source, options, want, exc_msg
+
+ # This regular expression looks for option directives in the
+ # source code of an example. Option directives are comments
+ # starting with "doctest:". Warning: this may give false
+ # positives for string-literals that contain the string
+ # "#doctest:". Eliminating these false positives would require
+ # actually parsing the string; but we limit them by ignoring any
+ # line containing "#doctest:" that is *followed* by a quote mark.
+ _OPTION_DIRECTIVE_RE = re.compile(r'#\s*doctest:\s*([^\n\'"]*)$',
+ re.MULTILINE)
+
+ def _find_options(self, source, name, lineno):
+ """
+ Return a dictionary containing option overrides extracted from
+ option directives in the given source string.
+
+ `name` is the string's name, and `lineno` is the line number
+ where the example starts; both are used for error messages.
+ """
+ options = {}
+ # (note: with the current regexp, this will match at most once:)
+ for m in self._OPTION_DIRECTIVE_RE.finditer(source):
+ option_strings = m.group(1).replace(',', ' ').split()
+ for option in option_strings:
+ if (option[0] not in '+-' or
+ option[1:] not in OPTIONFLAGS_BY_NAME):
+ raise ValueError('line %r of the doctest for %s '
+ 'has an invalid option: %r' %
+ (lineno+1, name, option))
+ flag = OPTIONFLAGS_BY_NAME[option[1:]]
+ options[flag] = (option[0] == '+')
+ if options and self._IS_BLANK_OR_COMMENT(source):
+ raise ValueError('line %r of the doctest for %s has an option '
+ 'directive on a line with no example: %r' %
+ (lineno, name, source))
+ return options
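+
+ # Illustration (comment only): a directive rides on the source line
+ # of an example, e.g. in a docstring such as
+ #
+ #   >>> print range(20) #doctest: +ELLIPSIS
+ #   [0, 1, ..., 18, 19]
+ #
+ # for which _find_options() returns {ELLIPSIS: True}, i.e. the flag
+ # value mapped to True.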
+
+ # This regular expression finds the indentation of every non-blank
+ # line in a string.
+ _INDENT_RE = re.compile(r'^([ ]*)(?=\S)', re.MULTILINE)
+
+ def _min_indent(self, s):
+ "Return the minimum indentation of any non-blank line in `s`"
+ indents = [len(indent) for indent in self._INDENT_RE.findall(s)]
+ if len(indents) > 0:
+ return min(indents)
+ else:
+ return 0
+
+ def _check_prompt_blank(self, lines, indent, name, lineno):
+ """
+ Given the lines of a source string (including prompts and
+ leading indentation), check to make sure that every prompt is
+ followed by a space character. If any line is not followed by
+ a space character, then raise ValueError.
+ """
+ for i, line in enumerate(lines):
+ if len(line) >= indent+4 and line[indent+3] != ' ':
+ raise ValueError('line %r of the docstring for %s '
+ 'lacks blank after %s: %r' %
+ (lineno+i+1, name,
+ line[indent:indent+3], line))
+
+ def _check_prefix(self, lines, prefix, name, lineno):
+ """
+ Check that every line in the given list starts with the given
+ prefix; if any line does not, then raise a ValueError.
+ """
+ for i, line in enumerate(lines):
+ if line and not line.startswith(prefix):
+ raise ValueError('line %r of the docstring for %s has '
+ 'inconsistent leading whitespace: %r' %
+ (lineno+i+1, name, line))
+
+
+######################################################################
+## 4. DocTest Finder
+######################################################################
+
+class DocTestFinder:
+ """
+ A class used to extract the DocTests that are relevant to a given
+ object, from its docstring and the docstrings of its contained
+ objects. Doctests can currently be extracted from the following
+ object types: modules, functions, classes, methods, staticmethods,
+ classmethods, and properties.
+ """
+
+ def __init__(self, verbose=False, parser=DocTestParser(),
+ recurse=True, _namefilter=None, exclude_empty=True):
+ """
+ Create a new doctest finder.
+
+ The optional argument `parser` specifies a class or
+ function that should be used to create new DocTest objects (or
+ objects that implement the same interface as DocTest). The
+ signature for this factory function should match the signature
+ of the DocTest constructor.
+
+ If the optional argument `recurse` is false, then `find` will
+ only examine the given object, and not any contained objects.
+
+ If the optional argument `exclude_empty` is false, then `find`
+ will include tests for objects with empty docstrings.
+ """
+ self._parser = parser
+ self._verbose = verbose
+ self._recurse = recurse
+ self._exclude_empty = exclude_empty
+ # _namefilter is undocumented, and exists only for temporary backward-
+ # compatibility support of testmod's deprecated isprivate mess.
+ self._namefilter = _namefilter
+
+ def find(self, obj, name=None, module=None, globs=None,
+ extraglobs=None):
+ """
+ Return a list of the DocTests that are defined by the given
+ object's docstring, or by any of its contained objects'
+ docstrings.
+
+ The optional parameter `module` is the module that contains
+ the given object. If the module is not specified or is None, then
+ the test finder will attempt to automatically determine the
+ correct module. The object's module is used:
+
+ - As a default namespace, if `globs` is not specified.
+ - To prevent the DocTestFinder from extracting DocTests
+ from objects that are imported from other modules.
+ - To find the name of the file containing the object.
+ - To help find the line number of the object within its
+ file.
+
+ Contained objects whose module does not match `module` are ignored.
+
+ If `module` is False, no attempt to find the module will be made.
+ This is obscure, of use mostly in tests: if `module` is False, or
+ is None but cannot be found automatically, then all objects are
+ considered to belong to the (non-existent) module, so all contained
+ objects will (recursively) be searched for doctests.
+
+ The globals for each DocTest are formed by combining `globs`
+ and `extraglobs` (bindings in `extraglobs` override bindings
+ in `globs`). A new copy of the globals dictionary is created
+ for each DocTest. If `globs` is not specified, then it
+ defaults to the module's `__dict__`, if specified, or {}
+ otherwise. If `extraglobs` is not specified, then it defaults
+ to {}.
+
+ """
+ # If name was not specified, then extract it from the object.
+ if name is None:
+ name = getattr(obj, '__name__', None)
+ if name is None:
+ raise ValueError("DocTestFinder.find: name must be given "
+ "when obj.__name__ doesn't exist: %r" %
+ (type(obj),))
+
+ # Find the module that contains the given object (if obj is
+ # a module, then module=obj.). Note: this may fail, in which
+ # case module will be None.
+ if module is False:
+ module = None
+ elif module is None:
+ module = inspect.getmodule(obj)
+
+ # Read the module's source code. This is used by
+ # DocTestFinder._find_lineno to find the line number for a
+ # given object's docstring.
+ try:
+ file = inspect.getsourcefile(obj) or inspect.getfile(obj)
+ source_lines = linecache.getlines(file)
+ if not source_lines:
+ source_lines = None
+ except TypeError:
+ source_lines = None
+
+ # Initialize globals, and merge in extraglobs.
+ if globs is None:
+ if module is None:
+ globs = {}
+ else:
+ globs = module.__dict__.copy()
+ else:
+ globs = globs.copy()
+ if extraglobs is not None:
+ globs.update(extraglobs)
+
+ # Recursively explore `obj`, extracting DocTests.
+ tests = []
+ self._find(tests, obj, name, module, source_lines, globs, {})
+ # Sort the tests by alpha order of names, for consistency in
+ # verbose-mode output. This was a feature of doctest in Pythons
+ # <= 2.3 that got lost by accident in 2.4. It was repaired in
+ # 2.4.4 and 2.5.
+ tests.sort()
+ return tests
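+
+ # A minimal usage sketch (comment only); `mymodule` is a stand-in
+ # for any imported module:
+ #
+ #   import mymodule
+ #   finder = DocTestFinder()
+ #   for t in finder.find(mymodule):
+ #       print '%s: %d examples' % (t.name, len(t.examples))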
+
+ def _filter(self, obj, prefix, base):
+ """
+ Return true if the given object should not be examined.
+ """
+ return (self._namefilter is not None and
+ self._namefilter(prefix, base))
+
+ def _from_module(self, module, object):
+ """
+ Return true if the given object is defined in the given
+ module.
+ """
+ if module is None:
+ return True
+ elif inspect.isfunction(object):
+ return module.__dict__ is object.func_globals
+ elif inspect.isclass(object):
+ # Some jython classes don't set __module__
+ return module.__name__ == getattr(object, '__module__', None)
+ elif inspect.getmodule(object) is not None:
+ return module is inspect.getmodule(object)
+ elif hasattr(object, '__module__'):
+ return module.__name__ == object.__module__
+ elif isinstance(object, property):
+ return True # [XX] no way to be sure.
+ else:
+ raise ValueError("object must be a class or function")
+
+ def _find(self, tests, obj, name, module, source_lines, globs, seen):
+ """
+ Find tests for the given object and any contained objects, and
+ add them to `tests`.
+ """
+ if self._verbose:
+ print 'Finding tests in %s' % name
+
+ # If we've already processed this object, then ignore it.
+ if id(obj) in seen:
+ return
+ seen[id(obj)] = 1
+
+ # Find a test for this object, and add it to the list of tests.
+ test = self._get_test(obj, name, module, globs, source_lines)
+ if test is not None:
+ tests.append(test)
+
+ # Look for tests in a module's contained objects.
+ if inspect.ismodule(obj) and self._recurse:
+ for valname, val in obj.__dict__.items():
+ # Check if this contained object should be ignored.
+ if self._filter(val, name, valname):
+ continue
+ valname = '%s.%s' % (name, valname)
+ # Recurse to functions & classes.
+ if ((inspect.isfunction(val) or inspect.isclass(val)) and
+ self._from_module(module, val)):
+ self._find(tests, val, valname, module, source_lines,
+ globs, seen)
+
+ # Look for tests in a module's __test__ dictionary.
+ if inspect.ismodule(obj) and self._recurse:
+ for valname, val in getattr(obj, '__test__', {}).items():
+ if not isinstance(valname, basestring):
+ raise ValueError("DocTestFinder.find: __test__ keys "
+ "must be strings: %r" %
+ (type(valname),))
+ if not (inspect.isfunction(val) or inspect.isclass(val) or
+ inspect.ismethod(val) or inspect.ismodule(val) or
+ isinstance(val, basestring)):
+ raise ValueError("DocTestFinder.find: __test__ values "
+ "must be strings, functions, methods, "
+ "classes, or modules: %r" %
+ (type(val),))
+ valname = '%s.__test__.%s' % (name, valname)
+ self._find(tests, val, valname, module, source_lines,
+ globs, seen)
+
+ # Look for tests in a class's contained objects.
+ if inspect.isclass(obj) and self._recurse:
+ for valname, val in obj.__dict__.items():
+ # Check if this contained object should be ignored.
+ if self._filter(val, name, valname):
+ continue
+ # Special handling for staticmethod/classmethod.
+ if isinstance(val, staticmethod):
+ val = getattr(obj, valname)
+ if isinstance(val, classmethod):
+ val = getattr(obj, valname).im_func
+
+ # Recurse to methods, properties, and nested classes.
+ if ((inspect.isfunction(val) or inspect.isclass(val) or
+ isinstance(val, property)) and
+ self._from_module(module, val)):
+ valname = '%s.%s' % (name, valname)
+ self._find(tests, val, valname, module, source_lines,
+ globs, seen)
+
+ def _get_test(self, obj, name, module, globs, source_lines):
+ """
+ Return a DocTest for the given object, if it defines a docstring;
+ otherwise, return None.
+ """
+ # Extract the object's docstring. If it doesn't have one,
+ # then return None (no test for this object).
+ if isinstance(obj, basestring):
+ docstring = obj
+ else:
+ try:
+ if obj.__doc__ is None:
+ docstring = ''
+ else:
+ docstring = obj.__doc__
+ if not isinstance(docstring, basestring):
+ docstring = str(docstring)
+ except (TypeError, AttributeError):
+ docstring = ''
+
+ # Find the docstring's location in the file.
+ lineno = self._find_lineno(obj, source_lines)
+
+ # Don't bother if the docstring is empty.
+ if self._exclude_empty and not docstring:
+ return None
+
+ # Return a DocTest for this object.
+ if module is None:
+ filename = None
+ else:
+ filename = getattr(module, '__file__', module.__name__)
+ if filename[-4:] in (".pyc", ".pyo"):
+ filename = filename[:-1]
+ elif sys.platform.startswith('java') and \
+ filename.endswith('$py.class'):
+ filename = '%s.py' % filename[:-9]
+ return self._parser.get_doctest(docstring, globs, name,
+ filename, lineno)
+
+ def _find_lineno(self, obj, source_lines):
+ """
+ Return a line number of the given object's docstring. Note:
+ this method assumes that the object has a docstring.
+ """
+ lineno = None
+
+ # Find the line number for modules.
+ if inspect.ismodule(obj):
+ lineno = 0
+
+ # Find the line number for classes.
+ # Note: this could be fooled if a class is defined multiple
+ # times in a single file.
+ if inspect.isclass(obj):
+ if source_lines is None:
+ return None
+ pat = re.compile(r'^\s*class\s*%s\b' %
+ getattr(obj, '__name__', '-'))
+ for i, line in enumerate(source_lines):
+ if pat.match(line):
+ lineno = i
+ break
+
+ # Find the line number for functions & methods.
+ if inspect.ismethod(obj): obj = obj.im_func
+ if inspect.isfunction(obj): obj = obj.func_code
+ if inspect.istraceback(obj): obj = obj.tb_frame
+ if inspect.isframe(obj): obj = obj.f_code
+ if inspect.iscode(obj):
+ lineno = getattr(obj, 'co_firstlineno', None)-1
+
+ # Find the line number where the docstring starts. Assume
+ # that it's the first line that begins with a quote mark.
+ # Note: this could be fooled by a multiline function
+ # signature, where a continuation line begins with a quote
+ # mark.
+ if lineno is not None:
+ if source_lines is None:
+ return lineno+1
+ pat = re.compile(r'(^|.*:)\s*\w*("|\')')
+ for lineno in range(lineno, len(source_lines)):
+ if pat.match(source_lines[lineno]):
+ return lineno
+
+ # We couldn't find the line number.
+ return None
+
+######################################################################
+## 5. DocTest Runner
+######################################################################
+
+class DocTestRunner:
+ # This divider string is used to separate failure messages, and to
+ # separate sections of the summary.
+ DIVIDER = "*" * 70
+
+ def __init__(self, checker=None, verbose=None, optionflags=0):
+ """
+ Create a new test runner.
+
+ Optional keyword arg `checker` is the `OutputChecker` that
+ should be used to compare the expected outputs and actual
+ outputs of doctest examples.
+
+ Optional keyword arg 'verbose' prints lots of stuff if true,
+ only failures if false; by default, it's true iff '-v' is in
+ sys.argv.
+
+ Optional argument `optionflags` can be used to control how the
+ test runner compares expected output to actual output, and how
+ it displays failures. See the documentation for `testmod` for
+ more information.
+ """
+ self._checker = checker or OutputChecker()
+ if verbose is None:
+ verbose = '-v' in sys.argv
+ self._verbose = verbose
+ self.optionflags = optionflags
+ self.original_optionflags = optionflags
+
+ # Keep track of the examples we've run.
+ self.tries = 0
+ self.failures = 0
+ self._name2ft = {}
+
+ # Create a fake output target for capturing doctest output.
+ self._fakeout = _SpoofOut()
+
+ #/////////////////////////////////////////////////////////////////
+ # Reporting methods
+ #/////////////////////////////////////////////////////////////////
+
+ def report_start(self, out, test, example):
+ """
+ Report that the test runner is about to process the given
+ example. (Only displays a message if verbose=True)
+ """
+ if self._verbose:
+ if example.want:
+ out('Trying:\n' + _indent(example.source) +
+ 'Expecting:\n' + _indent(example.want))
+ else:
+ out('Trying:\n' + _indent(example.source) +
+ 'Expecting nothing\n')
+
+ def report_success(self, out, test, example, got):
+ """
+ Report that the given example ran successfully. (Only
+ displays a message if verbose=True)
+ """
+ if self._verbose:
+ out("ok\n")
+
+ def report_failure(self, out, test, example, got):
+ """
+ Report that the given example failed.
+ """
+ out(self._failure_header(test, example) +
+ self._checker.output_difference(example, got, self.optionflags))
+
+ def report_unexpected_exception(self, out, test, example, exc_info):
+ """
+ Report that the given example raised an unexpected exception.
+ """
+ out(self._failure_header(test, example) +
+ 'Exception raised:\n' + _indent(_exception_traceback(exc_info)))
+
+ def _failure_header(self, test, example):
+ out = [self.DIVIDER]
+ if test.filename:
+ if test.lineno is not None and example.lineno is not None:
+ lineno = test.lineno + example.lineno + 1
+ else:
+ lineno = '?'
+ out.append('File "%s", line %s, in %s' %
+ (test.filename, lineno, test.name))
+ else:
+ out.append('Line %s, in %s' % (example.lineno+1, test.name))
+ out.append('Failed example:')
+ source = example.source
+ out.append(_indent(source))
+ return '\n'.join(out)
+
+ #/////////////////////////////////////////////////////////////////
+ # DocTest Running
+ #/////////////////////////////////////////////////////////////////
+
+ def __run(self, test, compileflags, out):
+ """
+ Run the examples in `test`. Write the outcome of each example
+ with one of the `DocTestRunner.report_*` methods, using the
+ writer function `out`. `compileflags` is the set of compiler
+ flags that should be used to execute examples. Return a tuple
+ `(f, t)`, where `t` is the number of examples tried, and `f`
+ is the number of examples that failed. The examples are run
+ in the namespace `test.globs`.
+ """
+ # Keep track of the number of failures and tries.
+ failures = tries = 0
+
+ # Save the option flags (since option directives can be used
+ # to modify them).
+ original_optionflags = self.optionflags
+
+ SUCCESS, FAILURE, BOOM = range(3) # `outcome` state
+
+ check = self._checker.check_output
+
+ # Process each example.
+ for examplenum, example in enumerate(test.examples):
+
+ # If REPORT_ONLY_FIRST_FAILURE is set, then suppress
+ # reporting after the first failure.
+ quiet = (self.optionflags & REPORT_ONLY_FIRST_FAILURE and
+ failures > 0)
+
+ # Merge in the example's options.
+ self.optionflags = original_optionflags
+ if example.options:
+ for (optionflag, val) in example.options.items():
+ if val:
+ self.optionflags |= optionflag
+ else:
+ self.optionflags &= ~optionflag
+
+ # Record that we started this example.
+ tries += 1
+ if not quiet:
+ self.report_start(out, test, example)
+
+ # Use a special filename for compile(), so we can retrieve
+ # the source code during interactive debugging (see
+ # __patched_linecache_getlines).
+ filename = '<doctest %s[%d]>' % (test.name, examplenum)
+
+ # Run the example in the given context (globs), and record
+ # any exception that gets raised. (But don't intercept
+ # keyboard interrupts.)
+ try:
+ # Don't blink! This is where the user's code gets run.
+ exec compile(example.source, filename, "single",
+ compileflags, 1) in test.globs
+ self.debugger.set_continue() # ==== Example Finished ====
+ exception = None
+ except KeyboardInterrupt:
+ raise
+ except:
+ exception = sys.exc_info()
+ self.debugger.set_continue() # ==== Example Finished ====
+
+ got = self._fakeout.getvalue() # the actual output
+ self._fakeout.truncate(0)
+ outcome = FAILURE # guilty until proved innocent or insane
+
+ # If the example executed without raising any exceptions,
+ # verify its output.
+ if exception is None:
+ if check(example.want, got, self.optionflags):
+ outcome = SUCCESS
+
+ # The example raised an exception: check if it was expected.
+ else:
+ exc_info = sys.exc_info()
+ exc_msg = traceback.format_exception_only(*exc_info[:2])[-1]
+ if not quiet:
+ got += _exception_traceback(exc_info)
+
+ # If `example.exc_msg` is None, then we weren't expecting
+ # an exception.
+ if example.exc_msg is None:
+ outcome = BOOM
+
+ # We expected an exception: see whether it matches.
+ elif check(example.exc_msg, exc_msg, self.optionflags):
+ outcome = SUCCESS
+
+ # Another chance if they didn't care about the detail.
+ elif self.optionflags & IGNORE_EXCEPTION_DETAIL:
+ m1 = re.match(r'[^:]*:', example.exc_msg)
+ m2 = re.match(r'[^:]*:', exc_msg)
+ if m1 and m2 and check(m1.group(0), m2.group(0),
+ self.optionflags):
+ outcome = SUCCESS
+
+ # Report the outcome.
+ if outcome is SUCCESS:
+ if not quiet:
+ self.report_success(out, test, example, got)
+ elif outcome is FAILURE:
+ if not quiet:
+ self.report_failure(out, test, example, got)
+ failures += 1
+ elif outcome is BOOM:
+ if not quiet:
+ self.report_unexpected_exception(out, test, example,
+ exc_info)
+ failures += 1
+ else:
+ assert False, ("unknown outcome", outcome)
+
+ # Restore the option flags (in case they were modified)
+ self.optionflags = original_optionflags
+
+ # Record and return the number of failures and tries.
+ self.__record_outcome(test, failures, tries)
+ return failures, tries
+
+ def __record_outcome(self, test, f, t):
+ """
+ Record the fact that the given DocTest (`test`) generated `f`
+ failures out of `t` tried examples.
+ """
+ f2, t2 = self._name2ft.get(test.name, (0,0))
+ self._name2ft[test.name] = (f+f2, t+t2)
+ self.failures += f
+ self.tries += t
+
+ __LINECACHE_FILENAME_RE = re.compile(r'<doctest '
+ r'(?P<name>[\w\.]+)'
+ r'\[(?P<examplenum>\d+)\]>$')
+ def __patched_linecache_getlines(self, filename):
+ m = self.__LINECACHE_FILENAME_RE.match(filename)
+ if m and m.group('name') == self.test.name:
+ example = self.test.examples[int(m.group('examplenum'))]
+ return example.source.splitlines(True)
+ else:
+ return self.save_linecache_getlines(filename)
+
+ def run(self, test, compileflags=None, out=None, clear_globs=True):
+ """
+ Run the examples in `test`, and display the results using the
+ writer function `out`.
+
+ The examples are run in the namespace `test.globs`. If
+ `clear_globs` is true (the default), then this namespace will
+ be cleared after the test runs, to help with garbage
+ collection. If you would like to examine the namespace after
+ the test completes, then use `clear_globs=False`.
+
+ `compileflags` gives the set of flags that should be used by
+ the Python compiler when running the examples. If not
+ specified, then it will default to the set of future-import
+ flags that apply to `globs`.
+
+ The output of each example is checked using
+ `DocTestRunner.check_output`, and the results are formatted by
+ the `DocTestRunner.report_*` methods.
+ """
+ self.test = test
+
+ if compileflags is None:
+ compileflags = _extract_future_flags(test.globs)
+
+ save_stdout = sys.stdout
+ if out is None:
+ out = save_stdout.write
+ sys.stdout = self._fakeout
+
+ # Patch pdb.set_trace to restore sys.stdout during interactive
+ # debugging (so it's not still redirected to self._fakeout).
+ # Note that the interactive output will go to *our*
+ # save_stdout, even if that's not the real sys.stdout; this
+ # allows us to write test cases for the set_trace behavior.
+ save_set_trace = pdb.set_trace
+ self.debugger = _OutputRedirectingPdb(save_stdout)
+ self.debugger.reset()
+ pdb.set_trace = self.debugger.set_trace
+
+ # Patch linecache.getlines, so we can see the example's source
+ # when we're inside the debugger.
+ self.save_linecache_getlines = linecache.getlines
+ linecache.getlines = self.__patched_linecache_getlines
+
+ try:
+ return self.__run(test, compileflags, out)
+ finally:
+ sys.stdout = save_stdout
+ pdb.set_trace = save_set_trace
+ linecache.getlines = self.save_linecache_getlines
+ if clear_globs:
+ test.globs.clear()
+
+ #/////////////////////////////////////////////////////////////////
+ # Summarization
+ #/////////////////////////////////////////////////////////////////
+ def summarize(self, verbose=None):
+ """
+ Print a summary of all the test cases that have been run by
+ this DocTestRunner, and return a tuple `(f, t)`, where `f` is
+ the total number of failed examples, and `t` is the total
+ number of tried examples.
+
+ The optional `verbose` argument controls how detailed the
+ summary is. If the verbosity is not specified, then the
+ DocTestRunner's verbosity is used.
+ """
+ if verbose is None:
+ verbose = self._verbose
+ notests = []
+ passed = []
+ failed = []
+ totalt = totalf = 0
+ for x in self._name2ft.items():
+ name, (f, t) = x
+ assert f <= t
+ totalt += t
+ totalf += f
+ if t == 0:
+ notests.append(name)
+ elif f == 0:
+ passed.append( (name, t) )
+ else:
+ failed.append(x)
+ if verbose:
+ if notests:
+ print len(notests), "items had no tests:"
+ notests.sort()
+ for thing in notests:
+ print " ", thing
+ if passed:
+ print len(passed), "items passed all tests:"
+ passed.sort()
+ for thing, count in passed:
+ print " %3d tests in %s" % (count, thing)
+ if failed:
+ print self.DIVIDER
+ print len(failed), "items had failures:"
+ failed.sort()
+ for thing, (f, t) in failed:
+ print " %3d of %3d in %s" % (f, t, thing)
+ if verbose:
+ print totalt, "tests in", len(self._name2ft), "items."
+ print totalt - totalf, "passed and", totalf, "failed."
+ if totalf:
+ print "***Test Failed***", totalf, "failures."
+ elif verbose:
+ print "Test passed."
+ return totalf, totalt
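+
+ # A minimal end-to-end sketch (comment only); the docstring text is
+ # hypothetical:
+ #
+ #   parser = DocTestParser()
+ #   test = parser.get_doctest(">>> 2 + 2\n4\n", {}, 'demo', None, 0)
+ #   runner = DocTestRunner(verbose=False)
+ #   failures, tries = runner.run(test)
+ #   runner.summarize()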
+
+ #/////////////////////////////////////////////////////////////////
+ # Backward compatibility cruft to maintain doctest.master.
+ #/////////////////////////////////////////////////////////////////
+ def merge(self, other):
+ d = self._name2ft
+ for name, (f, t) in other._name2ft.items():
+ if name in d:
+ print "*** DocTestRunner.merge: '" + name + "' in both" \
+ " testers; summing outcomes."
+ f2, t2 = d[name]
+ f = f + f2
+ t = t + t2
+ d[name] = f, t
+
+class OutputChecker:
+ """
+ A class used to check whether the actual output from a doctest
+ example matches the expected output. `OutputChecker` defines two
+ methods: `check_output`, which compares a given pair of outputs,
+ and returns true if they match; and `output_difference`, which
+ returns a string describing the differences between two outputs.
+ """
+ def check_output(self, want, got, optionflags):
+ """
+ Return True iff the actual output from an example (`got`)
+ matches the expected output (`want`). These strings are
+ always considered to match if they are identical; but
+ depending on what option flags the test runner is using,
+ several non-exact match types are also possible. See the
+ documentation for `DocTestRunner` for more information about
+ option flags.
+ """
+ # Handle the common case first, for efficiency:
+ # if they're string-identical, always return true.
+ if got == want:
+ return True
+
+ # The values True and False replaced 1 and 0 as the return
+ # value for boolean comparisons in Python 2.3.
+ if not (optionflags & DONT_ACCEPT_TRUE_FOR_1):
+ if (got,want) == ("True\n", "1\n"):
+ return True
+ if (got,want) == ("False\n", "0\n"):
+ return True
+
+ # <BLANKLINE> can be used as a special sequence to signify a
+ # blank line, unless the DONT_ACCEPT_BLANKLINE flag is used.
+ if not (optionflags & DONT_ACCEPT_BLANKLINE):
+ # Replace <BLANKLINE> in want with a blank line.
+ want = re.sub(r'(?m)^%s\s*?$' % re.escape(BLANKLINE_MARKER),
+ '', want)
+ # If a line in got contains only spaces, then remove the
+ # spaces.
+ got = re.sub(r'(?m)^\s*?$', '', got)
+ if got == want:
+ return True
+
+ # This flag causes doctest to ignore any differences in the
+ # contents of whitespace strings. Note that this can be used
+ # in conjunction with the ELLIPSIS flag.
+ if optionflags & NORMALIZE_WHITESPACE:
+ got = ' '.join(got.split())
+ want = ' '.join(want.split())
+ if got == want:
+ return True
+
+ # The ELLIPSIS flag says to let the sequence "..." in `want`
+ # match any substring in `got`.
+ if optionflags & ELLIPSIS:
+ if _ellipsis_match(want, got):
+ return True
+
+ # We didn't find any match; return false.
+ return False
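+
+ # Illustration (comment only) of flag-dependent matching:
+ #
+ #   checker = OutputChecker()
+ #   checker.check_output('a  b\n', 'a b\n', 0)                    # False
+ #   checker.check_output('a  b\n', 'a b\n', NORMALIZE_WHITESPACE) # True
+ #   checker.check_output('[0, ...]\n', '[0, 1, 2]\n', ELLIPSIS)   # True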
+
+ # Should we do a fancy diff?
+ def _do_a_fancy_diff(self, want, got, optionflags):
+ # Not unless they asked for a fancy diff.
+ if not optionflags & (REPORT_UDIFF |
+ REPORT_CDIFF |
+ REPORT_NDIFF):
+ return False
+
+ # If expected output uses ellipsis, a meaningful fancy diff is
+ # too hard ... or maybe not. In two real-life failures Tim saw,
+ # a diff was a major help anyway, so this is commented out.
+ # [todo] _ellipsis_match() knows which pieces do and don't match,
+ # and could be the basis for a kick-ass diff in this case.
+ ##if optionflags & ELLIPSIS and ELLIPSIS_MARKER in want:
+ ## return False
+
+ # ndiff does intraline difference marking, so can be useful even
+ # for 1-line differences.
+ if optionflags & REPORT_NDIFF:
+ return True
+
+ # The other diff types need at least a few lines to be helpful.
+ return want.count('\n') > 2 and got.count('\n') > 2
+
+ def output_difference(self, example, got, optionflags):
+ """
+ Return a string describing the differences between the
+ expected output for a given example (`example`) and the actual
+ output (`got`). `optionflags` is the set of option flags used
+ to compare `want` and `got`.
+ """
+ want = example.want
+ # If <BLANKLINE>s are being used, then replace blank lines
+ # with <BLANKLINE> in the actual output string.
+ if not (optionflags & DONT_ACCEPT_BLANKLINE):
+ got = re.sub(r'(?m)^[ ]*(?=\n)', BLANKLINE_MARKER, got)
+
+ # Check if we should use diff.
+ if self._do_a_fancy_diff(want, got, optionflags):
+ # Split want & got into lines.
+ want_lines = want.splitlines(True) # True == keep line ends
+ got_lines = got.splitlines(True)
+ # Use difflib to find their differences.
+ if optionflags & REPORT_UDIFF:
+ diff = difflib.unified_diff(want_lines, got_lines, n=2)
+ diff = list(diff)[2:] # strip the diff header
+ kind = 'unified diff with -expected +actual'
+ elif optionflags & REPORT_CDIFF:
+ diff = difflib.context_diff(want_lines, got_lines, n=2)
+ diff = list(diff)[2:] # strip the diff header
+ kind = 'context diff with expected followed by actual'
+ elif optionflags & REPORT_NDIFF:
+ engine = difflib.Differ(charjunk=difflib.IS_CHARACTER_JUNK)
+ diff = list(engine.compare(want_lines, got_lines))
+ kind = 'ndiff with -expected +actual'
+ else:
+ assert 0, 'Bad diff option'
+ # Remove trailing whitespace on diff output.
+ diff = [line.rstrip() + '\n' for line in diff]
+ return 'Differences (%s):\n' % kind + _indent(''.join(diff))
+
+ # If we're not using diff, then simply list the expected
+ # output followed by the actual output.
+ if want and got:
+ return 'Expected:\n%sGot:\n%s' % (_indent(want), _indent(got))
+ elif want:
+ return 'Expected:\n%sGot nothing\n' % _indent(want)
+ elif got:
+ return 'Expected nothing\nGot:\n%s' % _indent(got)
+ else:
+ return 'Expected nothing\nGot nothing\n'
+
+class DocTestFailure(Exception):
+ """A DocTest example has failed in debugging mode.
+
+ The exception instance has variables:
+
+ - test: the DocTest object being run
+
+ - example: the Example object that failed
+
+ - got: the actual output
+ """
+ def __init__(self, test, example, got):
+ self.test = test
+ self.example = example
+ self.got = got
+
+ def __str__(self):
+ return str(self.test)
+
+class UnexpectedException(Exception):
+ """A DocTest example has encountered an unexpected exception
+
+ The exception instance has variables:
+
+ - test: the DocTest object being run
+
+ - example: the Example object that failed
+
+ - exc_info: the exception info
+ """
+ def __init__(self, test, example, exc_info):
+ self.test = test
+ self.example = example
+ self.exc_info = exc_info
+
+ def __str__(self):
+ return str(self.test)
+
+class DebugRunner(DocTestRunner):
+
+ def run(self, test, compileflags=None, out=None, clear_globs=True):
+ r = DocTestRunner.run(self, test, compileflags, out, False)
+ if clear_globs:
+ test.globs.clear()
+ return r
+
+ def report_unexpected_exception(self, out, test, example, exc_info):
+ raise UnexpectedException(test, example, exc_info)
+
+ def report_failure(self, out, test, example, got):
+ raise DocTestFailure(test, example, got)
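+
+ # A minimal usage sketch (comment only): DebugRunner raises instead
+ # of reporting, so failures can be post-mortem debugged; `test`
+ # stands in for any DocTest object.
+ #
+ #   runner = DebugRunner(verbose=False)
+ #   try:
+ #       runner.run(test)
+ #   except DocTestFailure, e:
+ #       print 'expected %r, got %r' % (e.example.want, e.got)
+ #   except UnexpectedException, e:
+ #       pdb.post_mortem(e.exc_info[2])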
+
+######################################################################
+## 6. Test Functions
+######################################################################
+# These should be backwards compatible.
+
+# For backward compatibility, a global instance of a DocTestRunner
+# class, updated by testmod.
+master = None
+
+def testmod(m=None, name=None, globs=None, verbose=None, isprivate=None,
+ report=True, optionflags=0, extraglobs=None,
+ raise_on_error=False, exclude_empty=False):
+ """m=None, name=None, globs=None, verbose=None, isprivate=None,
+ report=True, optionflags=0, extraglobs=None, raise_on_error=False,
+ exclude_empty=False
+
+ Test examples in docstrings in functions and classes reachable
+ from module m (or the current module if m is not supplied), starting
+ with m.__doc__. Unless isprivate is specified, private names
+ are not skipped.
+
+ Also test examples reachable from dict m.__test__ if it exists and is
+ not None. m.__test__ maps names to functions, classes and strings;
+ function and class docstrings are tested even if the name is private;
+ strings are tested directly, as if they were docstrings.
+
+ Return (#failures, #tests).
+
+ See doctest.__doc__ for an overview.
+
+ Optional keyword arg "name" gives the name of the module; by default
+ use m.__name__.
+
+ Optional keyword arg "globs" gives a dict to be used as the globals
+ when executing examples; by default, use m.__dict__. A copy of this
+ dict is actually used for each docstring, so that each docstring's
+ examples start with a clean slate.
+
+ Optional keyword arg "extraglobs" gives a dictionary that should be
+ merged into the globals that are used to execute examples. By
+ default, no extra globals are used. This is new in 2.4.
+
+ Optional keyword arg "verbose" prints lots of stuff if true, prints
+ only failures if false; by default, it's true iff "-v" is in sys.argv.
+
+ Optional keyword arg "report" prints a summary at the end when true,
+ else prints nothing at the end. In verbose mode, the summary is
+ detailed, else very brief (in fact, empty if all tests passed).
+
+ Optional keyword arg "optionflags" or's together module constants,
+ and defaults to 0. This is new in 2.3. Possible values (see the
+ docs for details):
+
+ DONT_ACCEPT_TRUE_FOR_1
+ DONT_ACCEPT_BLANKLINE
+ NORMALIZE_WHITESPACE
+ ELLIPSIS
+ IGNORE_EXCEPTION_DETAIL
+ REPORT_UDIFF
+ REPORT_CDIFF
+ REPORT_NDIFF
+ REPORT_ONLY_FIRST_FAILURE
+
+ Optional keyword arg "raise_on_error" raises an exception on the
+ first unexpected exception or failure. This allows failures to be
+ post-mortem debugged.
+
+ Deprecated in Python 2.4:
+ Optional keyword arg "isprivate" specifies a function used to
+ determine whether a name is private. The default function
+ treats all functions as public. Optionally, "isprivate" can be
+ set to doctest.is_private to skip over functions marked as private
+ using the underscore naming convention; see its docs for details.
+
+ Advanced tomfoolery: testmod runs methods of a local instance of
+ class doctest.Tester, then merges the results into (or creates)
+ global Tester instance doctest.master. Methods of doctest.master
+ can be called directly too, if you want to do something unusual.
+ Passing report=0 to testmod is especially useful then, to delay
+ displaying a summary. Invoke doctest.master.summarize(verbose)
+ when you're done fiddling.
+ """
+ global master
+
+ if isprivate is not None:
+ warnings.warn("the isprivate argument is deprecated; "
+ "examine DocTestFinder.find() lists instead",
+ DeprecationWarning)
+
+ # If no module was given, then use __main__.
+ if m is None:
+ # DWA - m will still be None if this wasn't invoked from the command
+ # line, in which case the following TypeError is about as good an error
+ # as we should expect
+ m = sys.modules.get('__main__')
+
+ # Check that we were actually given a module.
+ if not inspect.ismodule(m):
+ raise TypeError("testmod: module required; %r" % (m,))
+
+ # If no name was given, then use the module's name.
+ if name is None:
+ name = m.__name__
+
+ # Find, parse, and run all tests in the given module.
+ finder = DocTestFinder(_namefilter=isprivate, exclude_empty=exclude_empty)
+
+ if raise_on_error:
+ runner = DebugRunner(verbose=verbose, optionflags=optionflags)
+ else:
+ runner = DocTestRunner(verbose=verbose, optionflags=optionflags)
+
+ for test in finder.find(m, name, globs=globs, extraglobs=extraglobs):
+ runner.run(test)
+
+ if report:
+ runner.summarize()
+
+ if master is None:
+ master = runner
+ else:
+ master.merge(runner)
+
+ return runner.failures, runner.tries
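+
+# A minimal usage sketch (comment only), the conventional __main__ guard;
+# this vendored copy behaves like the stdlib doctest module here:
+#
+#   if __name__ == '__main__':
+#       testmod(verbose=False)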
+
+def testfile(filename, module_relative=True, name=None, package=None,
+ globs=None, verbose=None, report=True, optionflags=0,
+ extraglobs=None, raise_on_error=False, parser=DocTestParser()):
+ """
+ Test examples in the given file. Return (#failures, #tests).
+
+ Optional keyword arg "module_relative" specifies how filenames
+ should be interpreted:
+
+ - If "module_relative" is True (the default), then "filename"
+ specifies a module-relative path. By default, this path is
+ relative to the calling module's directory; but if the
+ "package" argument is specified, then it is relative to that
+ package. To ensure os-independence, "filename" should use
+ "/" characters to separate path segments, and should not
+ be an absolute path (i.e., it may not begin with "/").
+
+ - If "module_relative" is False, then "filename" specifies an
+ os-specific path. The path may be absolute or relative (to
+ the current working directory).
+
+ Optional keyword arg "name" gives the name of the test; by default
+ use the file's basename.
+
+ Optional keyword argument "package" is a Python package or the
+ name of a Python package whose directory should be used as the
+ base directory for a module relative filename. If no package is
+ specified, then the calling module's directory is used as the base
+ directory for module relative filenames. It is an error to
+ specify "package" if "module_relative" is False.
+
+ Optional keyword arg "globs" gives a dict to be used as the globals
+ when executing examples; by default, use {}. A copy of this dict
+ is actually used for each docstring, so that each docstring's
+ examples start with a clean slate.
+
+ Optional keyword arg "extraglobs" gives a dictionary that should be
+ merged into the globals that are used to execute examples. By
+ default, no extra globals are used.
+
+ Optional keyword arg "verbose" prints lots of stuff if true, prints
+ only failures if false; by default, it's true iff "-v" is in sys.argv.
+
+ Optional keyword arg "report" prints a summary at the end when true,
+ else prints nothing at the end. In verbose mode, the summary is
+ detailed, else very brief (in fact, empty if all tests passed).
+
+ Optional keyword arg "optionflags" or's together module constants,
+ and defaults to 0. Possible values (see the docs for details):
+
+ DONT_ACCEPT_TRUE_FOR_1
+ DONT_ACCEPT_BLANKLINE
+ NORMALIZE_WHITESPACE
+ ELLIPSIS
+ IGNORE_EXCEPTION_DETAIL
+ REPORT_UDIFF
+ REPORT_CDIFF
+ REPORT_NDIFF
+ REPORT_ONLY_FIRST_FAILURE
+
+ Optional keyword arg "raise_on_error" raises an exception on the
+ first unexpected exception or failure. This allows failures to be
+ post-mortem debugged.
+
+ Optional keyword arg "parser" specifies a DocTestParser (or
+ subclass) that should be used to extract tests from the files.
+
+ Advanced tomfoolery: testmod runs methods of a local instance of
+ class doctest.Tester, then merges the results into (or creates)
+ global Tester instance doctest.master. Methods of doctest.master
+ can be called directly too, if you want to do something unusual.
+ Passing report=0 to testmod is especially useful then, to delay
+ displaying a summary. Invoke doctest.master.summarize(verbose)
+ when you're done fiddling.
+ """
+ global master
+
+ if package and not module_relative:
+ raise ValueError("Package may only be specified for module-"
+ "relative paths.")
+
+ # Relativize the path
+ if module_relative:
+ package = _normalize_module(package)
+ filename = _module_relative_path(package, filename)
+
+ # If no name was given, then use the file's name.
+ if name is None:
+ name = os.path.basename(filename)
+
+ # Assemble the globals.
+ if globs is None:
+ globs = {}
+ else:
+ globs = globs.copy()
+ if extraglobs is not None:
+ globs.update(extraglobs)
+
+ if raise_on_error:
+ runner = DebugRunner(verbose=verbose, optionflags=optionflags)
+ else:
+ runner = DocTestRunner(verbose=verbose, optionflags=optionflags)
+
+ # Read the file, convert it to a test, and run it.
+ s = open(filename).read()
+ test = parser.get_doctest(s, globs, name, filename, 0)
+ runner.run(test)
+
+ if report:
+ runner.summarize()
+
+ if master is None:
+ master = runner
+ else:
+ master.merge(runner)
+
+ return runner.failures, runner.tries
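+
+# A minimal usage sketch (comment only); 'example.txt' is a hypothetical
+# file of doctest examples, resolved relative to the calling module:
+#
+#   failures, tries = testfile('example.txt', optionflags=ELLIPSIS)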
+
+def run_docstring_examples(f, globs, verbose=False, name="NoName",
+ compileflags=None, optionflags=0):
+ """
+ Test examples in the given object's docstring (`f`), using `globs`
+ as globals. Optional argument `name` is used in failure messages.
+ If the optional argument `verbose` is true, then generate output
+ even if there are no failures.
+
+ `compileflags` gives the set of flags that should be used by the
+ Python compiler when running the examples. If not specified, then
+ it will default to the set of future-import flags that apply to
+ `globs`.
+
+ Optional keyword arg `optionflags` specifies options for the
+ testing and output. See the documentation for `testmod` for more
+ information.
+ """
+ # Find, parse, and run all tests in the given module.
+ finder = DocTestFinder(verbose=verbose, recurse=False)
+ runner = DocTestRunner(verbose=verbose, optionflags=optionflags)
+ for test in finder.find(f, name, globs=globs):
+ runner.run(test, compileflags=compileflags)
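+
+# Illustration (comment only): check one object's docstring without
+# recursing into contained objects; `my_function` is a stand-in name.
+#
+#   run_docstring_examples(my_function, {}, name='my_function')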
+
+######################################################################
+## 7. Tester
+######################################################################
+# This is provided only for backwards compatibility. It's not
+# actually used in any way.
+
+class Tester:
+ def __init__(self, mod=None, globs=None, verbose=None,
+ isprivate=None, optionflags=0):
+
+ warnings.warn("class Tester is deprecated; "
+ "use class doctest.DocTestRunner instead",
+ DeprecationWarning, stacklevel=2)
+ if mod is None and globs is None:
+ raise TypeError("Tester.__init__: must specify mod or globs")
+ if mod is not None and not inspect.ismodule(mod):
+ raise TypeError("Tester.__init__: mod must be a module; %r" %
+ (mod,))
+ if globs is None:
+ globs = mod.__dict__
+ self.globs = globs
+
+ self.verbose = verbose
+ self.isprivate = isprivate
+ self.optionflags = optionflags
+ self.testfinder = DocTestFinder(_namefilter=isprivate)
+ self.testrunner = DocTestRunner(verbose=verbose,
+ optionflags=optionflags)
+
+ def runstring(self, s, name):
+ test = DocTestParser().get_doctest(s, self.globs, name, None, None)
+ if self.verbose:
+ print "Running string", name
+ (f,t) = self.testrunner.run(test)
+ if self.verbose:
+ print f, "of", t, "examples failed in string", name
+ return (f,t)
+
+ def rundoc(self, object, name=None, module=None):
+ f = t = 0
+ tests = self.testfinder.find(object, name, module=module,
+ globs=self.globs)
+ for test in tests:
+ (f2, t2) = self.testrunner.run(test)
+ (f,t) = (f+f2, t+t2)
+ return (f,t)
+
+ def rundict(self, d, name, module=None):
+ import new
+ m = new.module(name)
+ m.__dict__.update(d)
+ if module is None:
+ module = False
+ return self.rundoc(m, name, module)
+
+ def run__test__(self, d, name):
+ import new
+ m = new.module(name)
+ m.__test__ = d
+ return self.rundoc(m, name)
+
+ def summarize(self, verbose=None):
+ return self.testrunner.summarize(verbose)
+
+ def merge(self, other):
+ self.testrunner.merge(other.testrunner)
+
+######################################################################
+## 8. Unittest Support
+######################################################################
+
+_unittest_reportflags = 0
+
+def set_unittest_reportflags(flags):
+ global _unittest_reportflags
+
+ if (flags & REPORTING_FLAGS) != flags:
+ raise ValueError("Only reporting flags allowed", flags)
+ old = _unittest_reportflags
+ _unittest_reportflags = flags
+ return old
+
+
+class DocTestCase(unittest.TestCase):
+
+ def __init__(self, test, optionflags=0, setUp=None, tearDown=None,
+ checker=None):
+
+ unittest.TestCase.__init__(self)
+ self._dt_optionflags = optionflags
+ self._dt_checker = checker
+ self._dt_test = test
+ self._dt_setUp = setUp
+ self._dt_tearDown = tearDown
+
+ def setUp(self):
+ test = self._dt_test
+
+ if self._dt_setUp is not None:
+ self._dt_setUp(test)
+
+ def tearDown(self):
+ test = self._dt_test
+
+ if self._dt_tearDown is not None:
+ self._dt_tearDown(test)
+
+ test.globs.clear()
+
+ def runTest(self):
+ test = self._dt_test
+ old = sys.stdout
+ new = StringIO()
+ optionflags = self._dt_optionflags
+
+ if not (optionflags & REPORTING_FLAGS):
+ # The option flags don't include any reporting flags,
+ # so add the default reporting flags
+ optionflags |= _unittest_reportflags
+
+ runner = DocTestRunner(optionflags=optionflags,
+ checker=self._dt_checker, verbose=False)
+
+ try:
+ runner.DIVIDER = "-"*70
+ failures, tries = runner.run(
+ test, out=new.write, clear_globs=False)
+ finally:
+ sys.stdout = old
+
+ if failures:
+ raise self.failureException(self.format_failure(new.getvalue()))
+
+ def format_failure(self, err):
+ test = self._dt_test
+ if test.lineno is None:
+ lineno = 'unknown line number'
+ else:
+ lineno = '%s' % test.lineno
+ lname = '.'.join(test.name.split('.')[-1:])
+ return ('Failed doctest test for %s\n'
+ ' File "%s", line %s, in %s\n\n%s'
+ % (test.name, test.filename, lineno, lname, err)
+ )
+
+ def debug(self):
+ self.setUp()
+ runner = DebugRunner(optionflags=self._dt_optionflags,
+ checker=self._dt_checker, verbose=False)
+ runner.run(self._dt_test)
+ self.tearDown()
+
+ def id(self):
+ return self._dt_test.name
+
+ def __repr__(self):
+ name = self._dt_test.name.split('.')
+ return "%s (%s)" % (name[-1], '.'.join(name[:-1]))
+
+ __str__ = __repr__
+
+ def shortDescription(self):
+ return "Doctest: " + self._dt_test.name
+
+def DocTestSuite(module=None, globs=None, extraglobs=None, test_finder=None,
+ **options):
+ """
+ Convert doctest tests for a module to a unittest test suite.
+
+ This converts each documentation string in a module that
+ contains doctest tests to a unittest test case. If any of the
+ tests in a doc string fail, then the test case fails. An exception
+ is raised showing the name of the file containing the test and a
+ (sometimes approximate) line number.
+
+ The `module` argument provides the module to be tested. The argument
+ can be either a module or a module name.
+
+ If no argument is given, the calling module is used.
+
+ A number of options may be provided as keyword arguments:
+
+ setUp
+ A set-up function. This is called before running the
+ tests in each file. The setUp function will be passed a DocTest
+ object. The setUp function can access the test globals as the
+ globs attribute of the test passed.
+
+ tearDown
+ A tear-down function. This is called after running the
+ tests in each file. The tearDown function will be passed a DocTest
+ object. The tearDown function can access the test globals as the
+ globs attribute of the test passed.
+
+ globs
+ A dictionary containing initial global variables for the tests.
+
+ optionflags
+ A set of doctest option flags expressed as an integer.
+ """
+
+ if test_finder is None:
+ test_finder = DocTestFinder()
+
+ module = _normalize_module(module)
+ tests = test_finder.find(module, globs=globs, extraglobs=extraglobs)
+ if globs is None:
+ globs = module.__dict__
+ if not tests:
+ # Why do we want to do this? Because it reveals a bug that might
+ # otherwise be hidden.
+ raise ValueError(module, "has no tests")
+
+ tests.sort()
+ suite = unittest.TestSuite()
+ for test in tests:
+ if len(test.examples) == 0:
+ continue
+ if not test.filename:
+ filename = module.__file__
+ if filename[-4:] in (".pyc", ".pyo"):
+ filename = filename[:-1]
+ elif sys.platform.startswith('java') and \
+ filename.endswith('$py.class'):
+ filename = '%s.py' % filename[:-9]
+ test.filename = filename
+ suite.addTest(DocTestCase(test, **options))
+
+ return suite
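+
+# A minimal usage sketch (comment only); `mymodule` is a stand-in for a
+# module whose docstrings contain doctests:
+#
+#   import unittest, mymodule
+#   suite = DocTestSuite(mymodule, optionflags=NORMALIZE_WHITESPACE)
+#   unittest.TextTestRunner(verbosity=2).run(suite)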
+
+class DocFileCase(DocTestCase):
+
+ def id(self):
+ return '_'.join(self._dt_test.name.split('.'))
+
+ def __repr__(self):
+ return self._dt_test.filename
+ __str__ = __repr__
+
+ def format_failure(self, err):
+ return ('Failed doctest test for %s\n File "%s", line 0\n\n%s'
+ % (self._dt_test.name, self._dt_test.filename, err)
+ )
+
+def DocFileTest(path, module_relative=True, package=None,
+ globs=None, parser=DocTestParser(), **options):
+ if globs is None:
+ globs = {}
+
+ if package and not module_relative:
+ raise ValueError("Package may only be specified for module-"
+ "relative paths.")
+
+ # Relativize the path.
+ if module_relative:
+ package = _normalize_module(package)
+ path = _module_relative_path(package, path)
+
+ # Find the file and read it.
+ name = os.path.basename(path)
+ doc = open(path).read()
+
+ # Convert it to a test, and wrap it in a DocFileCase.
+ test = parser.get_doctest(doc, globs, name, path, 0)
+ return DocFileCase(test, **options)
+
+def DocFileSuite(*paths, **kw):
+ """A unittest suite for one or more doctest files.
+
+ The path to each doctest file is given as a string; the
+ interpretation of that string depends on the keyword argument
+ "module_relative".
+
+ A number of options may be provided as keyword arguments:
+
+ module_relative
+ If "module_relative" is True, then the given file paths are
+ interpreted as os-independent module-relative paths. By
+ default, these paths are relative to the calling module's
+ directory; but if the "package" argument is specified, then
+ they are relative to that package. To ensure os-independence,
+ "filename" should use "/" characters to separate path
+ segments, and may not be an absolute path (i.e., it may not
+ begin with "/").
+
+ If "module_relative" is False, then the given file paths are
+ interpreted as os-specific paths. These paths may be absolute
+ or relative (to the current working directory).
+
+ package
+ A Python package or the name of a Python package whose directory
+ should be used as the base directory for module relative paths.
+ If "package" is not specified, then the calling module's
+ directory is used as the base directory for module relative
+ filenames. It is an error to specify "package" if
+ "module_relative" is False.
+
+ setUp
+ A set-up function. This is called before running the
+ tests in each file. The setUp function will be passed a DocTest
+ object. The setUp function can access the test globals as the
+ globs attribute of the test passed.
+
+ tearDown
+ A tear-down function. This is called after running the
+ tests in each file. The tearDown function will be passed a DocTest
+ object. The tearDown function can access the test globals as the
+ globs attribute of the test passed.
+
+ globs
+ A dictionary containing initial global variables for the tests.
+
+ optionflags
+ A set of doctest option flags expressed as an integer.
+
+ parser
+ A DocTestParser (or subclass) that should be used to extract
+ tests from the files.
+ """
+ suite = unittest.TestSuite()
+
+ # We do this here so that _normalize_module is called at the right
+ # level. If it were called in DocFileTest, then this function
+ # would be the caller and we might guess the package incorrectly.
+ if kw.get('module_relative', True):
+ kw['package'] = _normalize_module(kw.get('package'))
+
+ for path in paths:
+ suite.addTest(DocFileTest(path, **kw))
+
+ return suite
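+
+# A minimal usage sketch (comment only); the file names are hypothetical
+# module-relative paths:
+#
+#   suite = DocFileSuite('setup.txt', 'queries.txt',
+#                        optionflags=REPORT_NDIFF)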
+
+######################################################################
+## 9. Debugging Support
+######################################################################
+
+def script_from_examples(s):
+ output = []
+ for piece in DocTestParser().parse(s):
+ if isinstance(piece, Example):
+ # Add the example's source code (strip trailing NL)
+ output.append(piece.source[:-1])
+ # Add the expected output:
+ want = piece.want
+ if want:
+ output.append('# Expected:')
+ output += ['## '+l for l in want.split('\n')[:-1]]
+ else:
+ # Add non-example text.
+ output += [_comment_line(l)
+ for l in piece.split('\n')[:-1]]
+
+ # Trim junk on both ends.
+ while output and output[-1] == '#':
+ output.pop()
+ while output and output[0] == '#':
+ output.pop(0)
+ # Combine the output, and return it.
+ # Add a courtesy newline to prevent exec from choking (see bug #1172785)
+ return '\n'.join(output) + '\n'
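+
+# Illustration (comment only) of the transformation: doctest text like
+#
+#   Some text.
+#   >>> x = 1 + 1
+#   >>> print x
+#   2
+#
+# comes back as the script
+#
+#   # Some text.
+#   x = 1 + 1
+#   print x
+#   # Expected:
+#   ## 2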
+
+def testsource(module, name):
+ """Extract the test sources from a doctest docstring as a script.
+
+ Provide the module (or dotted name of the module) containing the
+ test to be debugged and the name (within the module) of the object
+ with the doc string with tests to be debugged.
+ """
+ module = _normalize_module(module)
+ tests = DocTestFinder().find(module)
+ test = [t for t in tests if t.name == name]
+ if not test:
+ raise ValueError(name, "not found in tests")
+ test = test[0]
+ testsrc = script_from_examples(test.docstring)
+ return testsrc
+
+def debug_src(src, pm=False, globs=None):
+ """Debug a single doctest docstring, in argument `src`'"""
+ testsrc = script_from_examples(src)
+ debug_script(testsrc, pm, globs)
+
+def debug_script(src, pm=False, globs=None):
+ "Debug a test script. `src` is the script, as a string."
+ import pdb
+
+ # Note that tempfile.NamedTemporaryFile() cannot be used. As the
+ # docs say, a file so created cannot be opened by name a second time
+ # on modern Windows boxes, and execfile() needs to open it.
+ srcfilename = tempfile.mktemp(".py", "doctestdebug")
+ f = open(srcfilename, 'w')
+ f.write(src)
+ f.close()
+
+ try:
+ if globs:
+ globs = globs.copy()
+ else:
+ globs = {}
+
+ if pm:
+ try:
+ execfile(srcfilename, globs, globs)
+ except:
+ print sys.exc_info()[1]
+ pdb.post_mortem(sys.exc_info()[2])
+ else:
+ # Note that %r is vital here. '%s' instead can, e.g., cause
+ # backslashes to get treated as metacharacters on Windows.
+ pdb.run("execfile(%r)" % srcfilename, globs, globs)
+
+ finally:
+ os.remove(srcfilename)
+
+def debug(module, name, pm=False):
+ """Debug a single doctest docstring.
+
+ Provide the module (or dotted name of the module) containing the
+ test to be debugged and the name (within the module) of the object
+ with the docstring with tests to be debugged.
+ """
+ module = _normalize_module(module)
+ testsrc = testsource(module, name)
+ debug_script(testsrc, pm, module.__dict__)
+
+
+__test__ = {}
diff --git a/lib/spack/external/nose/failure.py b/lib/spack/external/nose/failure.py
new file mode 100644
index 0000000000..c5fabfda5e
--- /dev/null
+++ b/lib/spack/external/nose/failure.py
@@ -0,0 +1,42 @@
+import logging
+import unittest
+from traceback import format_tb
+from nose.pyversion import is_base_exception
+
+log = logging.getLogger(__name__)
+
+
+__all__ = ['Failure']
+
+
+class Failure(unittest.TestCase):
+ """Unloadable or unexecutable test.
+
+ A Failure case is placed in a test suite to indicate the presence of a
+ test that could not be loaded or executed. A common example is a test
+ module that fails to import.
+
+ """
+ __test__ = False # do not collect
+ def __init__(self, exc_class, exc_val, tb=None, address=None):
+ log.debug("A failure! %s %s %s", exc_class, exc_val, format_tb(tb))
+ self.exc_class = exc_class
+ self.exc_val = exc_val
+ self.tb = tb
+ self._address = address
+ unittest.TestCase.__init__(self)
+
+ def __str__(self):
+ return "Failure: %s (%s)" % (
+ getattr(self.exc_class, '__name__', self.exc_class), self.exc_val)
+
+ def address(self):
+ return self._address
+
+ def runTest(self):
+ if self.tb is not None:
+ if is_base_exception(self.exc_val):
+ raise self.exc_val, None, self.tb
+ raise self.exc_class, self.exc_val, self.tb
+ else:
+ raise self.exc_class(self.exc_val)
diff --git a/lib/spack/external/nose/importer.py b/lib/spack/external/nose/importer.py
new file mode 100644
index 0000000000..e677658ce6
--- /dev/null
+++ b/lib/spack/external/nose/importer.py
@@ -0,0 +1,167 @@
+"""Implements an importer that looks only in specific path (ignoring
+sys.path), and uses a per-path cache in addition to sys.modules. This is
+necessary because test modules in different directories frequently have the
+same names, which means that the first loaded would mask the rest when using
+the builtin importer.
+"""
+import logging
+import os
+import sys
+from nose.config import Config
+
+from imp import find_module, load_module, acquire_lock, release_lock
+
+log = logging.getLogger(__name__)
+
+try:
+ _samefile = os.path.samefile
+except AttributeError:
+ def _samefile(src, dst):
+ return (os.path.normcase(os.path.realpath(src)) ==
+ os.path.normcase(os.path.realpath(dst)))
+
+
+class Importer(object):
+ """An importer class that does only path-specific imports. That
+ is, the given module is not searched for on sys.path, but only at
+ the path or in the directory specified.
+ """
+ def __init__(self, config=None):
+ if config is None:
+ config = Config()
+ self.config = config
+
+ def importFromPath(self, path, fqname):
+ """Import a dotted-name package whose tail is at path. In other words,
+ given foo.bar and path/to/foo/bar.py, import foo from path/to/foo then
+ bar from path/to/foo/bar, returning bar.
+ """
+ # find the base dir of the package
+ path_parts = os.path.normpath(os.path.abspath(path)).split(os.sep)
+ name_parts = fqname.split('.')
+ if path_parts[-1] == '__init__.py':
+ path_parts.pop()
+ path_parts = path_parts[:-(len(name_parts))]
+ dir_path = os.sep.join(path_parts)
+ # then import fqname starting from that dir
+ return self.importFromDir(dir_path, fqname)
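+
+ # A minimal usage sketch (comment only); the path and dotted name
+ # are hypothetical:
+ #
+ #   imp = Importer()
+ #   bar = imp.importFromPath('/src/tests/foo/bar.py', 'foo.bar')
+ #   assert bar.__name__ == 'foo.bar'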
+
+ def importFromDir(self, dir, fqname):
+ """Import a module *only* from path, ignoring sys.path and
+ reloading if the version in sys.modules is not the one we want.
+ """
+ dir = os.path.normpath(os.path.abspath(dir))
+ log.debug("Import %s from %s", fqname, dir)
+
+ # FIXME reimplement local per-dir cache?
+
+ # special case for __main__
+ if fqname == '__main__':
+ return sys.modules[fqname]
+
+ if self.config.addPaths:
+ add_path(dir, self.config)
+
+ path = [dir]
+ parts = fqname.split('.')
+ part_fqname = ''
+ mod = parent = fh = None
+
+ for part in parts:
+ if part_fqname == '':
+ part_fqname = part
+ else:
+ part_fqname = "%s.%s" % (part_fqname, part)
+ try:
+ acquire_lock()
+ log.debug("find module part %s (%s) in %s",
+ part, part_fqname, path)
+ fh, filename, desc = find_module(part, path)
+ old = sys.modules.get(part_fqname)
+ if old is not None:
+ # test modules frequently have name overlap; make sure
+ # we get a fresh copy of anything we are trying to load
+ # from a new path
+ log.debug("sys.modules has %s as %s", part_fqname, old)
+ if (self.sameModule(old, filename)
+ or (self.config.firstPackageWins and
+ getattr(old, '__path__', None))):
+ mod = old
+ else:
+ del sys.modules[part_fqname]
+ mod = load_module(part_fqname, fh, filename, desc)
+ else:
+ mod = load_module(part_fqname, fh, filename, desc)
+ finally:
+ if fh:
+ fh.close()
+ release_lock()
+ if parent:
+ setattr(parent, part, mod)
+ if hasattr(mod, '__path__'):
+ path = mod.__path__
+ parent = mod
+ return mod
+
+ def _dirname_if_file(self, filename):
+ # We only take the dirname if we have a path to a non-dir,
+ # because taking the dirname of a symlink to a directory does not
+ # give the actual directory parent.
+ if os.path.isdir(filename):
+ return filename
+ else:
+ return os.path.dirname(filename)
+
+ def sameModule(self, mod, filename):
+ mod_paths = []
+ if hasattr(mod, '__path__'):
+ for path in mod.__path__:
+ mod_paths.append(self._dirname_if_file(path))
+ elif hasattr(mod, '__file__'):
+ mod_paths.append(self._dirname_if_file(mod.__file__))
+ else:
+ # builtin or other module-like object that
+ # doesn't have __file__; must be new
+ return False
+ new_path = self._dirname_if_file(filename)
+ for mod_path in mod_paths:
+ log.debug(
+ "module already loaded? mod: %s new: %s",
+ mod_path, new_path)
+ if _samefile(mod_path, new_path):
+ return True
+ return False
+
+
+def add_path(path, config=None):
+ """Ensure that the path, or the root of the current package (if
+ path is in a package), is in sys.path.
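+
+ A sketch of the effect, assuming a hypothetical layout in which ``pkg``
+ and ``sub`` are both packages (each contains ``__init__.py``)::
+
+     add_path('/src/pkg/sub')
+     # walks up past the packages and inserts the package root:
+     assert '/src' in sys.path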
+ """
+
+ # FIXME add any src-looking dirs seen too... need to get config for that
+
+ log.debug('Add path %s' % path)
+ if not path:
+ return []
+ added = []
+ parent = os.path.dirname(path)
+ if (parent
+ and os.path.exists(os.path.join(path, '__init__.py'))):
+ added.extend(add_path(parent, config))
+ elif not path in sys.path:
+ log.debug("insert %s into sys.path", path)
+ sys.path.insert(0, path)
+ added.append(path)
+ if config and config.srcDirs:
+ for dirname in config.srcDirs:
+ dirpath = os.path.join(path, dirname)
+ if os.path.isdir(dirpath):
+ sys.path.insert(0, dirpath)
+ added.append(dirpath)
+ return added
+
+
+def remove_path(path):
+ log.debug('Remove path %s' % path)
+ if path in sys.path:
+ sys.path.remove(path)
diff --git a/lib/spack/external/nose/inspector.py b/lib/spack/external/nose/inspector.py
new file mode 100644
index 0000000000..a6c4a3e3b6
--- /dev/null
+++ b/lib/spack/external/nose/inspector.py
@@ -0,0 +1,207 @@
+"""Simple traceback introspection. Used to add additional information to
+AssertionErrors in tests, so that failure messages may be more informative.
+"""
+import inspect
+import logging
+import re
+import sys
+import textwrap
+import tokenize
+
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from StringIO import StringIO
+
+log = logging.getLogger(__name__)
+
+def inspect_traceback(tb):
+ """Inspect a traceback and its frame, returning source for the expression
+ where the exception was raised, with simple variable replacement performed
+ and the line on which the exception was raised marked with '>>'.
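+
+ A hedged sketch of the result: for a failing ``assert x == y`` with
+ ``x = 1`` and ``y = 2`` in the frame, the returned source resembles::
+
+     >> assert 1 == 2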
+ """
+ log.debug('inspect traceback %s', tb)
+
+ # we only want the innermost frame, where the exception was raised
+ while tb.tb_next:
+ tb = tb.tb_next
+
+ frame = tb.tb_frame
+ lines, exc_line = tbsource(tb)
+
+ # figure out the set of lines to grab.
+ inspect_lines, mark_line = find_inspectable_lines(lines, exc_line)
+ src = StringIO(textwrap.dedent(''.join(inspect_lines)))
+ exp = Expander(frame.f_locals, frame.f_globals)
+
+ while inspect_lines:
+ try:
+ for tok in tokenize.generate_tokens(src.readline):
+ exp(*tok)
+ except tokenize.TokenError, e:
+ # this can happen if our inspectable region happens to butt up
+ # against the end of a construct like a docstring with the closing
+ # """ on separate line
+ log.debug("Tokenizer error: %s", e)
+ inspect_lines.pop(0)
+ mark_line -= 1
+ src = StringIO(textwrap.dedent(''.join(inspect_lines)))
+ exp = Expander(frame.f_locals, frame.f_globals)
+ continue
+ break
+ padded = []
+ if exp.expanded_source:
+ exp_lines = exp.expanded_source.split('\n')
+ ep = 0
+ for line in exp_lines:
+ if ep == mark_line:
+ padded.append('>> ' + line)
+ else:
+ padded.append(' ' + line)
+ ep += 1
+ return '\n'.join(padded)
+
+
+def tbsource(tb, context=6):
+ """Get source from a traceback object.
+
+ A tuple of two things is returned: a list of lines of context from
+ the source code, and the index of the current line within that list.
+ The optional second argument specifies the number of lines of context
+ to return, which are centered around the current line.
+
+ .. Note ::
+ This is adapted from inspect.py in the python 2.4 standard library,
+ since a bug in the 2.3 version of inspect prevents it from correctly
+ locating source lines in a traceback frame.
+ """
+
+ lineno = tb.tb_lineno
+ frame = tb.tb_frame
+
+ if context > 0:
+ start = lineno - 1 - context//2
+ log.debug("lineno: %s start: %s", lineno, start)
+
+ try:
+ lines, dummy = inspect.findsource(frame)
+ except IOError:
+ lines, index = [''], 0
+ else:
+ all_lines = lines
+ start = max(start, 1)
+ start = max(0, min(start, len(lines) - context))
+ lines = lines[start:start+context]
+ index = lineno - 1 - start
+
+ # python 2.5 compat: if previous line ends in a continuation,
+ # decrement start by 1 to match 2.4 behavior
+ if sys.version_info >= (2, 5) and index > 0:
+ while lines[index-1].strip().endswith('\\'):
+ start -= 1
+ lines = all_lines[start:start+context]
+ else:
+ lines, index = [''], 0
+ log.debug("tbsource lines '''%s''' around index %s", lines, index)
+ return (lines, index)
+
+
+def find_inspectable_lines(lines, pos):
+ """Find lines in home that are inspectable.
+
+ Walk back from the err line up to 3 lines, but don't walk back over
+ changes in indent level.
+
+ Walk forward up to 3 lines, counting backslash-continued lines as one.
+ Don't walk over changes in indent level (unless part of an extended line).
+ """
+ cnt = re.compile(r'\\[\s\n]*$')
+ df = re.compile(r':[\s\n]*$')
+ ind = re.compile(r'^(\s*)')
+ toinspect = []
+ home = lines[pos]
+ home_indent = ind.match(home).groups()[0]
+
+ before = lines[max(pos-3, 0):pos]
+ before.reverse()
+ after = lines[pos+1:min(pos+4, len(lines))]
+
+ for line in before:
+ if ind.match(line).groups()[0] == home_indent:
+ toinspect.append(line)
+ else:
+ break
+ toinspect.reverse()
+ toinspect.append(home)
+ home_pos = len(toinspect)-1
+ continued = cnt.search(home)
+ for line in after:
+ if ((continued or ind.match(line).groups()[0] == home_indent)
+ and not df.search(line)):
+ toinspect.append(line)
+ continued = cnt.search(line)
+ else:
+ break
+ log.debug("Inspecting lines '''%s''' around %s", toinspect, home_pos)
+ return toinspect, home_pos
+
+
+class Expander:
+ """Simple expression expander. Uses tokenize to find the names and
+ expands any that can be looked up in the frame.
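+
+ For example, with ``{'x': 1}`` as the locals, feeding the tokens of
+ ``x == 2`` through an instance leaves ``expanded_source`` reading
+ ``1 == 2``.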
+ """
+ def __init__(self, locals, globals):
+ self.locals = locals
+ self.globals = globals
+ self.lpos = None
+ self.expanded_source = ''
+
+ def __call__(self, ttype, tok, start, end, line):
+ # TODO
+ # deal with unicode properly
+
+ # TODO
+ # Dealing with instance members
+ # always keep the last thing seen
+ # if the current token is a dot,
+ # get ready to getattr(lastthing, this thing) on the
+ # next call.
+
+ if self.lpos is not None:
+ if start[1] >= self.lpos:
+ self.expanded_source += ' ' * (start[1]-self.lpos)
+ elif start[1] < self.lpos:
+ # newline, indent correctly
+ self.expanded_source += ' ' * start[1]
+ self.lpos = end[1]
+
+ if ttype == tokenize.INDENT:
+ pass
+ elif ttype == tokenize.NAME:
+ # Clean this junk up
+ try:
+ val = self.locals[tok]
+ if callable(val):
+ val = tok
+ else:
+ val = repr(val)
+ except KeyError:
+ try:
+ val = self.globals[tok]
+ if callable(val):
+ val = tok
+ else:
+ val = repr(val)
+
+ except KeyError:
+ val = tok
+ # FIXME... not sure how to handle things like funcs, classes
+ # FIXME this is broken for some unicode strings
+ self.expanded_source += val
+ else:
+ self.expanded_source += tok
+ # if this is the end of the line and the line ends with
+ # \, then tack a \ and newline onto the output
+ # print line[end[1]:]
+ if re.match(r'\s+\\\n', line[end[1]:]):
+ self.expanded_source += ' \\\n'
diff --git a/lib/spack/external/nose/loader.py b/lib/spack/external/nose/loader.py
new file mode 100644
index 0000000000..3744e54ff6
--- /dev/null
+++ b/lib/spack/external/nose/loader.py
@@ -0,0 +1,623 @@
+"""
+Test Loader
+-----------
+
+nose's test loader implements the same basic functionality as its
+superclass, unittest.TestLoader, but extends it by more liberal
+interpretations of what may be a test and how a test may be named.
+"""
+from __future__ import generators
+
+import logging
+import os
+import sys
+import unittest
+import types
+from inspect import isfunction
+from nose.pyversion import unbound_method, ismethod
+from nose.case import FunctionTestCase, MethodTestCase
+from nose.failure import Failure
+from nose.config import Config
+from nose.importer import Importer, add_path, remove_path
+from nose.selector import defaultSelector, TestAddress
+from nose.util import func_lineno, getpackage, isclass, isgenerator, \
+ ispackage, regex_last_key, resolve_name, transplant_func, \
+ transplant_class, test_address
+from nose.suite import ContextSuiteFactory, ContextList, LazySuite
+from nose.pyversion import sort_list, cmp_to_key
+
+
+log = logging.getLogger(__name__)
+#log.setLevel(logging.DEBUG)
+
+# for efficiency and easier mocking
+op_normpath = os.path.normpath
+op_abspath = os.path.abspath
+op_join = os.path.join
+op_isdir = os.path.isdir
+op_isfile = os.path.isfile
+
+
+__all__ = ['TestLoader', 'defaultTestLoader']
+
+
+class TestLoader(unittest.TestLoader):
+ """Test loader that extends unittest.TestLoader to:
+
+ * Load tests from test-like functions and classes that are not
+ unittest.TestCase subclasses
+ * Find and load test modules in a directory
+ * Support tests that are generators
+ * Support easy extensions of or changes to that behavior through plugins
+ """
+ config = None
+ importer = None
+ workingDir = None
+ selector = None
+ suiteClass = None
+
+ def __init__(self, config=None, importer=None, workingDir=None,
+ selector=None):
+ """Initialize a test loader.
+
+ Parameters (all optional):
+
+ * config: provide a `nose.config.Config`_ or other config class
+ instance; if not provided a `nose.config.Config`_ with
+ default values is used.
+ * importer: provide an importer instance that implements
+ `importFromPath`. If not provided, a
+ `nose.importer.Importer`_ is used.
+ * workingDir: the directory to which file and module names are
+ relative. If not provided, assumed to be the current working
+ directory.
+ * selector: a selector class or instance. If a class is
+ provided, it will be instantiated with one argument, the
+ current config. If not provided, a `nose.selector.Selector`_
+ is used.
+ """
+ if config is None:
+ config = Config()
+ if importer is None:
+ importer = Importer(config=config)
+ if workingDir is None:
+ workingDir = config.workingDir
+ if selector is None:
+ selector = defaultSelector(config)
+ elif isclass(selector):
+ selector = selector(config)
+ self.config = config
+ self.importer = importer
+ self.workingDir = op_normpath(op_abspath(workingDir))
+ self.selector = selector
+ if config.addPaths:
+ add_path(workingDir, config)
+ self.suiteClass = ContextSuiteFactory(config=config)
+
+ self._visitedPaths = set([])
+
+ unittest.TestLoader.__init__(self)
+
+ def getTestCaseNames(self, testCaseClass):
+ """Override to select with selector, unless
+ config.getTestCaseNamesCompat is True
+ """
+ if self.config.getTestCaseNamesCompat:
+ return unittest.TestLoader.getTestCaseNames(self, testCaseClass)
+
+ def wanted(attr, cls=testCaseClass, sel=self.selector):
+ item = getattr(cls, attr, None)
+ if isfunction(item):
+ item = unbound_method(cls, item)
+ elif not ismethod(item):
+ return False
+ return sel.wantMethod(item)
+
+ cases = filter(wanted, dir(testCaseClass))
+
+ # add runTest if nothing else picked
+ if not cases and hasattr(testCaseClass, 'runTest'):
+ cases = ['runTest']
+ if self.sortTestMethodsUsing:
+ sort_list(cases, cmp_to_key(self.sortTestMethodsUsing))
+ return cases
+
+ def _haveVisited(self, path):
+ # For cases where path is None, we always pretend we haven't visited
+ # them.
+ if path is None:
+ return False
+
+ return path in self._visitedPaths
+
+ def _addVisitedPath(self, path):
+ if path is not None:
+ self._visitedPaths.add(path)
+
+ def loadTestsFromDir(self, path):
+ """Load tests from the directory at path. This is a generator
+ -- each suite of tests from a module or other file is yielded
+ and is expected to be executed before the next file is
+ examined.
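+
+ A hedged usage sketch (the test directory path is hypothetical)::
+
+     import unittest
+     loader = TestLoader()
+     result = unittest.TestResult()
+     for suite in loader.loadTestsFromDir('/path/to/tests'):
+         suite.run(result)  # run each suite before the next is loaded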
+ """
+ log.debug("load from dir %s", path)
+ plugins = self.config.plugins
+ plugins.beforeDirectory(path)
+ if self.config.addPaths:
+ paths_added = add_path(path, self.config)
+
+ entries = os.listdir(path)
+ sort_list(entries, regex_last_key(self.config.testMatch))
+ for entry in entries:
+ # this hard-coded initial-dot test will be removed:
+ # http://code.google.com/p/python-nose/issues/detail?id=82
+ if entry.startswith('.'):
+ continue
+ entry_path = op_abspath(op_join(path, entry))
+ is_file = op_isfile(entry_path)
+ wanted = False
+ if is_file:
+ is_dir = False
+ wanted = self.selector.wantFile(entry_path)
+ else:
+ is_dir = op_isdir(entry_path)
+ if is_dir:
+ # this hard-coded initial-underscore test will be removed:
+ # http://code.google.com/p/python-nose/issues/detail?id=82
+ if entry.startswith('_'):
+ continue
+ wanted = self.selector.wantDirectory(entry_path)
+ is_package = ispackage(entry_path)
+
+ # Python 3.3 now implements PEP 420: Implicit Namespace Packages.
+ # As a result, it's now possible that parent paths that have a
+ # segment with the same basename as our package ends up
+ # in module.__path__. So we have to keep track of what we've
+ # visited, and not-revisit them again.
+ if wanted and not self._haveVisited(entry_path):
+ self._addVisitedPath(entry_path)
+ if is_file:
+ plugins.beforeContext()
+ if entry.endswith('.py'):
+ yield self.loadTestsFromName(
+ entry_path, discovered=True)
+ else:
+ yield self.loadTestsFromFile(entry_path)
+ plugins.afterContext()
+ elif is_package:
+ # Load the entry as a package: given the full path,
+ # loadTestsFromName() will figure it out
+ yield self.loadTestsFromName(
+ entry_path, discovered=True)
+ else:
+ # Another test dir in this one: recurse lazily
+ yield self.suiteClass(
+ lambda: self.loadTestsFromDir(entry_path))
+ tests = []
+ for test in plugins.loadTestsFromDir(path):
+ tests.append(test)
+ # TODO: is this try/except needed?
+ try:
+ if tests:
+ yield self.suiteClass(tests)
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except:
+ yield self.suiteClass([Failure(*sys.exc_info())])
+
+ # pop paths
+ if self.config.addPaths:
+ for p in paths_added:
+ remove_path(p)
+ plugins.afterDirectory(path)
+
+ def loadTestsFromFile(self, filename):
+ """Load tests from a non-module file. Default is to raise a
+ ValueError; plugins may implement `loadTestsFromFile` to
+ provide a list of tests loaded from the file.
+ """
+ log.debug("Load from non-module file %s", filename)
+ try:
+ tests = [test for test in
+ self.config.plugins.loadTestsFromFile(filename)]
+ if tests:
+ # Plugins can yield False to indicate that they were
+ # unable to load tests from a file, but it was not an
+ # error -- the file just had no tests to load.
+ tests = filter(None, tests)
+ return self.suiteClass(tests)
+ else:
+ # Nothing was able to even try to load from this file
+ open(filename, 'r').close() # trigger os error
+ raise ValueError("Unable to load tests from file %s"
+ % filename)
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except:
+ exc = sys.exc_info()
+ return self.suiteClass(
+ [Failure(exc[0], exc[1], exc[2],
+ address=(filename, None, None))])
+
+ def loadTestsFromGenerator(self, generator, module):
+ """Lazy-load tests from a generator function. The generator function
+ may yield either:
+
+ * a callable, or
+ * a function name resolvable within the same module
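+
+ A sketch of such a generator (``check_even`` is a hypothetical helper
+ defined in the same module)::
+
+     def test_evens():
+         for i in range(0, 6, 2):
+             yield check_even, i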
+ """
+ def generate(g=generator, m=module):
+ try:
+ for test in g():
+ test_func, arg = self.parseGeneratedTest(test)
+ if not callable(test_func):
+ test_func = getattr(m, test_func)
+ yield FunctionTestCase(test_func, arg=arg, descriptor=g)
+ except KeyboardInterrupt:
+ raise
+ except:
+ exc = sys.exc_info()
+ yield Failure(exc[0], exc[1], exc[2],
+ address=test_address(generator))
+ return self.suiteClass(generate, context=generator, can_split=False)
+
+ def loadTestsFromGeneratorMethod(self, generator, cls):
+ """Lazy-load tests from a generator method.
+
+ This is more complicated than loading from a generator function,
+ since a generator method may yield:
+
+ * a function
+ * a bound or unbound method, or
+ * a method name
+ """
+ # convert the unbound generator method
+ # into a bound method so it can be called below
+ if hasattr(generator, 'im_class'):
+ cls = generator.im_class
+ inst = cls()
+ method = generator.__name__
+ generator = getattr(inst, method)
+
+ def generate(g=generator, c=cls):
+ try:
+ for test in g():
+ test_func, arg = self.parseGeneratedTest(test)
+ if not callable(test_func):
+ test_func = unbound_method(c, getattr(c, test_func))
+ if ismethod(test_func):
+ yield MethodTestCase(test_func, arg=arg, descriptor=g)
+ elif callable(test_func):
+ # In this case we're forcing the 'MethodTestCase'
+ # to run the inline function as its test call,
+ # but using the generator method as the 'method of
+ # record' (so no need to pass it as the descriptor)
+ yield MethodTestCase(g, test=test_func, arg=arg)
+ else:
+ yield Failure(
+ TypeError,
+ "%s is not a callable or method" % test_func)
+ except KeyboardInterrupt:
+ raise
+ except:
+ exc = sys.exc_info()
+ yield Failure(exc[0], exc[1], exc[2],
+ address=test_address(generator))
+ return self.suiteClass(generate, context=generator, can_split=False)
+
+ def loadTestsFromModule(self, module, path=None, discovered=False):
+ """Load all tests from module and return a suite containing
+ them. If the module has been discovered and is not test-like,
+ the suite will be empty by default, though plugins may add
+ their own tests.
+ """
+ log.debug("Load from module %s", module)
+ tests = []
+ test_classes = []
+ test_funcs = []
+ # For *discovered* modules, we only load tests when the module looks
+ # testlike. For modules we've been directed to load, we always
+ # look for tests. (discovered is set to True by loadTestsFromDir)
+ if not discovered or self.selector.wantModule(module):
+ for item in dir(module):
+ test = getattr(module, item, None)
+ # print "Check %s (%s) in %s" % (item, test, module.__name__)
+ if isclass(test):
+ if self.selector.wantClass(test):
+ test_classes.append(test)
+ elif isfunction(test) and self.selector.wantFunction(test):
+ test_funcs.append(test)
+ sort_list(test_classes, lambda x: x.__name__)
+ sort_list(test_funcs, func_lineno)
+ tests = map(lambda t: self.makeTest(t, parent=module),
+ test_classes + test_funcs)
+
+ # Now, descend into packages
+ # FIXME can or should this be lazy?
+ # is this syntax 2.2 compatible?
+ module_paths = getattr(module, '__path__', [])
+
+ if path:
+ path = os.path.normcase(os.path.realpath(path))
+
+ for module_path in module_paths:
+ log.debug("Load tests from module path %s?", module_path)
+ log.debug("path: %s os.path.realpath(%s): %s",
+ path, os.path.normcase(module_path),
+ os.path.realpath(os.path.normcase(module_path)))
+ if (self.config.traverseNamespace or not path) or \
+ os.path.realpath(
+ os.path.normcase(module_path)).startswith(path):
+ # Egg files can be on sys.path, so make sure the path is a
+ # directory before trying to load from it.
+ if os.path.isdir(module_path):
+ tests.extend(self.loadTestsFromDir(module_path))
+
+ for test in self.config.plugins.loadTestsFromModule(module, path):
+ tests.append(test)
+
+ return self.suiteClass(ContextList(tests, context=module))
+
+ def loadTestsFromName(self, name, module=None, discovered=False):
+ """Load tests from the entity with the given name.
+
+ The name may indicate a file, directory, module, or any object
+ within a module. See `nose.util.split_test_name` for details on
+ test name parsing.
+ """
+ # FIXME refactor this method into little bites?
+ log.debug("load from %s (%s)", name, module)
+
+ suite = self.suiteClass
+
+ # give plugins first crack
+ plug_tests = self.config.plugins.loadTestsFromName(name, module)
+ if plug_tests:
+ return suite(plug_tests)
+
+ addr = TestAddress(name, workingDir=self.workingDir)
+ if module:
+ # Two cases:
+ # name is class.foo
+ # The addr will be incorrect, since it thinks class.foo is
+ # a dotted module name. It's actually a dotted attribute
+ # name. In this case we want to use the full submitted
+ # name as the name to load from the module.
+ # name is module:class.foo
+ # The addr will be correct. The part we want is the part after
+ # the :, which is in addr.call.
+ if addr.call:
+ name = addr.call
+ parent, obj = self.resolve(name, module)
+ if (isclass(parent)
+ and getattr(parent, '__module__', None) != module.__name__
+ and not isinstance(obj, Failure)):
+ parent = transplant_class(parent, module.__name__)
+ obj = getattr(parent, obj.__name__)
+ log.debug("parent %s obj %s module %s", parent, obj, module)
+ if isinstance(obj, Failure):
+ return suite([obj])
+ else:
+ return suite(ContextList([self.makeTest(obj, parent)],
+ context=parent))
+ else:
+ if addr.module:
+ try:
+ if addr.filename is None:
+ module = resolve_name(addr.module)
+ else:
+ self.config.plugins.beforeImport(
+ addr.filename, addr.module)
+ # FIXME: to support module.name names,
+ # do what resolve-name does and keep trying to
+ # import, popping tail of module into addr.call,
+ # until we either get an import or run out of
+ # module parts
+ try:
+ module = self.importer.importFromPath(
+ addr.filename, addr.module)
+ finally:
+ self.config.plugins.afterImport(
+ addr.filename, addr.module)
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except:
+ exc = sys.exc_info()
+ return suite([Failure(exc[0], exc[1], exc[2],
+ address=addr.totuple())])
+ if addr.call:
+ return self.loadTestsFromName(addr.call, module)
+ else:
+ return self.loadTestsFromModule(
+ module, addr.filename,
+ discovered=discovered)
+ elif addr.filename:
+ path = addr.filename
+ if addr.call:
+ package = getpackage(path)
+ if package is None:
+ return suite([
+ Failure(ValueError,
+ "Can't find callable %s in file %s: "
+ "file is not a python module" %
+ (addr.call, path),
+ address=addr.totuple())])
+ return self.loadTestsFromName(addr.call, module=package)
+ else:
+ if op_isdir(path):
+ # In this case we *can* be lazy since we know
+ # that each module in the dir will be fully
+ # loaded before its tests are executed; we
+ # also know that we're not going to be asked
+ # to load from . and ./some_module.py *as part
+ # of this named test load*
+ return LazySuite(
+ lambda: self.loadTestsFromDir(path))
+ elif op_isfile(path):
+ return self.loadTestsFromFile(path)
+ else:
+ return suite([
+ Failure(OSError, "No such file %s" % path,
+ address=addr.totuple())])
+ else:
+ # just a function? what to do? I think it can only be
+ # handled when module is not None
+ return suite([
+ Failure(ValueError, "Unresolvable test name %s" % name,
+ address=addr.totuple())])
+
+ def loadTestsFromNames(self, names, module=None):
+ """Load tests from all names, returning a suite containing all
+ tests.
+ """
+ plug_res = self.config.plugins.loadTestsFromNames(names, module)
+ if plug_res:
+ suite, names = plug_res
+ if suite:
+ return self.suiteClass([
+ self.suiteClass(suite),
+ unittest.TestLoader.loadTestsFromNames(self, names, module)
+ ])
+ return unittest.TestLoader.loadTestsFromNames(self, names, module)
+
+ def loadTestsFromTestCase(self, testCaseClass):
+ """Load tests from a unittest.TestCase subclass.
+ """
+ cases = []
+ plugins = self.config.plugins
+ for case in plugins.loadTestsFromTestCase(testCaseClass):
+ cases.append(case)
+ # For efficiency in the most common case, just call and return from
+ # super. This avoids having to extract cases and rebuild a context
+ # suite when there are no plugin-contributed cases.
+ if not cases:
+ return super(TestLoader, self).loadTestsFromTestCase(testCaseClass)
+ cases.extend(
+ [case for case in
+ super(TestLoader, self).loadTestsFromTestCase(testCaseClass)])
+ return self.suiteClass(cases)
+
+ def loadTestsFromTestClass(self, cls):
+ """Load tests from a test class that is *not* a unittest.TestCase
+ subclass.
+
+ In this case, we can't depend on the class's `__init__` taking method
+ name arguments, so we have to compose a MethodTestCase for each
+ method in the class that looks testlike.
+ """
+ def wanted(attr, cls=cls, sel=self.selector):
+ item = getattr(cls, attr, None)
+ if isfunction(item):
+ item = unbound_method(cls, item)
+ elif not ismethod(item):
+ return False
+ return sel.wantMethod(item)
+ cases = [self.makeTest(getattr(cls, case), cls)
+ for case in filter(wanted, dir(cls))]
+ for test in self.config.plugins.loadTestsFromTestClass(cls):
+ cases.append(test)
+ return self.suiteClass(ContextList(cases, context=cls))
+
+ def makeTest(self, obj, parent=None):
+ try:
+ return self._makeTest(obj, parent)
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except:
+ exc = sys.exc_info()
+ try:
+ addr = test_address(obj)
+ except KeyboardInterrupt:
+ raise
+ except:
+ addr = None
+ return Failure(exc[0], exc[1], exc[2], address=addr)
+
+ def _makeTest(self, obj, parent=None):
+ """Given a test object and its parent, return a test case
+ or test suite.
+ """
+ plug_tests = []
+ try:
+ addr = test_address(obj)
+ except KeyboardInterrupt:
+ raise
+ except:
+ addr = None
+ for test in self.config.plugins.makeTest(obj, parent):
+ plug_tests.append(test)
+ # TODO: is this try/except needed?
+ try:
+ if plug_tests:
+ return self.suiteClass(plug_tests)
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except:
+ exc = sys.exc_info()
+ return Failure(exc[0], exc[1], exc[2], address=addr)
+
+ if isfunction(obj) and parent and not isinstance(parent, types.ModuleType):
+ # This is a Python 3.x 'unbound method'. Wrap it with its
+ # associated class..
+ obj = unbound_method(parent, obj)
+
+ if isinstance(obj, unittest.TestCase):
+ return obj
+ elif isclass(obj):
+ if parent and obj.__module__ != parent.__name__:
+ obj = transplant_class(obj, parent.__name__)
+ if issubclass(obj, unittest.TestCase):
+ return self.loadTestsFromTestCase(obj)
+ else:
+ return self.loadTestsFromTestClass(obj)
+ elif ismethod(obj):
+ if parent is None:
+ parent = obj.__class__
+ if issubclass(parent, unittest.TestCase):
+ return parent(obj.__name__)
+ else:
+ if isgenerator(obj):
+ return self.loadTestsFromGeneratorMethod(obj, parent)
+ else:
+ return MethodTestCase(obj)
+ elif isfunction(obj):
+ if parent and obj.__module__ != parent.__name__:
+ obj = transplant_func(obj, parent.__name__)
+ if isgenerator(obj):
+ return self.loadTestsFromGenerator(obj, parent)
+ else:
+ return FunctionTestCase(obj)
+ else:
+ return Failure(TypeError,
+ "Can't make a test from %s" % obj,
+ address=addr)
+
+ def resolve(self, name, module):
+ """Resolve name within module
+ """
+ obj = module
+ parts = name.split('.')
+ for part in parts:
+ parent, obj = obj, getattr(obj, part, None)
+ if obj is None:
+ # no such test
+ obj = Failure(ValueError, "No such test %s" % name)
+ return parent, obj
+
+ def parseGeneratedTest(self, test):
+ """Given the yield value of a test generator, return a func and args.
+
+ This is used in the two loadTestsFromGenerator* methods.
+
+ """
+ if not isinstance(test, tuple): # yield test
+ test_func, arg = (test, tuple())
+ elif len(test) == 1: # yield (test,)
+ test_func, arg = (test[0], tuple())
+ else: # yield test, foo, bar, ...
+ assert len(test) > 1 # sanity check
+ test_func, arg = (test[0], test[1:])
+ return test_func, arg
+
+defaultTestLoader = TestLoader
+
diff --git a/lib/spack/external/nose/plugins/__init__.py b/lib/spack/external/nose/plugins/__init__.py
new file mode 100644
index 0000000000..08ee8f3230
--- /dev/null
+++ b/lib/spack/external/nose/plugins/__init__.py
@@ -0,0 +1,190 @@
+"""
+Writing Plugins
+---------------
+
+nose supports plugins for test collection, selection, observation and
+reporting. There are two basic rules for plugins:
+
+* Plugin classes should subclass :class:`nose.plugins.Plugin`.
+
+* Plugins may implement any of the methods described in the class
+ :doc:`IPluginInterface <interface>` in nose.plugins.base. Please note that
+ this class is for documentation purposes only; plugins may not subclass
+ IPluginInterface.
+
+Hello World
+===========
+
+Here's a basic plugin. It doesn't do much so read on for more ideas or dive
+into the :doc:`IPluginInterface <interface>` to see all available hooks.
+
+.. code-block:: python
+
+ import logging
+ import os
+
+ from nose.plugins import Plugin
+
+ log = logging.getLogger('nose.plugins.helloworld')
+
+ class HelloWorld(Plugin):
+ name = 'helloworld'
+
+ def options(self, parser, env=os.environ):
+ super(HelloWorld, self).options(parser, env=env)
+
+ def configure(self, options, conf):
+ super(HelloWorld, self).configure(options, conf)
+ if not self.enabled:
+ return
+
+ def finalize(self, result):
+ log.info('Hello pluginized world!')
+
+Registering
+===========
+
+.. Note::
+ The following applies only to the default plugin manager. Other plugin
+ managers may use different means to locate and load plugins.
+
+For nose to find a plugin, it must be part of a package that uses
+setuptools_, and the plugin must be included in the entry points defined
+in the setup.py for the package:
+
+.. code-block:: python
+
+ setup(name='Some plugin',
+ # ...
+ entry_points = {
+ 'nose.plugins.0.10': [
+ 'someplugin = someplugin:SomePlugin'
+ ]
+ },
+ # ...
+ )
+
+Once the package is installed with ``setup.py install`` or ``setup.py
+develop``, nose will be able to load the plugin.
+
+.. _setuptools: http://peak.telecommunity.com/DevCenter/setuptools
+
+Registering a plugin without setuptools
+=======================================
+
+It is currently possible to register a plugin programmatically by
+creating a custom nose runner like this:
+
+.. code-block:: python
+
+ import nose
+ from yourplugin import YourPlugin
+
+ if __name__ == '__main__':
+ nose.main(addplugins=[YourPlugin()])
+
+Defining options
+================
+
+All plugins must implement the methods ``options(self, parser, env)``
+and ``configure(self, options, conf)``. Subclasses of nose.plugins.Plugin
+that want the standard options should call the superclass methods.
+
+nose uses optparse.OptionParser from the standard library to parse
+arguments. A plugin's ``options()`` method receives a parser
+instance. It's good form for a plugin to add only long-form options
+(--like-this) to that parser. Most
+of nose's built-in arguments get their default value from an environment
+variable.
+
+A plugin's ``configure()`` method receives the parsed ``OptionParser`` options
+object, as well as the current config object. Plugins should configure their
+behavior based on the user-selected settings, and may raise exceptions
+if the configured behavior is nonsensical.
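+
+For illustration, here is a minimal sketch of the pattern, with imports as
+in the Hello World example above (the option, environment variable, and
+attribute names here are hypothetical):
+
+.. code-block:: python
+
+    class MyPlugin(Plugin):
+        name = 'myplugin'
+
+        def options(self, parser, env=os.environ):
+            # register --with-myplugin, then add a long-form option whose
+            # default comes from an environment variable
+            super(MyPlugin, self).options(parser, env=env)
+            parser.add_option('--my-report-path', action='store',
+                              dest='my_report_path',
+                              default=env.get('NOSE_MY_REPORT_PATH'),
+                              help='Write the report here '
+                                   '[NOSE_MY_REPORT_PATH]')
+
+        def configure(self, options, conf):
+            # the superclass sets self.enabled from --with-myplugin
+            super(MyPlugin, self).configure(options, conf)
+            if not self.enabled:
+                return
+            self.report_path = options.my_report_path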
+
+Logging
+=======
+
+nose uses the logging classes from the standard library. To enable users
+to view debug messages easily, plugins should use ``logging.getLogger()`` to
+acquire a logger in the ``nose.plugins`` namespace.
+
+Recipes
+=======
+
+* Writing a plugin that monitors or controls test result output
+
+ Implement any or all of ``addError``, ``addFailure``, etc., to monitor test
+ results. If you also want to monitor output, implement
+ ``setOutputStream`` and keep a reference to the output stream. If you
+ want to prevent the builtin ``TextTestResult`` output, implement
+ ``setOutputStream`` and *return a dummy stream*. The default output will go
+ to the dummy stream, while you send your desired output to the real stream.
+
+ Example: `examples/html_plugin/htmlplug.py`_
+
+* Writing a plugin that handles exceptions
+
+ Subclass :doc:`ErrorClassPlugin <errorclasses>`.
+
+ Examples: :doc:`nose.plugins.deprecated <deprecated>`,
+ :doc:`nose.plugins.skip <skip>`
+
+* Writing a plugin that adds detail to error reports
+
+ Implement ``formatError`` and/or ``formatFailure``. The error tuple
+ you return (error class, error message, traceback) will replace the
+ original error tuple.
+
+ Examples: :doc:`nose.plugins.capture <capture>`,
+ :doc:`nose.plugins.failuredetail <failuredetail>`
+
+* Writing a plugin that loads tests from files other than python modules
+
+ Implement ``wantFile`` and ``loadTestsFromFile``. In ``wantFile``,
+ return True for files that you want to examine for tests. In
+ ``loadTestsFromFile``, for those files, return an iterable
+ containing TestCases (or yield them as you find them;
+ ``loadTestsFromFile`` may also be a generator).
+
+ Example: :doc:`nose.plugins.doctests <doctests>`
+
+* Writing a plugin that prints a report
+
+ Implement ``begin`` if you need to perform setup before testing
+ begins. Implement ``report`` and output your report to the provided stream.
+
+ Examples: :doc:`nose.plugins.cover <cover>`, :doc:`nose.plugins.prof <prof>`
+
+* Writing a plugin that selects or rejects tests
+
+ Implement any or all ``want*`` methods. Return False to reject the test
+ candidate, True to accept it -- which means that the test candidate
+ will pass through the rest of the system, so you must be prepared to
+ load tests from it if tests can't be loaded by the core loader or
+ another plugin -- and None if you don't care.
+
+ Examples: :doc:`nose.plugins.attrib <attrib>`,
+ :doc:`nose.plugins.doctests <doctests>`, :doc:`nose.plugins.testid <testid>`
+
+
+More Examples
+=============
+
+See any builtin plugin or example plugin in the examples_ directory in
+the nose source distribution. There is a list of third-party plugins
+`on jottit`_.
+
+.. _examples/html_plugin/htmlplug.py: http://python-nose.googlecode.com/svn/trunk/examples/html_plugin/htmlplug.py
+.. _examples: http://python-nose.googlecode.com/svn/trunk/examples
+.. _on jottit: http://nose-plugins.jottit.com/
+
+"""
+from nose.plugins.base import Plugin
+from nose.plugins.manager import *
+from nose.plugins.plugintest import PluginTester
+
+if __name__ == '__main__':
+ import doctest
+ doctest.testmod()
diff --git a/lib/spack/external/nose/plugins/allmodules.py b/lib/spack/external/nose/plugins/allmodules.py
new file mode 100644
index 0000000000..1ccd7773a7
--- /dev/null
+++ b/lib/spack/external/nose/plugins/allmodules.py
@@ -0,0 +1,45 @@
+"""Use the AllModules plugin by passing ``--all-modules`` or setting the
+NOSE_ALL_MODULES environment variable to enable collection and execution of
+tests in all python modules. Normal nose behavior is to look for tests only in
+modules that match testMatch.
+
+More information: :doc:`../doc_tests/test_allmodules/test_allmodules`
+
+.. warning ::
+
+ This plugin can have surprising interactions with plugins that load tests
+ from what nose normally considers non-test modules, such as
+ the :doc:`doctest plugin <doctests>`. This is because any given
+ object in a module can't be loaded both by a plugin and the normal nose
+ :class:`test loader <nose.loader.TestLoader>`. Also, if you have functions
+ or classes in non-test modules that look like tests but aren't, you will
+ likely see errors as nose attempts to run them as tests.
+
+"""
+
+import os
+from nose.plugins.base import Plugin
+
+class AllModules(Plugin):
+ """Collect tests from all python modules.
+ """
+ def options(self, parser, env):
+ """Register commandline options.
+ """
+ env_opt = 'NOSE_ALL_MODULES'
+ parser.add_option('--all-modules',
+ action="store_true",
+ dest=self.enableOpt,
+ default=env.get(env_opt),
+ help="Enable plugin %s: %s [%s]" %
+ (self.__class__.__name__, self.help(), env_opt))
+
+ def wantFile(self, file):
+ """Override to return True for all files ending with .py"""
+ # always want .py files
+ if file.endswith('.py'):
+ return True
+
+ def wantModule(self, module):
+ """Override return True for all modules"""
+ return True
diff --git a/lib/spack/external/nose/plugins/attrib.py b/lib/spack/external/nose/plugins/attrib.py
new file mode 100644
index 0000000000..3d4422a23a
--- /dev/null
+++ b/lib/spack/external/nose/plugins/attrib.py
@@ -0,0 +1,286 @@
+"""Attribute selector plugin.
+
+Oftentimes when testing you will want to select tests based on
+criteria rather than simply by filename. For example, you might want
+to run all tests except for the slow ones. You can do this with the
+Attribute selector plugin by setting attributes on your test methods.
+Here is an example:
+
+.. code-block:: python
+
+ def test_big_download():
+ import urllib
+ # commence slowness...
+
+ test_big_download.slow = 1
+
+Once you've assigned an attribute ``slow = 1`` you can exclude that
+test and all other tests having the slow attribute by running ::
+
+ $ nosetests -a '!slow'
+
+There is also a decorator available for you that will set attributes.
+Here's how to set ``slow=1`` like above with the decorator:
+
+.. code-block:: python
+
+ from nose.plugins.attrib import attr
+ @attr('slow')
+ def test_big_download():
+ import urllib
+ # commence slowness...
+
+And here's how to set an attribute with a specific value:
+
+.. code-block:: python
+
+ from nose.plugins.attrib import attr
+ @attr(speed='slow')
+ def test_big_download():
+ import urllib
+ # commence slowness...
+
+This test could be run with ::
+
+ $ nosetests -a speed=slow
+
+In Python 2.6 and higher, ``@attr`` can be used on a class to set attributes
+on all its test methods at once. For example:
+
+.. code-block:: python
+
+ from nose.plugins.attrib import attr
+ @attr(speed='slow')
+ class MyTestCase:
+ def test_long_integration(self):
+ pass
+ def test_end_to_end_something(self):
+ pass
+
+Below is a reference to the different syntaxes available.
+
+Simple syntax
+-------------
+
+Examples of using the ``-a`` and ``--attr`` options:
+
+* ``nosetests -a status=stable``
+ Only runs tests with attribute "status" having value "stable"
+
+* ``nosetests -a priority=2,status=stable``
+ Runs tests having both attributes and values
+
+* ``nosetests -a priority=2 -a slow``
+ Runs tests that match either attribute
+
+* ``nosetests -a tags=http``
+ If a test's ``tags`` attribute was a list and it contained the value
+ ``http`` then it would be run
+
+* ``nosetests -a slow``
+ Runs tests with the attribute ``slow`` if its value does not equal False
+ (False, [], "", etc...)
+
+* ``nosetests -a '!slow'``
+ Runs tests that do NOT have the attribute ``slow`` or have a ``slow``
+ attribute that is equal to False
+ **NOTE**:
+ if your shell (like bash) interprets '!' as a special character, make sure
+ to put single quotes around it.
+
+Expression Evaluation
+---------------------
+
+Examples using the ``-A`` and ``--eval-attr`` options:
+
+* ``nosetests -A "not slow"``
+ Evaluates the Python expression "not slow" and runs the test if True
+
+* ``nosetests -A "(priority > 5) and not slow"``
+ Evaluates a complex Python expression and runs the test if True
+
+"""
+import inspect
+import logging
+import os
+import sys
+from inspect import isfunction
+from nose.plugins.base import Plugin
+from nose.util import tolist
+
+log = logging.getLogger('nose.plugins.attrib')
+compat_24 = sys.version_info >= (2, 4)
+
+def attr(*args, **kwargs):
+ """Decorator that adds attributes to classes or functions
+ for use with the Attribute (-a) plugin.
+ """
+ def wrap_ob(ob):
+ for name in args:
+ setattr(ob, name, True)
+ for name, value in kwargs.iteritems():
+ setattr(ob, name, value)
+ return ob
+ return wrap_ob
+
+def get_method_attr(method, cls, attr_name, default = False):
+ """Look up an attribute on a method/ function.
+ If the attribute isn't found there, looking it up in the
+ method's class, if any.
+ """
+ Missing = object()
+ value = getattr(method, attr_name, Missing)
+ if value is Missing and cls is not None:
+ value = getattr(cls, attr_name, Missing)
+ if value is Missing:
+ return default
+ return value
+
+
+class ContextHelper:
+ """Object that can act as context dictionary for eval and looks up
+ names as attributes on a method/ function and its class.
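+
+ For example, ``eval('priority > 5', None, ContextHelper(method, cls))``
+ resolves ``priority`` through get_method_attr, exactly as the --eval-attr
+ handling below does.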
+ """
+ def __init__(self, method, cls):
+ self.method = method
+ self.cls = cls
+
+ def __getitem__(self, name):
+ return get_method_attr(self.method, self.cls, name)
+
+
+class AttributeSelector(Plugin):
+ """Selects test cases to be run based on their attributes.
+ """
+
+ def __init__(self):
+ Plugin.__init__(self)
+ self.attribs = []
+
+ def options(self, parser, env):
+ """Register command line options"""
+ parser.add_option("-a", "--attr",
+ dest="attr", action="append",
+ default=env.get('NOSE_ATTR'),
+ metavar="ATTR",
+ help="Run only tests that have attributes "
+ "specified by ATTR [NOSE_ATTR]")
+ # disable in < 2.4: eval can't take needed args
+ if compat_24:
+ parser.add_option("-A", "--eval-attr",
+ dest="eval_attr", metavar="EXPR", action="append",
+ default=env.get('NOSE_EVAL_ATTR'),
+ help="Run only tests for whose attributes "
+ "the Python expression EXPR evaluates "
+ "to True [NOSE_EVAL_ATTR]")
+
+ def configure(self, options, config):
+ """Configure the plugin and system, based on selected options.
+
+ attr and eval_attr may each be lists.
+
+ self.attribs will be a list of lists of tuples. In that list, each
+ list is a group of attributes, all of which must match for the rule to
+ match.
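+
+ For example, ``-a priority=2,slow -a fast`` produces::
+
+     [[('priority', '2'), ('slow', True)], [('fast', True)]]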
+ """
+ self.attribs = []
+
+ # handle python eval-expression parameter
+ if compat_24 and options.eval_attr:
+ eval_attr = tolist(options.eval_attr)
+ for attr in eval_attr:
+ # "<python expression>"
+ # -> eval(expr) in attribute context must be True
+ def eval_in_context(expr, obj, cls):
+ return eval(expr, None, ContextHelper(obj, cls))
+ self.attribs.append([(attr, eval_in_context)])
+
+ # attribute requirements are a comma separated list of
+ # 'key=value' pairs
+ if options.attr:
+ std_attr = tolist(options.attr)
+ for attr in std_attr:
+ # all attributes within an attribute group must match
+ attr_group = []
+ for attrib in attr.strip().split(","):
+ # don't die on trailing comma
+ if not attrib:
+ continue
+ items = attrib.split("=", 1)
+ if len(items) > 1:
+ # "name=value"
+ # -> 'str(obj.name) == value' must be True
+ key, value = items
+ else:
+ key = items[0]
+ if key[0] == "!":
+ # "!name"
+ # 'bool(obj.name)' must be False
+ key = key[1:]
+ value = False
+ else:
+ # "name"
+ # -> 'bool(obj.name)' must be True
+ value = True
+ attr_group.append((key, value))
+ self.attribs.append(attr_group)
+ if self.attribs:
+ self.enabled = True
+
+ def validateAttrib(self, method, cls = None):
+ """Verify whether a method has the required attributes
+ The method is considered a match if it matches all attributes
+ for any attribute group.
+ ."""
+ # TODO: is there a need for case-sensitive value comparison?
+ any = False
+ for group in self.attribs:
+ match = True
+ for key, value in group:
+ attr = get_method_attr(method, cls, key)
+ if callable(value):
+ if not value(key, method, cls):
+ match = False
+ break
+ elif value is True:
+ # value must exist and be True
+ if not bool(attr):
+ match = False
+ break
+ elif value is False:
+ # value must not exist or be False
+ if bool(attr):
+ match = False
+ break
+ elif type(attr) in (list, tuple):
+ # value must be found in the list attribute
+ if not str(value).lower() in [str(x).lower()
+ for x in attr]:
+ match = False
+ break
+ else:
+ # value must match, convert to string and compare
+ if (value != attr
+ and str(value).lower() != str(attr).lower()):
+ match = False
+ break
+ any = any or match
+ if any:
+ # not True because we don't want to FORCE the selection of the
+ # item, only say that it is acceptable
+ return None
+ return False
+
+ def wantFunction(self, function):
+ """Accept the function if its attributes match.
+ """
+ return self.validateAttrib(function)
+
+ def wantMethod(self, method):
+ """Accept the method if its attributes match.
+ """
+ try:
+ cls = method.im_class
+ except AttributeError:
+ return False
+ return self.validateAttrib(method, cls)
diff --git a/lib/spack/external/nose/plugins/base.py b/lib/spack/external/nose/plugins/base.py
new file mode 100644
index 0000000000..f09beb696f
--- /dev/null
+++ b/lib/spack/external/nose/plugins/base.py
@@ -0,0 +1,725 @@
+import os
+import textwrap
+from optparse import OptionConflictError
+from warnings import warn
+from nose.util import tolist
+
+class Plugin(object):
+ """Base class for nose plugins. It's recommended but not *necessary* to
+ subclass this class to create a plugin, but all plugins *must* implement
+ `options(self, parser, env)` and `configure(self, options, conf)`, and
+ must have the attributes `enabled`, `name` and `score`. The `name`
+ attribute may contain hyphens ('-').
+
+ Plugins should not be enabled by default.
+
+ Subclassing Plugin (and calling the superclass methods in
+ __init__, configure, and options, if you override them) will give
+ your plugin some friendly default behavior:
+
+ * A --with-$name option will be added to the command line interface
+ to enable the plugin, and a corresponding environment variable
+ will be used as the default value. The plugin class's docstring
+ will be used as the help for this option.
+ * The plugin will not be enabled unless this option is selected by
+ the user.
+ """
+ can_configure = False
+ enabled = False
+ enableOpt = None
+ name = None
+ score = 100
+
+ def __init__(self):
+ if self.name is None:
+ self.name = self.__class__.__name__.lower()
+ if self.enableOpt is None:
+ self.enableOpt = "enable_plugin_%s" % self.name.replace('-', '_')
+
+ def addOptions(self, parser, env=None):
+ """Add command-line options for this plugin.
+
+ The base plugin class adds --with-$name by default, used to enable the
+ plugin.
+
+ .. warning :: Don't implement addOptions unless you want to override
+ all default option handling behavior, including
+ warnings for conflicting options. Implement
+ :meth:`options
+ <nose.plugins.base.IPluginInterface.options>`
+ instead.
+ """
+ self.add_options(parser, env)
+
+ def add_options(self, parser, env=None):
+ """Non-camel-case version of func name for backwards compatibility.
+
+ .. warning ::
+
+ DEPRECATED: Do not use this method,
+ use :meth:`options <nose.plugins.base.IPluginInterface.options>`
+ instead.
+
+ """
+ # FIXME raise deprecation warning if wasn't called by wrapper
+ if env is None:
+ env = os.environ
+ try:
+ self.options(parser, env)
+ self.can_configure = True
+ except OptionConflictError, e:
+ warn("Plugin %s has conflicting option string: %s and will "
+ "be disabled" % (self, e), RuntimeWarning)
+ self.enabled = False
+ self.can_configure = False
+
+ def options(self, parser, env):
+ """Register commandline options.
+
+ Implement this method for normal options behavior with protection from
+ OptionConflictErrors. If you override this method and want the default
+ --with-$name option to be registered, be sure to call super().
+ """
+ env_opt = 'NOSE_WITH_%s' % self.name.upper()
+ env_opt = env_opt.replace('-', '_')
+ parser.add_option("--with-%s" % self.name,
+ action="store_true",
+ dest=self.enableOpt,
+ default=env.get(env_opt),
+ help="Enable plugin %s: %s [%s]" %
+ (self.__class__.__name__, self.help(), env_opt))
+
+ def configure(self, options, conf):
+ """Configure the plugin and system, based on selected options.
+
+ The base plugin class sets the plugin to enabled if the enable option
+ for the plugin (self.enableOpt) is true.
+ """
+ if not self.can_configure:
+ return
+ self.conf = conf
+ if hasattr(options, self.enableOpt):
+ self.enabled = getattr(options, self.enableOpt)
+
+ def help(self):
+ """Return help for this plugin. This will be output as the help
+ section of the --with-$name option that enables the plugin.
+ """
+ if self.__class__.__doc__:
+ # doc sections are often indented; compress the spaces
+ return textwrap.dedent(self.__class__.__doc__)
+ return "(no help available)"
+
+ # Compatibility shim
+ def tolist(self, val):
+ warn("Plugin.tolist is deprecated. Use nose.util.tolist instead",
+ DeprecationWarning)
+ return tolist(val)
+
+
+class IPluginInterface(object):
+ """
+ IPluginInterface describes the plugin API. Do not subclass or use this
+ class directly.
+ """
+ def __new__(cls, *arg, **kw):
+ raise TypeError("IPluginInterface class is for documentation only")
+
+ def addOptions(self, parser, env):
+ """Called to allow plugin to register command-line options with the
+ parser. DO NOT return a value from this method unless you want to stop
+ all other plugins from setting their options.
+
+ .. warning ::
+
+ DEPRECATED -- implement
+ :meth:`options <nose.plugins.base.IPluginInterface.options>` instead.
+ """
+ pass
+ add_options = addOptions
+ add_options.deprecated = True
+
+ def addDeprecated(self, test):
+ """Called when a deprecated test is seen. DO NOT return a value
+ unless you want to stop other plugins from seeing the deprecated
+ test.
+
+ .. warning :: DEPRECATED -- check error class in addError instead
+ """
+ pass
+ addDeprecated.deprecated = True
+
+ def addError(self, test, err):
+ """Called when a test raises an uncaught exception. DO NOT return a
+ value unless you want to stop other plugins from seeing that the
+ test has raised an error.
+
+ :param test: the test case
+ :type test: :class:`nose.case.Test`
+ :param err: sys.exc_info() tuple
+ :type err: 3-tuple
+ """
+ pass
+ addError.changed = True
+
+ def addFailure(self, test, err):
+ """Called when a test fails. DO NOT return a value unless you
+ want to stop other plugins from seeing that the test has failed.
+
+ :param test: the test case
+ :type test: :class:`nose.case.Test`
+ :param err: 3-tuple
+ :type err: sys.exc_info() tuple
+ """
+ pass
+ addFailure.changed = True
+
+ def addSkip(self, test):
+ """Called when a test is skipped. DO NOT return a value unless
+ you want to stop other plugins from seeing the skipped test.
+
+ .. warning:: DEPRECATED -- check error class in addError instead
+ """
+ pass
+ addSkip.deprecated = True
+
+ def addSuccess(self, test):
+ """Called when a test passes. DO NOT return a value unless you
+ want to stop other plugins from seeing the passing test.
+
+ :param test: the test case
+ :type test: :class:`nose.case.Test`
+ """
+ pass
+ addSuccess.changed = True
+
+ def afterContext(self):
+ """Called after a context (generally a module) has been
+ lazy-loaded, imported, set up, had its tests loaded and
+ executed, and torn down.
+ """
+ pass
+ afterContext._new = True
+
+ def afterDirectory(self, path):
+ """Called after all tests have been loaded from directory at path
+ and run.
+
+ :param path: the directory that has finished processing
+ :type path: string
+ """
+ pass
+ afterDirectory._new = True
+
+ def afterImport(self, filename, module):
+ """Called after module is imported from filename. afterImport
+ is called even if the import failed.
+
+ :param filename: The file that was loaded
+ :type filename: string
+ :param module: The name of the module
+ :type module: string
+ """
+ pass
+ afterImport._new = True
+
+ def afterTest(self, test):
+ """Called after the test has been run and the result recorded
+ (after stopTest).
+
+ :param test: the test case
+ :type test: :class:`nose.case.Test`
+ """
+ pass
+ afterTest._new = True
+
+ def beforeContext(self):
+ """Called before a context (generally a module) is
+ examined. Because the context is not yet loaded, plugins don't
+ get to know what the context is; so any context operations
+ should use a stack that is pushed in `beforeContext` and popped
+ in `afterContext` to ensure they operate symmetrically.
+
+ `beforeContext` and `afterContext` are mainly useful for tracking
+ and restoring global state around possible changes from within a
+ context, whatever the context may be. If you need to operate on
+ contexts themselves, see `startContext` and `stopContext`, which
+ are passed the context in question, but are called after
+ it has been loaded (imported in the module case).
+ """
+ pass
+ beforeContext._new = True
+
+ def beforeDirectory(self, path):
+ """Called before tests are loaded from directory at path.
+
+ :param path: the directory that is about to be processed
+ """
+ pass
+ beforeDirectory._new = True
+
+ def beforeImport(self, filename, module):
+ """Called before module is imported from filename.
+
+ :param filename: The file that will be loaded
+ :param module: The name of the module found in file
+ :type module: string
+ """
+ pass
+ beforeImport._new = True
+
+ def beforeTest(self, test):
+ """Called before the test is run (before startTest).
+
+ :param test: the test case
+ :type test: :class:`nose.case.Test`
+ """
+ pass
+ beforeTest._new = True
+
+ def begin(self):
+ """Called before any tests are collected or run. Use this to
+ perform any setup needed before testing begins.
+ """
+ pass
+
+ def configure(self, options, conf):
+ """Called after the command line has been parsed, with the
+ parsed options and the config container. Here, implement any
+ config storage or changes to state or operation that are set
+ by command line options.
+
+ DO NOT return a value from this method unless you want to
+ stop all other plugins from being configured.
+ """
+ pass
+
+ def finalize(self, result):
+ """Called after all report output, including output from all
+ plugins, has been sent to the stream. Use this to print final
+ test results or perform final cleanup. Return None to allow
+ other plugins to continue printing, or any other value to stop
+ them.
+
+ :param result: test result object
+
+ .. Note:: When tests are run under a test runner other than
+ :class:`nose.core.TextTestRunner`, such as
+ via ``python setup.py test``, this method may be called
+ **before** the default report output is sent.
+ """
+ pass
+
+ def describeTest(self, test):
+ """Return a test description.
+
+ Called by :meth:`nose.case.Test.shortDescription`.
+
+ :param test: the test case
+ :type test: :class:`nose.case.Test`
+ """
+ pass
+ describeTest._new = True
+
+ def formatError(self, test, err):
+ """Called in result.addError, before plugin.addError. If you
+ want to replace or modify the error tuple, return a new error
+ tuple, otherwise return err, the original error tuple.
+
+ :param test: the test case
+ :type test: :class:`nose.case.Test`
+ :param err: sys.exc_info() tuple
+ :type err: 3-tuple
+ """
+ pass
+ formatError._new = True
+ formatError.chainable = True
+ # test arg is not chainable
+ formatError.static_args = (True, False)
+
+ def formatFailure(self, test, err):
+ """Called in result.addFailure, before plugin.addFailure. If you
+ want to replace or modify the error tuple, return a new error
+ tuple, otherwise return err, the original error tuple.
+
+ :param test: the test case
+ :type test: :class:`nose.case.Test`
+ :param err: sys.exc_info() tuple
+ :type err: 3-tuple
+ """
+ pass
+ formatFailure._new = True
+ formatFailure.chainable = True
+ # test arg is not chainable
+ formatFailure.static_args = (True, False)
+
+ def handleError(self, test, err):
+ """Called on addError. To handle the error yourself and prevent normal
+ error processing, return a true value.
+
+ :param test: the test case
+ :type test: :class:`nose.case.Test`
+ :param err: sys.exc_info() tuple
+ :type err: 3-tuple
+ """
+ pass
+ handleError._new = True
+
+ def handleFailure(self, test, err):
+ """Called on addFailure. To handle the failure yourself and
+ prevent normal failure processing, return a true value.
+
+ :param test: the test case
+ :type test: :class:`nose.case.Test`
+ :param err: sys.exc_info() tuple
+ :type err: 3-tuple
+ """
+ pass
+ handleFailure._new = True
+
+ def loadTestsFromDir(self, path):
+ """Return iterable of tests from a directory. May be a
+ generator. Each item returned must be a runnable
+ unittest.TestCase (or subclass) instance or suite instance.
+ Return None if your plugin cannot collect any tests from
+ the directory.
+
+ :param path: The path to the directory.
+ """
+ pass
+ loadTestsFromDir.generative = True
+ loadTestsFromDir._new = True
+
+ def loadTestsFromModule(self, module, path=None):
+ """Return iterable of tests in a module. May be a
+ generator. Each item returned must be a runnable
+ unittest.TestCase (or subclass) instance.
+ Return None if your plugin cannot
+ collect any tests from the module.
+
+ :param module: The module object
+ :type module: python module
+ :param path: the path of the module to search, to distinguish from
+ namespace package modules
+
+ .. note::
+
+ NEW. The ``path`` parameter will only be passed by nose 0.11
+ or above.
+ """
+ pass
+ loadTestsFromModule.generative = True
+
+ def loadTestsFromName(self, name, module=None, importPath=None):
+ """Return tests in this file or module. Return None if you are not able
+ to load any tests, or an iterable if you are. May be a
+ generator.
+
+ :param name: The test name. May be a file or module name plus a test
+ callable. Use split_test_name to split into parts. Or it might
+ be some crazy name of your own devising, in which case, do
+ whatever you want.
+ :param module: Module from which the name is to be loaded
+        :param importPath: Path from which the file (must be a python
+            module) was found
+
+ .. warning:: DEPRECATED: this argument will NOT be passed.
+ """
+ pass
+ loadTestsFromName.generative = True
+
+ def loadTestsFromNames(self, names, module=None):
+ """Return a tuple of (tests loaded, remaining names). Return
+ None if you are not able to load any tests. Multiple plugins
+ may implement loadTestsFromNames; the remaining name list from
+ each will be passed to the next as input.
+
+ :param names: List of test names.
+ :type names: iterable
+ :param module: Module from which the names are to be loaded
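+
+        A sketch of the expected (tests loaded, remaining names) shape;
+        the ``myapp.`` prefix and the saved ``self.loader`` (captured in
+        prepareTestLoader) are illustrative assumptions::
+
+            def loadTestsFromNames(self, names, module=None):
+                mine = [n for n in names if n.startswith('myapp.')]
+                rest = [n for n in names if not n.startswith('myapp.')]
+                return (self.loader.loadTestsFromNames(mine), rest)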
+ """
+ pass
+ loadTestsFromNames._new = True
+ loadTestsFromNames.chainable = True
+
+ def loadTestsFromFile(self, filename):
+ """Return tests in this file. Return None if you are not
+ interested in loading any tests, or an iterable if you are and
+ can load some. May be a generator. *If you are interested in
+ loading tests from the file and encounter no errors, but find
+ no tests, yield False or return [False].*
+
+ .. Note:: This method replaces loadTestsFromPath from the 0.9
+ API.
+
+ :param filename: The full path to the file or directory.
+ """
+ pass
+ loadTestsFromFile.generative = True
+ loadTestsFromFile._new = True
+
+ def loadTestsFromPath(self, path):
+ """
+ .. warning:: DEPRECATED -- use loadTestsFromFile instead
+ """
+ pass
+ loadTestsFromPath.deprecated = True
+
+ def loadTestsFromTestCase(self, cls):
+ """Return tests in this test case class. Return None if you are
+ not able to load any tests, or an iterable if you are. May be a
+ generator.
+
+        :param cls: The test case class. Must be a subclass of
+            :class:`unittest.TestCase`.
+ """
+ pass
+ loadTestsFromTestCase.generative = True
+
+ def loadTestsFromTestClass(self, cls):
+ """Return tests in this test class. Class will *not* be a
+ unittest.TestCase subclass. Return None if you are not able to
+ load any tests, an iterable if you are. May be a generator.
+
+ :param cls: The test case class. Must be **not** be subclass of
+ :class:`unittest.TestCase`.
+ """
+ pass
+ loadTestsFromTestClass._new = True
+ loadTestsFromTestClass.generative = True
+
+ def makeTest(self, obj, parent):
+ """Given an object and its parent, return or yield one or more
+ test cases. Each test must be a unittest.TestCase (or subclass)
+ instance. This is called before default test loading to allow
+ plugins to load an alternate test case or cases for an
+ object. May be a generator.
+
+ :param obj: The object to be made into a test
+ :param parent: The parent of obj (eg, for a method, the class)
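+
+        A sketch (illustrative only; the ``check_`` naming convention
+        is an assumption)::
+
+            def makeTest(self, obj, parent):
+                name = getattr(obj, '__name__', '')
+                if callable(obj) and name.startswith('check_'):
+                    yield unittest.FunctionTestCase(obj)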
+ """
+ pass
+ makeTest._new = True
+ makeTest.generative = True
+
+ def options(self, parser, env):
+ """Called to allow plugin to register command line
+ options with the parser.
+
+ DO NOT return a value from this method unless you want to stop
+ all other plugins from setting their options.
+
+ :param parser: options parser instance
+        :type parser: :class:`optparse.OptionParser`
+ :param env: environment, default is os.environ
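+
+        A minimal sketch (the option and environment variable names are
+        illustrative)::
+
+            def options(self, parser, env):
+                parser.add_option('--with-myplugin', action='store_true',
+                                  dest='myplugin',
+                                  default=env.get('NOSE_WITH_MYPLUGIN'))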
+ """
+ pass
+ options._new = True
+
+ def prepareTest(self, test):
+ """Called before the test is run by the test runner. Please
+ note the article *the* in the previous sentence: prepareTest
+ is called *only once*, and is passed the test case or test
+ suite that the test runner will execute. It is *not* called
+ for each individual test case. If you return a non-None value,
+ that return value will be run as the test. Use this hook to
+ wrap or decorate the test with another function. If you need
+ to modify or wrap individual test cases, use `prepareTestCase`
+ instead.
+
+ :param test: the test case
+ :type test: :class:`nose.case.Test`
+ """
+ pass
+
+ def prepareTestCase(self, test):
+ """Prepare or wrap an individual test case. Called before
+ execution of the test. The test passed here is a
+ nose.case.Test instance; the case to be executed is in the
+ test attribute of the passed case. To modify the test to be
+ run, you should return a callable that takes one argument (the
+ test result object) -- it is recommended that you *do not*
+ side-effect the nose.case.Test instance you have been passed.
+
+ Keep in mind that when you replace the test callable you are
+ replacing the run() method of the test case -- including the
+ exception handling and result calls, etc.
+
+ :param test: the test case
+ :type test: :class:`nose.case.Test`
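+
+        A sketch of a wrapping callable (purely illustrative)::
+
+            def prepareTestCase(self, test):
+                def run(result):
+                    # before/after hooks could go here
+                    test.test(result)
+                return run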
+ """
+ pass
+ prepareTestCase._new = True
+
+ def prepareTestLoader(self, loader):
+ """Called before tests are loaded. To replace the test loader,
+ return a test loader. To allow other plugins to process the
+ test loader, return None. Only one plugin may replace the test
+ loader. Only valid when using nose.TestProgram.
+
+ :param loader: :class:`nose.loader.TestLoader`
+ (or other loader) instance
+ """
+ pass
+ prepareTestLoader._new = True
+
+ def prepareTestResult(self, result):
+ """Called before the first test is run. To use a different
+ test result handler for all tests than the given result,
+ return a test result handler. NOTE however that this handler
+ will only be seen by tests, that is, inside of the result
+ proxy system. The TestRunner and TestProgram -- whether nose's
+ or other -- will continue to see the original result
+ handler. For this reason, it is usually better to monkeypatch
+ the result (for instance, if you want to handle some
+ exceptions in a unique way). Only one plugin may replace the
+ result, but many may monkeypatch it. If you want to
+ monkeypatch and stop other plugins from doing so, monkeypatch
+ and return the patched result.
+
+ :param result: :class:`nose.result.TextTestResult`
+ (or other result) instance
+ """
+ pass
+ prepareTestResult._new = True
+
+ def prepareTestRunner(self, runner):
+ """Called before tests are run. To replace the test runner,
+ return a test runner. To allow other plugins to process the
+ test runner, return None. Only valid when using nose.TestProgram.
+
+ :param runner: :class:`nose.core.TextTestRunner`
+ (or other runner) instance
+ """
+ pass
+ prepareTestRunner._new = True
+
+ def report(self, stream):
+ """Called after all error output has been printed. Print your
+ plugin's report to the provided stream. Return None to allow
+ other plugins to print reports, any other value to stop them.
+
+ :param stream: stream object; send your output here
+ :type stream: file-like object
+ """
+ pass
+
+ def setOutputStream(self, stream):
+ """Called before test output begins. To direct test output to a
+ new stream, return a stream object, which must implement a
+ `write(msg)` method. If you only want to note the stream, not
+ capture or redirect it, then return None.
+
+ :param stream: stream object; send your output here
+ :type stream: file-like object
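+
+        A sketch of a conforming stream that tees output (illustrative,
+        not part of nose)::
+
+            class Tee(object):
+                def __init__(self, *streams):
+                    self.streams = streams
+                def write(self, msg):
+                    for s in self.streams:
+                        s.write(msg)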
+ """
+
+ def startContext(self, context):
+ """Called before context setup and the running of tests in the
+ context. Note that tests have already been *loaded* from the
+ context before this call.
+
+        :param context: the context about to be set up. May be a module or
+ class, or any other object that contains tests.
+ """
+ pass
+ startContext._new = True
+
+ def startTest(self, test):
+ """Called before each test is run. DO NOT return a value unless
+ you want to stop other plugins from seeing the test start.
+
+ :param test: the test case
+ :type test: :class:`nose.case.Test`
+ """
+ pass
+
+ def stopContext(self, context):
+ """Called after the tests in a context have run and the
+ context has been torn down.
+
+ :param context: the context that has been torn down. May be a module or
+ class, or any other object that contains tests.
+ """
+ pass
+ stopContext._new = True
+
+ def stopTest(self, test):
+ """Called after each test is run. DO NOT return a value unless
+ you want to stop other plugins from seeing that the test has stopped.
+
+ :param test: the test case
+ :type test: :class:`nose.case.Test`
+ """
+ pass
+
+ def testName(self, test):
+ """Return a short test name. Called by `nose.case.Test.__str__`.
+
+ :param test: the test case
+ :type test: :class:`nose.case.Test`
+ """
+ pass
+ testName._new = True
+
+ def wantClass(self, cls):
+ """Return true if you want the main test selector to collect
+ tests from this class, false if you don't, and None if you don't
+ care.
+
+ :param cls: The class being examined by the selector
+ """
+ pass
+
+ def wantDirectory(self, dirname):
+ """Return true if you want test collection to descend into this
+ directory, false if you do not, and None if you don't care.
+
+ :param dirname: Full path to directory being examined by the selector
+ """
+ pass
+
+ def wantFile(self, file):
+ """Return true if you want to collect tests from this file,
+ false if you do not and None if you don't care.
+
+ Change from 0.9: The optional package parameter is no longer passed.
+
+ :param file: Full path to file being examined by the selector
+ """
+ pass
+
+ def wantFunction(self, function):
+ """Return true to collect this function as a test, false to
+ prevent it from being collected, and None if you don't care.
+
+ :param function: The function object being examined by the selector
+ """
+ pass
+
+ def wantMethod(self, method):
+ """Return true to collect this method as a test, false to
+ prevent it from being collected, and None if you don't care.
+
+ :param method: The method object being examined by the selector
+ :type method: unbound method
+ """
+ pass
+
+ def wantModule(self, module):
+ """Return true if you want to collection to descend into this
+ module, false to prevent the collector from descending into the
+ module, and None if you don't care.
+
+ :param module: The module object being examined by the selector
+ :type module: python module
+ """
+ pass
+
+ def wantModuleTests(self, module):
+ """
+ .. warning:: DEPRECATED -- this method will not be called, it has
+ been folded into wantModule.
+ """
+ pass
+ wantModuleTests.deprecated = True
+
diff --git a/lib/spack/external/nose/plugins/builtin.py b/lib/spack/external/nose/plugins/builtin.py
new file mode 100644
index 0000000000..4fcc0018ad
--- /dev/null
+++ b/lib/spack/external/nose/plugins/builtin.py
@@ -0,0 +1,34 @@
+"""
+Lists builtin plugins.
+"""
+plugins = []
+builtins = (
+ ('nose.plugins.attrib', 'AttributeSelector'),
+ ('nose.plugins.capture', 'Capture'),
+ ('nose.plugins.logcapture', 'LogCapture'),
+ ('nose.plugins.cover', 'Coverage'),
+ ('nose.plugins.debug', 'Pdb'),
+ ('nose.plugins.deprecated', 'Deprecated'),
+ ('nose.plugins.doctests', 'Doctest'),
+ ('nose.plugins.isolate', 'IsolationPlugin'),
+ ('nose.plugins.failuredetail', 'FailureDetail'),
+ ('nose.plugins.prof', 'Profile'),
+ ('nose.plugins.skip', 'Skip'),
+ ('nose.plugins.testid', 'TestId'),
+ ('nose.plugins.multiprocess', 'MultiProcess'),
+ ('nose.plugins.xunit', 'Xunit'),
+ ('nose.plugins.allmodules', 'AllModules'),
+ ('nose.plugins.collect', 'CollectOnly'),
+ )
+
+for module, cls in builtins:
+ try:
+ plugmod = __import__(module, globals(), locals(), [cls])
+ except KeyboardInterrupt:
+ raise
+ except:
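+        # tolerate any other import failure: skip the broken plugin
+        # and keep loading the remaining builtins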
+ continue
+ plug = getattr(plugmod, cls)
+ plugins.append(plug)
+ globals()[cls] = plug
+
diff --git a/lib/spack/external/nose/plugins/capture.py b/lib/spack/external/nose/plugins/capture.py
new file mode 100644
index 0000000000..fa4e5dcaaf
--- /dev/null
+++ b/lib/spack/external/nose/plugins/capture.py
@@ -0,0 +1,115 @@
+"""
+This plugin captures stdout during test execution. If the test fails
+or raises an error, the captured output will be appended to the error
+or failure output. It is enabled by default but can be disabled with
+the options ``-s`` or ``--nocapture``.
+
+:Options:
+ ``--nocapture``
+ Don't capture stdout (any stdout output will be printed immediately)
+
+"""
+import logging
+import os
+import sys
+from nose.plugins.base import Plugin
+from nose.pyversion import exc_to_unicode, force_unicode
+from nose.util import ln
+from StringIO import StringIO
+
+
+log = logging.getLogger(__name__)
+
+class Capture(Plugin):
+ """
+ Output capture plugin. Enabled by default. Disable with ``-s`` or
+ ``--nocapture``. This plugin captures stdout during test execution,
+ appending any output captured to the error or failure output,
+ should the test fail or raise an error.
+ """
+ enabled = True
+ env_opt = 'NOSE_NOCAPTURE'
+ name = 'capture'
+ score = 1600
+
+ def __init__(self):
+ self.stdout = []
+ self._buf = None
+
+ def options(self, parser, env):
+ """Register commandline options
+ """
+ parser.add_option(
+ "-s", "--nocapture", action="store_false",
+ default=not env.get(self.env_opt), dest="capture",
+ help="Don't capture stdout (any stdout output "
+ "will be printed immediately) [NOSE_NOCAPTURE]")
+
+ def configure(self, options, conf):
+ """Configure plugin. Plugin is enabled by default.
+ """
+ self.conf = conf
+ if not options.capture:
+ self.enabled = False
+
+ def afterTest(self, test):
+ """Clear capture buffer.
+ """
+ self.end()
+ self._buf = None
+
+ def begin(self):
+ """Replace sys.stdout with capture buffer.
+ """
+ self.start() # get an early handle on sys.stdout
+
+ def beforeTest(self, test):
+ """Flush capture buffer.
+ """
+ self.start()
+
+ def formatError(self, test, err):
+ """Add captured output to error report.
+ """
+ test.capturedOutput = output = self.buffer
+ self._buf = None
+ if not output:
+            # Don't return None, as that would prevent other
+            # formatters from running and would discard earlier
+            # formatters' output; instead, return the err we got
+ return err
+ ec, ev, tb = err
+ return (ec, self.addCaptureToErr(ev, output), tb)
+
+ def formatFailure(self, test, err):
+ """Add captured output to failure report.
+ """
+ return self.formatError(test, err)
+
+ def addCaptureToErr(self, ev, output):
+ ev = exc_to_unicode(ev)
+ output = force_unicode(output)
+ return u'\n'.join([ev, ln(u'>> begin captured stdout <<'),
+ output, ln(u'>> end captured stdout <<')])
+
+ def start(self):
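+        # push the current stdout so end() can restore it; a stack
+        # lets begin/beforeTest nest safely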
+ self.stdout.append(sys.stdout)
+ self._buf = StringIO()
+ sys.stdout = self._buf
+
+ def end(self):
+ if self.stdout:
+ sys.stdout = self.stdout.pop()
+
+ def finalize(self, result):
+ """Restore stdout.
+ """
+ while self.stdout:
+ self.end()
+
+ def _get_buffer(self):
+ if self._buf is not None:
+ return self._buf.getvalue()
+
+ buffer = property(_get_buffer, None, None,
+ """Captured stdout output.""")
diff --git a/lib/spack/external/nose/plugins/collect.py b/lib/spack/external/nose/plugins/collect.py
new file mode 100644
index 0000000000..6f9f0faa77
--- /dev/null
+++ b/lib/spack/external/nose/plugins/collect.py
@@ -0,0 +1,94 @@
+"""
+This plugin bypasses the actual execution of tests, and instead just collects
+test names. Fixtures are also bypassed, so running nosetests with the
+collection plugin enabled should be very quick.
+
+This plugin is useful in combination with the testid plugin (``--with-id``).
+Run both together to get an indexed list of all tests, which will enable you to
+run individual tests by index number.
+
+This plugin is also useful for counting tests in a test suite, and making
+people watching your demo think all of your tests pass.
+"""
+from nose.plugins.base import Plugin
+from nose.case import Test
+import logging
+import unittest
+
+log = logging.getLogger(__name__)
+
+
+class CollectOnly(Plugin):
+ """
+ Collect and output test names only, don't run any tests.
+ """
+ name = "collect-only"
+ enableOpt = 'collect_only'
+
+ def options(self, parser, env):
+ """Register commandline options.
+ """
+ parser.add_option('--collect-only',
+ action='store_true',
+ dest=self.enableOpt,
+ default=env.get('NOSE_COLLECT_ONLY'),
+ help="Enable collect-only: %s [COLLECT_ONLY]" %
+ (self.help()))
+
+ def prepareTestLoader(self, loader):
+ """Install collect-only suite class in TestLoader.
+ """
+ # Disable context awareness
+ log.debug("Preparing test loader")
+ loader.suiteClass = TestSuiteFactory(self.conf)
+
+ def prepareTestCase(self, test):
+ """Replace actual test with dummy that always passes.
+ """
+ # Return something that always passes
+ log.debug("Preparing test case %s", test)
+ if not isinstance(test, Test):
+ return
+ def run(result):
+ # We need to make these plugin calls because there won't be
+ # a result proxy, due to using a stripped-down test suite
+ self.conf.plugins.startTest(test)
+ result.startTest(test)
+ self.conf.plugins.addSuccess(test)
+ result.addSuccess(test)
+ self.conf.plugins.stopTest(test)
+ result.stopTest(test)
+ return run
+
+
+class TestSuiteFactory:
+ """
+ Factory for producing configured test suites.
+ """
+ def __init__(self, conf):
+ self.conf = conf
+
+ def __call__(self, tests=(), **kw):
+ return TestSuite(tests, conf=self.conf)
+
+
+class TestSuite(unittest.TestSuite):
+ """
+ Basic test suite that bypasses most proxy and plugin calls, but does
+ wrap tests in a nose.case.Test so prepareTestCase will be called.
+ """
+ def __init__(self, tests=(), conf=None):
+ self.conf = conf
+ # Exec lazy suites: makes discovery depth-first
+ if callable(tests):
+ tests = tests()
+ log.debug("TestSuite(%r)", tests)
+ unittest.TestSuite.__init__(self, tests)
+
+ def addTest(self, test):
+ log.debug("Add test %s", test)
+ if isinstance(test, unittest.TestSuite):
+ self._tests.append(test)
+ else:
+ self._tests.append(Test(test, config=self.conf))
+
diff --git a/lib/spack/external/nose/plugins/cover.py b/lib/spack/external/nose/plugins/cover.py
new file mode 100644
index 0000000000..fbe2e30dcd
--- /dev/null
+++ b/lib/spack/external/nose/plugins/cover.py
@@ -0,0 +1,271 @@
+"""If you have Ned Batchelder's coverage_ module installed, you may activate a
+coverage report with the ``--with-coverage`` switch or NOSE_WITH_COVERAGE
+environment variable. The coverage report will cover any python source module
+imported after the start of the test run, excluding modules that match
+testMatch. If you want to include those modules too, use the ``--cover-tests``
+switch, or set the NOSE_COVER_TESTS environment variable to a true value. To
+restrict the coverage report to modules from a particular package or packages,
+use the ``--cover-package`` switch or the NOSE_COVER_PACKAGE environment
+variable.
+
+.. _coverage: http://www.nedbatchelder.com/code/modules/coverage.html
+"""
+import logging
+import re
+import sys
+import StringIO
+from nose.plugins.base import Plugin
+from nose.util import src, tolist
+
+log = logging.getLogger(__name__)
+
+
+class Coverage(Plugin):
+ """
+ Activate a coverage report using Ned Batchelder's coverage module.
+ """
+ coverTests = False
+ coverPackages = None
+ coverInstance = None
+ coverErase = False
+ coverMinPercentage = None
+ score = 200
+ status = {}
+
+ def options(self, parser, env):
+ """
+ Add options to command line.
+ """
+ super(Coverage, self).options(parser, env)
+ parser.add_option("--cover-package", action="append",
+ default=env.get('NOSE_COVER_PACKAGE'),
+ metavar="PACKAGE",
+ dest="cover_packages",
+ help="Restrict coverage output to selected packages "
+ "[NOSE_COVER_PACKAGE]")
+ parser.add_option("--cover-erase", action="store_true",
+ default=env.get('NOSE_COVER_ERASE'),
+ dest="cover_erase",
+ help="Erase previously collected coverage "
+ "statistics before run")
+ parser.add_option("--cover-tests", action="store_true",
+ dest="cover_tests",
+ default=env.get('NOSE_COVER_TESTS'),
+ help="Include test modules in coverage report "
+ "[NOSE_COVER_TESTS]")
+ parser.add_option("--cover-min-percentage", action="store",
+ dest="cover_min_percentage",
+ default=env.get('NOSE_COVER_MIN_PERCENTAGE'),
+ help="Minimum percentage of coverage for tests "
+ "to pass [NOSE_COVER_MIN_PERCENTAGE]")
+ parser.add_option("--cover-inclusive", action="store_true",
+ dest="cover_inclusive",
+ default=env.get('NOSE_COVER_INCLUSIVE'),
+ help="Include all python files under working "
+ "directory in coverage report. Useful for "
+ "discovering holes in test coverage if not all "
+ "files are imported by the test suite. "
+ "[NOSE_COVER_INCLUSIVE]")
+ parser.add_option("--cover-html", action="store_true",
+ default=env.get('NOSE_COVER_HTML'),
+ dest='cover_html',
+ help="Produce HTML coverage information")
+ parser.add_option('--cover-html-dir', action='store',
+ default=env.get('NOSE_COVER_HTML_DIR', 'cover'),
+ dest='cover_html_dir',
+ metavar='DIR',
+ help='Produce HTML coverage information in dir')
+ parser.add_option("--cover-branches", action="store_true",
+ default=env.get('NOSE_COVER_BRANCHES'),
+ dest="cover_branches",
+ help="Include branch coverage in coverage report "
+ "[NOSE_COVER_BRANCHES]")
+ parser.add_option("--cover-xml", action="store_true",
+ default=env.get('NOSE_COVER_XML'),
+ dest="cover_xml",
+ help="Produce XML coverage information")
+ parser.add_option("--cover-xml-file", action="store",
+ default=env.get('NOSE_COVER_XML_FILE', 'coverage.xml'),
+ dest="cover_xml_file",
+ metavar="FILE",
+ help="Produce XML coverage information in file")
+
+ def configure(self, options, conf):
+ """
+ Configure plugin.
+ """
+ try:
+ self.status.pop('active')
+ except KeyError:
+ pass
+ super(Coverage, self).configure(options, conf)
+ if self.enabled:
+ try:
+ import coverage
+ if not hasattr(coverage, 'coverage'):
+ raise ImportError("Unable to import coverage module")
+ except ImportError:
+ log.error("Coverage not available: "
+ "unable to import coverage module")
+ self.enabled = False
+ return
+ self.conf = conf
+ self.coverErase = options.cover_erase
+ self.coverTests = options.cover_tests
+ self.coverPackages = []
+ if options.cover_packages:
+ if isinstance(options.cover_packages, (list, tuple)):
+ cover_packages = options.cover_packages
+ else:
+ cover_packages = [options.cover_packages]
+ for pkgs in [tolist(x) for x in cover_packages]:
+ self.coverPackages.extend(pkgs)
+ self.coverInclusive = options.cover_inclusive
+ if self.coverPackages:
+ log.info("Coverage report will include only packages: %s",
+ self.coverPackages)
+ self.coverHtmlDir = None
+ if options.cover_html:
+ self.coverHtmlDir = options.cover_html_dir
+ log.debug('Will put HTML coverage report in %s', self.coverHtmlDir)
+ self.coverBranches = options.cover_branches
+ self.coverXmlFile = None
+ if options.cover_min_percentage:
+ self.coverMinPercentage = int(options.cover_min_percentage.rstrip('%'))
+ if options.cover_xml:
+ self.coverXmlFile = options.cover_xml_file
+ log.debug('Will put XML coverage report in %s', self.coverXmlFile)
+ if self.enabled:
+ self.status['active'] = True
+ self.coverInstance = coverage.coverage(auto_data=False,
+ branch=self.coverBranches, data_suffix=conf.worker,
+ source=self.coverPackages)
+ self.coverInstance._warn_no_data = False
+ self.coverInstance.is_worker = conf.worker
+ self.coverInstance.exclude('#pragma[: ]+[nN][oO] [cC][oO][vV][eE][rR]')
+
+ log.debug("Coverage begin")
+ self.skipModules = sys.modules.keys()[:]
+ if self.coverErase:
+ log.debug("Clearing previously collected coverage statistics")
+ self.coverInstance.combine()
+ self.coverInstance.erase()
+
+ if not self.coverInstance.is_worker:
+ self.coverInstance.load()
+ self.coverInstance.start()
+
+
+ def beforeTest(self, *args, **kwargs):
+ """
+ Begin recording coverage information.
+ """
+
+ if self.coverInstance.is_worker:
+ self.coverInstance.load()
+ self.coverInstance.start()
+
+ def afterTest(self, *args, **kwargs):
+ """
+ Stop recording coverage information.
+ """
+
+ if self.coverInstance.is_worker:
+ self.coverInstance.stop()
+ self.coverInstance.save()
+
+
+ def report(self, stream):
+ """
+ Output code coverage report.
+ """
+ log.debug("Coverage report")
+ self.coverInstance.stop()
+ self.coverInstance.combine()
+ self.coverInstance.save()
+ modules = [module
+ for name, module in sys.modules.items()
+ if self.wantModuleCoverage(name, module)]
+ log.debug("Coverage report will cover modules: %s", modules)
+ self.coverInstance.report(modules, file=stream)
+
+ import coverage
+ if self.coverHtmlDir:
+ log.debug("Generating HTML coverage report")
+ try:
+ self.coverInstance.html_report(modules, self.coverHtmlDir)
+ except coverage.misc.CoverageException, e:
+ log.warning("Failed to generate HTML report: %s" % str(e))
+
+ if self.coverXmlFile:
+ log.debug("Generating XML coverage report")
+ try:
+ self.coverInstance.xml_report(modules, self.coverXmlFile)
+ except coverage.misc.CoverageException, e:
+ log.warning("Failed to generate XML report: %s" % str(e))
+
+ # make sure we have minimum required coverage
+ if self.coverMinPercentage:
+ f = StringIO.StringIO()
+ self.coverInstance.report(modules, file=f)
+
+ multiPackageRe = (r'-------\s\w+\s+\d+\s+\d+(?:\s+\d+\s+\d+)?'
+ r'\s+(\d+)%\s+\d*\s{0,1}$')
+ singlePackageRe = (r'-------\s[\w./]+\s+\d+\s+\d+(?:\s+\d+\s+\d+)?'
+ r'\s+(\d+)%(?:\s+[-\d, ]+)\s{0,1}$')
+
+ m = re.search(multiPackageRe, f.getvalue())
+ if m is None:
+ m = re.search(singlePackageRe, f.getvalue())
+
+ if m:
+ percentage = int(m.groups()[0])
+ if percentage < self.coverMinPercentage:
+ log.error('TOTAL Coverage did not reach minimum '
+ 'required: %d%%' % self.coverMinPercentage)
+ sys.exit(1)
+ else:
+ log.error("No total percentage was found in coverage output, "
+ "something went wrong.")
+
+
+ def wantModuleCoverage(self, name, module):
+ if not hasattr(module, '__file__'):
+ log.debug("no coverage of %s: no __file__", name)
+ return False
+ module_file = src(module.__file__)
+ if not module_file or not module_file.endswith('.py'):
+ log.debug("no coverage of %s: not a python file", name)
+ return False
+ if self.coverPackages:
+ for package in self.coverPackages:
+ if (re.findall(r'^%s\b' % re.escape(package), name)
+ and (self.coverTests
+ or not self.conf.testMatch.search(name))):
+ log.debug("coverage for %s", name)
+ return True
+ if name in self.skipModules:
+ log.debug("no coverage for %s: loaded before coverage start",
+ name)
+ return False
+ if self.conf.testMatch.search(name) and not self.coverTests:
+ log.debug("no coverage for %s: is a test", name)
+ return False
+ # accept any package that passed the previous tests, unless
+ # coverPackages is on -- in that case, if we wanted this
+ # module, we would have already returned True
+ return not self.coverPackages
+
+ def wantFile(self, file, package=None):
+ """If inclusive coverage enabled, return true for all source files
+ in wanted packages.
+ """
+ if self.coverInclusive:
+ if file.endswith(".py"):
+ if package and self.coverPackages:
+ for want in self.coverPackages:
+ if package.startswith(want):
+ return True
+ else:
+ return True
+ return None
diff --git a/lib/spack/external/nose/plugins/debug.py b/lib/spack/external/nose/plugins/debug.py
new file mode 100644
index 0000000000..78243e60d0
--- /dev/null
+++ b/lib/spack/external/nose/plugins/debug.py
@@ -0,0 +1,67 @@
+"""
+This plugin provides ``--pdb`` and ``--pdb-failures`` options. The ``--pdb``
+option will drop the test runner into pdb when it encounters an error. To
+drop into pdb on failure, use ``--pdb-failures``.
+"""
+
+import pdb
+import sys
+from nose.plugins.base import Plugin
+
+class Pdb(Plugin):
+ """
+ Provides --pdb and --pdb-failures options that cause the test runner to
+ drop into pdb if it encounters an error or failure, respectively.
+ """
+ enabled_for_errors = False
+ enabled_for_failures = False
+ score = 5 # run last, among builtins
+
+ def options(self, parser, env):
+ """Register commandline options.
+ """
+ parser.add_option(
+ "--pdb", action="store_true", dest="debugBoth",
+ default=env.get('NOSE_PDB', False),
+ help="Drop into debugger on failures or errors")
+ parser.add_option(
+ "--pdb-failures", action="store_true",
+ dest="debugFailures",
+ default=env.get('NOSE_PDB_FAILURES', False),
+ help="Drop into debugger on failures")
+ parser.add_option(
+ "--pdb-errors", action="store_true",
+ dest="debugErrors",
+ default=env.get('NOSE_PDB_ERRORS', False),
+ help="Drop into debugger on errors")
+
+ def configure(self, options, conf):
+ """Configure which kinds of exceptions trigger plugin.
+ """
+ self.conf = conf
+ self.enabled_for_errors = options.debugErrors or options.debugBoth
+ self.enabled_for_failures = options.debugFailures or options.debugBoth
+ self.enabled = self.enabled_for_failures or self.enabled_for_errors
+
+ def addError(self, test, err):
+ """Enter pdb if configured to debug errors.
+ """
+ if not self.enabled_for_errors:
+ return
+ self.debug(err)
+
+ def addFailure(self, test, err):
+ """Enter pdb if configured to debug failures.
+ """
+ if not self.enabled_for_failures:
+ return
+ self.debug(err)
+
+ def debug(self, err):
+ ec, ev, tb = err
+ stdout = sys.stdout
+ sys.stdout = sys.__stdout__
+ try:
+ pdb.post_mortem(tb)
+ finally:
+ sys.stdout = stdout
diff --git a/lib/spack/external/nose/plugins/deprecated.py b/lib/spack/external/nose/plugins/deprecated.py
new file mode 100644
index 0000000000..461a26be63
--- /dev/null
+++ b/lib/spack/external/nose/plugins/deprecated.py
@@ -0,0 +1,45 @@
+"""
+This plugin installs a DEPRECATED error class for the :class:`DeprecatedTest`
+exception. When :class:`DeprecatedTest` is raised, the exception will be logged
+in the deprecated attribute of the result, ``D`` or ``DEPRECATED`` (verbose)
+will be output, and the exception will not be counted as an error or failure.
+It is enabled by default, but can be turned off by using ``--no-deprecated``.
+"""
+
+from nose.plugins.errorclass import ErrorClass, ErrorClassPlugin
+
+
+class DeprecatedTest(Exception):
+ """Raise this exception to mark a test as deprecated.
+ """
+ pass
+
+
+class Deprecated(ErrorClassPlugin):
+ """
+ Installs a DEPRECATED error class for the DeprecatedTest exception. Enabled
+ by default.
+ """
+ enabled = True
+ deprecated = ErrorClass(DeprecatedTest,
+ label='DEPRECATED',
+ isfailure=False)
+
+ def options(self, parser, env):
+ """Register commandline options.
+ """
+ env_opt = 'NOSE_WITHOUT_DEPRECATED'
+ parser.add_option('--no-deprecated', action='store_true',
+ dest='noDeprecated', default=env.get(env_opt, False),
+ help="Disable special handling of DeprecatedTest "
+ "exceptions.")
+
+ def configure(self, options, conf):
+ """Configure plugin.
+ """
+ if not self.can_configure:
+ return
+ self.conf = conf
+ disable = getattr(options, 'noDeprecated', False)
+ if disable:
+ self.enabled = False
diff --git a/lib/spack/external/nose/plugins/doctests.py b/lib/spack/external/nose/plugins/doctests.py
new file mode 100644
index 0000000000..5ef65799f3
--- /dev/null
+++ b/lib/spack/external/nose/plugins/doctests.py
@@ -0,0 +1,455 @@
+"""Use the Doctest plugin with ``--with-doctest`` or the NOSE_WITH_DOCTEST
+environment variable to enable collection and execution of :mod:`doctests
+<doctest>`. Because doctests are usually included in the tested package
+(instead of being grouped into packages or modules of their own), nose only
+looks for them in the non-test packages it discovers in the working directory.
+
+Doctests may also be placed into files other than python modules, in which
+case they can be collected and executed by using the ``--doctest-extension``
+switch or NOSE_DOCTEST_EXTENSION environment variable to indicate which file
+extension(s) to load.
+
+When loading doctests from non-module files, use the ``--doctest-fixtures``
+switch to specify how to find modules containing fixtures for the tests. A
+module name will be produced by appending the value of that switch to the base
+name of each doctest file loaded. For example, a doctest file "widgets.rst"
+with the switch ``--doctest-fixtures=_fixt`` will load fixtures from the module
+``widgets_fixt.py``.
+
+A fixtures module may define any or all of the following functions:
+
+* setup([module]) or setup_module([module])
+
+ Called before the test runs. You may raise SkipTest to skip all tests.
+
+* teardown([module]) or teardown_module([module])
+
+ Called after the test runs, if setup/setup_module did not raise an
+ unhandled exception.
+
+* setup_test(test)
+
+ Called before the test. NOTE: the argument passed is a
+ doctest.DocTest instance, *not* a unittest.TestCase.
+
+* teardown_test(test)
+
+ Called after the test, if setup_test did not raise an exception. NOTE: the
+ argument passed is a doctest.DocTest instance, *not* a unittest.TestCase.
+
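+For example, under the ``--doctest-fixtures=_fixt`` convention above, a
+fixtures module ``widgets_fixt.py`` for ``widgets.rst`` might look like
+this sketch (the print is only illustrative)::
+
+    def setup_module(module):
+        print "setting up widgets doctests"
+
+    def teardown_test(test):
+        # 'test' is a doctest.DocTest instance
+        pass
+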
+Doctests are run like any other test, with the exception that output
+capture does not work; doctest does its own output capture while running a
+test.
+
+.. note ::
+
+ See :doc:`../doc_tests/test_doctest_fixtures/doctest_fixtures` for
+ additional documentation and examples.
+
+"""
+from __future__ import generators
+
+import logging
+import os
+import sys
+import unittest
+from inspect import getmodule
+from nose.plugins.base import Plugin
+from nose.suite import ContextList
+from nose.util import anyp, getpackage, test_address, resolve_name, \
+ src, tolist, isproperty
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from StringIO import StringIO
+import sys
+import __builtin__ as builtin_mod
+
+log = logging.getLogger(__name__)
+
+try:
+ import doctest
+ doctest.DocTestCase
+ # system version of doctest is acceptable, but needs a monkeypatch
+except (ImportError, AttributeError):
+ # system version is too old
+ import nose.ext.dtcompat as doctest
+
+
+#
+# Doctest and coverage don't get along, so we need to create
+# a monkeypatch that will replace the part of doctest that
+# interferes with coverage reports.
+#
+# The monkeypatch is based on this zope patch:
+# http://svn.zope.org/Zope3/trunk/src/zope/testing/doctest.py?rev=28679&r1=28703&r2=28705
+#
+_orp = doctest._OutputRedirectingPdb
+
+class NoseOutputRedirectingPdb(_orp):
+ def __init__(self, out):
+ self.__debugger_used = False
+ _orp.__init__(self, out)
+
+ def set_trace(self):
+ self.__debugger_used = True
+ _orp.set_trace(self, sys._getframe().f_back)
+
+ def set_continue(self):
+ # Calling set_continue unconditionally would break unit test
+ # coverage reporting, as Bdb.set_continue calls sys.settrace(None).
+ if self.__debugger_used:
+ _orp.set_continue(self)
+doctest._OutputRedirectingPdb = NoseOutputRedirectingPdb
+
+
+class DoctestSuite(unittest.TestSuite):
+ """
+ Doctest suites are parallelizable at the module or file level only,
+ since they may be attached to objects that are not individually
+ addressable (like properties). This suite subclass is used when
+ loading doctests from a module to ensure that behavior.
+
+ This class is used only if the plugin is not fully prepared;
+ in normal use, the loader's suiteClass is used.
+
+ """
+ can_split = False
+
+ def __init__(self, tests=(), context=None, can_split=False):
+ self.context = context
+ self.can_split = can_split
+ unittest.TestSuite.__init__(self, tests=tests)
+
+ def address(self):
+ return test_address(self.context)
+
+ def __iter__(self):
+ # 2.3 compat
+ return iter(self._tests)
+
+ def __str__(self):
+ return str(self._tests)
+
+
+class Doctest(Plugin):
+ """
+ Activate doctest plugin to find and run doctests in non-test modules.
+ """
+ extension = None
+ suiteClass = DoctestSuite
+
+ def options(self, parser, env):
+ """Register commmandline options.
+ """
+ Plugin.options(self, parser, env)
+ parser.add_option('--doctest-tests', action='store_true',
+ dest='doctest_tests',
+ default=env.get('NOSE_DOCTEST_TESTS'),
+ help="Also look for doctests in test modules. "
+ "Note that classes, methods and functions should "
+ "have either doctests or non-doctest tests, "
+ "not both. [NOSE_DOCTEST_TESTS]")
+ parser.add_option('--doctest-extension', action="append",
+ dest="doctestExtension",
+ metavar="EXT",
+ help="Also look for doctests in files with "
+ "this extension [NOSE_DOCTEST_EXTENSION]")
+ parser.add_option('--doctest-result-variable',
+ dest='doctest_result_var',
+ default=env.get('NOSE_DOCTEST_RESULT_VAR'),
+ metavar="VAR",
+ help="Change the variable name set to the result of "
+ "the last interpreter command from the default '_'. "
+ "Can be used to avoid conflicts with the _() "
+ "function used for text translation. "
+ "[NOSE_DOCTEST_RESULT_VAR]")
+ parser.add_option('--doctest-fixtures', action="store",
+ dest="doctestFixtures",
+ metavar="SUFFIX",
+ help="Find fixtures for a doctest file in module "
+ "with this name appended to the base name "
+ "of the doctest file")
+ parser.add_option('--doctest-options', action="append",
+ dest="doctestOptions",
+ metavar="OPTIONS",
+ help="Specify options to pass to doctest. " +
+ "Eg. '+ELLIPSIS,+NORMALIZE_WHITESPACE'")
+ # Set the default as a list, if given in env; otherwise
+ # an additional value set on the command line will cause
+ # an error.
+ env_setting = env.get('NOSE_DOCTEST_EXTENSION')
+ if env_setting is not None:
+ parser.set_defaults(doctestExtension=tolist(env_setting))
+
+ def configure(self, options, config):
+ """Configure plugin.
+ """
+ Plugin.configure(self, options, config)
+ self.doctest_result_var = options.doctest_result_var
+ self.doctest_tests = options.doctest_tests
+ self.extension = tolist(options.doctestExtension)
+ self.fixtures = options.doctestFixtures
+ self.finder = doctest.DocTestFinder()
+ self.optionflags = 0
+ if options.doctestOptions:
+ flags = ",".join(options.doctestOptions).split(',')
+ for flag in flags:
+ if not flag or flag[0] not in '+-':
+ raise ValueError(
+ "Must specify doctest options with starting " +
+ "'+' or '-'. Got %s" % (flag,))
+ mode, option_name = flag[0], flag[1:]
+ option_flag = doctest.OPTIONFLAGS_BY_NAME.get(option_name)
+ if not option_flag:
+ raise ValueError("Unknown doctest option %s" %
+ (option_name,))
+ if mode == '+':
+ self.optionflags |= option_flag
+ elif mode == '-':
+ self.optionflags &= ~option_flag
+
+ def prepareTestLoader(self, loader):
+ """Capture loader's suiteClass.
+
+ This is used to create test suites from doctest files.
+
+ """
+ self.suiteClass = loader.suiteClass
+
+ def loadTestsFromModule(self, module):
+ """Load doctests from the module.
+ """
+ log.debug("loading from %s", module)
+ if not self.matches(module.__name__):
+ log.debug("Doctest doesn't want module %s", module)
+ return
+ try:
+ tests = self.finder.find(module)
+ except AttributeError:
+ log.exception("Attribute error loading from %s", module)
+ # nose allows module.__test__ = False; doctest does not and throws
+ # AttributeError
+ return
+ if not tests:
+ log.debug("No tests found in %s", module)
+ return
+ tests.sort()
+ module_file = src(module.__file__)
+ # FIXME this breaks the id plugin somehow (tests probably don't
+ # get wrapped in result proxy or something)
+ cases = []
+ for test in tests:
+ if not test.examples:
+ continue
+ if not test.filename:
+ test.filename = module_file
+ cases.append(DocTestCase(test,
+ optionflags=self.optionflags,
+ result_var=self.doctest_result_var))
+ if cases:
+ yield self.suiteClass(cases, context=module, can_split=False)
+
+ def loadTestsFromFile(self, filename):
+ """Load doctests from the file.
+
+ Tests are loaded only if filename's extension matches
+ configured doctest extension.
+
+ """
+ if self.extension and anyp(filename.endswith, self.extension):
+ name = os.path.basename(filename)
+ dh = open(filename)
+ try:
+ doc = dh.read()
+ finally:
+ dh.close()
+
+ fixture_context = None
+ globs = {'__file__': filename}
+ if self.fixtures:
+ base, ext = os.path.splitext(name)
+ dirname = os.path.dirname(filename)
+ sys.path.append(dirname)
+ fixt_mod = base + self.fixtures
+ try:
+ fixture_context = __import__(
+ fixt_mod, globals(), locals(), ["nop"])
+ except ImportError, e:
+ log.debug(
+ "Could not import %s: %s (%s)", fixt_mod, e, sys.path)
+ log.debug("Fixture module %s resolved to %s",
+ fixt_mod, fixture_context)
+ if hasattr(fixture_context, 'globs'):
+ globs = fixture_context.globs(globs)
+ parser = doctest.DocTestParser()
+ test = parser.get_doctest(
+ doc, globs=globs, name=name,
+ filename=filename, lineno=0)
+ if test.examples:
+ case = DocFileCase(
+ test,
+ optionflags=self.optionflags,
+ setUp=getattr(fixture_context, 'setup_test', None),
+ tearDown=getattr(fixture_context, 'teardown_test', None),
+ result_var=self.doctest_result_var)
+ if fixture_context:
+ yield ContextList((case,), context=fixture_context)
+ else:
+ yield case
+ else:
+ yield False # no tests to load
+
+ def makeTest(self, obj, parent):
+ """Look for doctests in the given object, which will be a
+ function, method or class.
+ """
+        name = getattr(obj, '__name__', 'Unnamed %s' % type(obj))
+ doctests = self.finder.find(obj, module=getmodule(parent), name=name)
+ if doctests:
+ for test in doctests:
+ if len(test.examples) == 0:
+ continue
+ yield DocTestCase(test, obj=obj, optionflags=self.optionflags,
+ result_var=self.doctest_result_var)
+
+ def matches(self, name):
+ # FIXME this seems wrong -- nothing is ever going to
+ # fail this test, since we're given a module NAME not FILE
+ if name == '__init__.py':
+ return False
+ # FIXME don't think we need include/exclude checks here?
+ return ((self.doctest_tests or not self.conf.testMatch.search(name)
+ or (self.conf.include
+ and filter(None,
+ [inc.search(name)
+ for inc in self.conf.include])))
+ and (not self.conf.exclude
+ or not filter(None,
+ [exc.search(name)
+ for exc in self.conf.exclude])))
+
+ def wantFile(self, file):
+ """Override to select all modules and any file ending with
+ configured doctest extension.
+ """
+ # always want .py files
+ if file.endswith('.py'):
+ return True
+ # also want files that match my extension
+ if (self.extension
+ and anyp(file.endswith, self.extension)
+ and (not self.conf.exclude
+ or not filter(None,
+ [exc.search(file)
+ for exc in self.conf.exclude]))):
+ return True
+ return None
+
+
+class DocTestCase(doctest.DocTestCase):
+ """Overrides DocTestCase to
+ provide an address() method that returns the correct address for
+ the doctest case. To provide hints for address(), an obj may also
+ be passed -- this will be used as the test object for purposes of
+ determining the test address, if it is provided.
+ """
+ def __init__(self, test, optionflags=0, setUp=None, tearDown=None,
+ checker=None, obj=None, result_var='_'):
+ self._result_var = result_var
+ self._nose_obj = obj
+ super(DocTestCase, self).__init__(
+ test, optionflags=optionflags, setUp=setUp, tearDown=tearDown,
+ checker=checker)
+
+ def address(self):
+ if self._nose_obj is not None:
+ return test_address(self._nose_obj)
+ obj = resolve_name(self._dt_test.name)
+
+ if isproperty(obj):
+ # properties have no connection to the class they are in
+ # so we can't just look 'em up, we have to first look up
+ # the class, then stick the prop on the end
+ parts = self._dt_test.name.split('.')
+ class_name = '.'.join(parts[:-1])
+ cls = resolve_name(class_name)
+ base_addr = test_address(cls)
+ return (base_addr[0], base_addr[1],
+ '.'.join([base_addr[2], parts[-1]]))
+ else:
+ return test_address(obj)
+
+ # doctests loaded via find(obj) omit the module name
+ # so we need to override id, __repr__ and shortDescription
+    # bonus: this will squash a 2.3 vs 2.4 incompatibility
+ def id(self):
+ name = self._dt_test.name
+ filename = self._dt_test.filename
+ if filename is not None:
+ pk = getpackage(filename)
+ if pk is None:
+ return name
+ if not name.startswith(pk):
+ name = "%s.%s" % (pk, name)
+ return name
+
+ def __repr__(self):
+ name = self.id()
+ name = name.split('.')
+ return "%s (%s)" % (name[-1], '.'.join(name[:-1]))
+ __str__ = __repr__
+
+ def shortDescription(self):
+ return 'Doctest: %s' % self.id()
+
+ def setUp(self):
+ if self._result_var is not None:
+ self._old_displayhook = sys.displayhook
+ sys.displayhook = self._displayhook
+ super(DocTestCase, self).setUp()
+
+ def _displayhook(self, value):
+ if value is None:
+ return
+ setattr(builtin_mod, self._result_var, value)
+ print repr(value)
+
+ def tearDown(self):
+ super(DocTestCase, self).tearDown()
+ if self._result_var is not None:
+ sys.displayhook = self._old_displayhook
+ delattr(builtin_mod, self._result_var)
+
+
+class DocFileCase(doctest.DocFileCase):
+ """Overrides to provide address() method that returns the correct
+ address for the doc file case.
+ """
+ def __init__(self, test, optionflags=0, setUp=None, tearDown=None,
+ checker=None, result_var='_'):
+ self._result_var = result_var
+ super(DocFileCase, self).__init__(
+ test, optionflags=optionflags, setUp=setUp, tearDown=tearDown,
+            checker=checker)
+
+ def address(self):
+ return (self._dt_test.filename, None, None)
+
+ def setUp(self):
+ if self._result_var is not None:
+ self._old_displayhook = sys.displayhook
+ sys.displayhook = self._displayhook
+ super(DocFileCase, self).setUp()
+
+ def _displayhook(self, value):
+ if value is None:
+ return
+ setattr(builtin_mod, self._result_var, value)
+ print repr(value)
+
+ def tearDown(self):
+ super(DocFileCase, self).tearDown()
+ if self._result_var is not None:
+ sys.displayhook = self._old_displayhook
+ delattr(builtin_mod, self._result_var)
diff --git a/lib/spack/external/nose/plugins/errorclass.py b/lib/spack/external/nose/plugins/errorclass.py
new file mode 100644
index 0000000000..d1540e0070
--- /dev/null
+++ b/lib/spack/external/nose/plugins/errorclass.py
@@ -0,0 +1,210 @@
+"""
+ErrorClass Plugins
+------------------
+
+ErrorClass plugins provide an easy way to add support for custom
+handling of particular classes of exceptions.
+
+An ErrorClass plugin defines one or more ErrorClasses and how each is
+handled and reported on. Each error class is stored in a different
+attribute on the result, and reported separately. Each error class must
+indicate the exceptions that fall under that class, the label to use
+for reporting, and whether exceptions of the class should be
+considered as failures for the whole test run.
+
+ErrorClasses use a declarative syntax. Assign an ErrorClass to the
+attribute you wish to add to the result object, defining the
+exceptions, label and isfailure attributes. For example, to declare an
+ErrorClassPlugin that defines TodoErrors (and subclasses of TodoError)
+as an error class with the label 'TODO' that is considered a failure,
+do this:
+
+ >>> class Todo(Exception):
+ ... pass
+ >>> class TodoError(ErrorClassPlugin):
+ ... todo = ErrorClass(Todo, label='TODO', isfailure=True)
+
+The MetaErrorClass metaclass translates the ErrorClass declarations
+into the tuples used by the error handling and reporting functions in
+the result. This is an internal format and subject to change; you
+should always use the declarative syntax for attaching ErrorClasses to
+an ErrorClass plugin.
+
+ >>> TodoError.errorClasses # doctest: +ELLIPSIS
+ ((<class ...Todo...>, ('todo', 'TODO', True)),)
+
+Let's see the plugin in action. First some boilerplate.
+
+ >>> import sys
+ >>> import unittest
+ >>> try:
+ ... # 2.7+
+ ... from unittest.runner import _WritelnDecorator
+ ... except ImportError:
+ ... from unittest import _WritelnDecorator
+ ...
+ >>> buf = _WritelnDecorator(sys.stdout)
+
+Now define a test case that raises a Todo.
+
+ >>> class TestTodo(unittest.TestCase):
+ ... def runTest(self):
+ ... raise Todo("I need to test something")
+ >>> case = TestTodo()
+
+Prepare the result using our plugin. Normally this happens during the
+course of test execution within nose -- you won't be doing this
+yourself. For the purposes of this testing document, I'm stepping
+through the internal process of nose so you can see what happens at
+each step.
+
+ >>> plugin = TodoError()
+ >>> from nose.result import _TextTestResult
+ >>> result = _TextTestResult(stream=buf, descriptions=0, verbosity=2)
+ >>> plugin.prepareTestResult(result)
+
+Now run the test. TODO is printed.
+
+ >>> _ = case(result) # doctest: +ELLIPSIS
+ runTest (....TestTodo) ... TODO: I need to test something
+
+Errors and failures are empty, but todo has our test:
+
+ >>> result.errors
+ []
+ >>> result.failures
+ []
+ >>> result.todo # doctest: +ELLIPSIS
+ [(<....TestTodo testMethod=runTest>, '...Todo: I need to test something\\n')]
+ >>> result.printErrors() # doctest: +ELLIPSIS
+ <BLANKLINE>
+ ======================================================================
+ TODO: runTest (....TestTodo)
+ ----------------------------------------------------------------------
+ Traceback (most recent call last):
+ ...
+ ...Todo: I need to test something
+ <BLANKLINE>
+
+Since we defined a Todo as a failure, the run was not successful.
+
+ >>> result.wasSuccessful()
+ False
+"""
+
+from nose.pyversion import make_instancemethod
+from nose.plugins.base import Plugin
+from nose.result import TextTestResult
+from nose.util import isclass
+
+class MetaErrorClass(type):
+ """Metaclass for ErrorClassPlugins that allows error classes to be
+ set up in a declarative manner.
+ """
+ def __init__(self, name, bases, attr):
+ errorClasses = []
+ for name, detail in attr.items():
+ if isinstance(detail, ErrorClass):
+ attr.pop(name)
+ for cls in detail:
+ errorClasses.append(
+ (cls, (name, detail.label, detail.isfailure)))
+ super(MetaErrorClass, self).__init__(name, bases, attr)
+ self.errorClasses = tuple(errorClasses)
+
+
+class ErrorClass(object):
+ def __init__(self, *errorClasses, **kw):
+ self.errorClasses = errorClasses
+ try:
+ for key in ('label', 'isfailure'):
+ setattr(self, key, kw.pop(key))
+ except KeyError:
+ raise TypeError("%r is a required named argument for ErrorClass"
+ % key)
+
+ def __iter__(self):
+ return iter(self.errorClasses)
+
+
+class ErrorClassPlugin(Plugin):
+ """
+ Base class for ErrorClass plugins. Subclass this class and declare the
+ exceptions that you wish to handle as attributes of the subclass.
+ """
+ __metaclass__ = MetaErrorClass
+ score = 1000
+ errorClasses = ()
+
+ def addError(self, test, err):
+ err_cls, a, b = err
+ if not isclass(err_cls):
+ return
+ classes = [e[0] for e in self.errorClasses]
+ if filter(lambda c: issubclass(err_cls, c), classes):
+ return True
+
+ def prepareTestResult(self, result):
+ if not hasattr(result, 'errorClasses'):
+ self.patchResult(result)
+ for cls, (storage_attr, label, isfail) in self.errorClasses:
+ if cls not in result.errorClasses:
+ storage = getattr(result, storage_attr, [])
+ setattr(result, storage_attr, storage)
+ result.errorClasses[cls] = (storage, label, isfail)
+
+ def patchResult(self, result):
+ result.printLabel = print_label_patch(result)
+ result._orig_addError, result.addError = \
+ result.addError, add_error_patch(result)
+ result._orig_wasSuccessful, result.wasSuccessful = \
+ result.wasSuccessful, wassuccessful_patch(result)
+ if hasattr(result, 'printErrors'):
+ result._orig_printErrors, result.printErrors = \
+ result.printErrors, print_errors_patch(result)
+ if hasattr(result, 'addSkip'):
+ result._orig_addSkip, result.addSkip = \
+ result.addSkip, add_skip_patch(result)
+ result.errorClasses = {}
+
+
+def add_error_patch(result):
+ """Create a new addError method to patch into a result instance
+ that recognizes the errorClasses attribute and deals with
+ errorclasses correctly.
+ """
+ return make_instancemethod(TextTestResult.addError, result)
+
+
+def print_errors_patch(result):
+ """Create a new printErrors method that prints errorClasses items
+ as well.
+ """
+ return make_instancemethod(TextTestResult.printErrors, result)
+
+
+def print_label_patch(result):
+ """Create a new printLabel method that prints errorClasses items
+ as well.
+ """
+ return make_instancemethod(TextTestResult.printLabel, result)
+
+
+def wassuccessful_patch(result):
+ """Create a new wasSuccessful method that checks errorClasses for
+ exceptions that were put into other slots than error or failure
+ but that still count as not success.
+ """
+ return make_instancemethod(TextTestResult.wasSuccessful, result)
+
+
+def add_skip_patch(result):
+ """Create a new addSkip method to patch into a result instance
+ that delegates to addError.
+ """
+ return make_instancemethod(TextTestResult.addSkip, result)
+
+
+if __name__ == '__main__':
+ import doctest
+ doctest.testmod()
diff --git a/lib/spack/external/nose/plugins/failuredetail.py b/lib/spack/external/nose/plugins/failuredetail.py
new file mode 100644
index 0000000000..6462865dd0
--- /dev/null
+++ b/lib/spack/external/nose/plugins/failuredetail.py
@@ -0,0 +1,49 @@
+"""
+This plugin provides assert introspection. When the plugin is enabled
+and a test failure occurs, the traceback is displayed with extra context
+around the line in which the exception was raised. Simple variable
+substitution is also performed in the context output to provide more
+debugging information.
+"""
+
+from nose.plugins import Plugin
+from nose.pyversion import exc_to_unicode, force_unicode
+from nose.inspector import inspect_traceback
+
+class FailureDetail(Plugin):
+ """
+ Plugin that provides extra information in tracebacks of test failures.
+ """
+ score = 1600 # before capture
+
+ def options(self, parser, env):
+ """Register commmandline options.
+ """
+ parser.add_option(
+ "-d", "--detailed-errors", "--failure-detail",
+ action="store_true",
+ default=env.get('NOSE_DETAILED_ERRORS'),
+ dest="detailedErrors", help="Add detail to error"
+ " output by attempting to evaluate failed"
+ " asserts [NOSE_DETAILED_ERRORS]")
+
+ def configure(self, options, conf):
+ """Configure plugin.
+ """
+ if not self.can_configure:
+ return
+ self.enabled = options.detailedErrors
+ self.conf = conf
+
+ def formatFailure(self, test, err):
+ """Add detail from traceback inspection to error message of a failure.
+ """
+ ec, ev, tb = err
+ tbinfo, str_ev = None, exc_to_unicode(ev)
+
+ if tb:
+ tbinfo = force_unicode(inspect_traceback(tb))
+ str_ev = '\n'.join([str_ev, tbinfo])
+ test.tbinfo = tbinfo
+ return (ec, str_ev, tb)
+
diff --git a/lib/spack/external/nose/plugins/isolate.py b/lib/spack/external/nose/plugins/isolate.py
new file mode 100644
index 0000000000..13235dfbd1
--- /dev/null
+++ b/lib/spack/external/nose/plugins/isolate.py
@@ -0,0 +1,103 @@
+"""The isolation plugin resets the contents of sys.modules after running
+each test module or package. Use it by setting ``--with-isolation`` or the
+NOSE_WITH_ISOLATION environment variable.
+
+The effects are similar to wrapping the following functions around the
+import and execution of each test module::
+
+ def setup(module):
+ module._mods = sys.modules.copy()
+
+ def teardown(module):
+ to_del = [ m for m in sys.modules.keys() if m not in
+ module._mods ]
+ for mod in to_del:
+ del sys.modules[mod]
+ sys.modules.update(module._mods)
+
+Isolation works only during lazy loading. In normal use, this is only
+during discovery of modules within a directory, where the process of
+importing, loading tests and running tests from each module is
+encapsulated in a single loadTestsFromName call. This plugin
+implements loadTestsFromNames to force the same lazy-loading there,
+which allows isolation to work in directed mode as well as discovery,
+at the cost of some efficiency: lazy-loading names forces full context
+setup and teardown to run for each name, defeating the grouping that
+is normally used to ensure that context setup and teardown are run the
+fewest possible times for a given set of names.
+
+.. warning ::
+
+ This plugin should not be used in conjunction with other plugins
+ that assume that modules, once imported, will stay imported; for
+ instance, it may cause very odd results when used with the coverage
+ plugin.
+
+"""
+
+import logging
+import sys
+
+from nose.plugins import Plugin
+
+
+log = logging.getLogger('nose.plugins.isolation')
+
+class IsolationPlugin(Plugin):
+ """
+ Activate the isolation plugin to isolate changes to external
+ modules to a single test module or package. The isolation plugin
+ resets the contents of sys.modules after each test module or
+ package runs to its state before the test. PLEASE NOTE that this
+ plugin should not be used with the coverage plugin, or in any other case
+ where module reloading may produce undesirable side-effects.
+ """
+ score = 10 # I want to be last
+ name = 'isolation'
+
+ def configure(self, options, conf):
+ """Configure plugin.
+ """
+ Plugin.configure(self, options, conf)
+ self._mod_stack = []
+
+ def beforeContext(self):
+ """Copy sys.modules onto my mod stack
+ """
+ mods = sys.modules.copy()
+ self._mod_stack.append(mods)
+
+ def afterContext(self):
+ """Pop my mod stack and restore sys.modules to the state
+ it was in when mod stack was pushed.
+ """
+ mods = self._mod_stack.pop()
+ to_del = [ m for m in sys.modules.keys() if m not in mods ]
+ if to_del:
+ log.debug('removing sys modules entries: %s', to_del)
+ for mod in to_del:
+ del sys.modules[mod]
+ sys.modules.update(mods)
+
+ def loadTestsFromNames(self, names, module=None):
+ """Create a lazy suite that calls beforeContext and afterContext
+ around each name. The side-effect of this is that full context
+ fixtures will be set up and torn down around each test named.
+ """
+ # Fast path for when we don't care
+ if not names or len(names) == 1:
+ return
+ loader = self.loader
+ plugins = self.conf.plugins
+ def lazy():
+ for name in names:
+ plugins.beforeContext()
+ yield loader.loadTestsFromName(name, module=module)
+ plugins.afterContext()
+ return (loader.suiteClass(lazy), [])
+
+ def prepareTestLoader(self, loader):
+ """Get handle on test loader so we can use it in loadTestsFromNames.
+ """
+ self.loader = loader
+
diff --git a/lib/spack/external/nose/plugins/logcapture.py b/lib/spack/external/nose/plugins/logcapture.py
new file mode 100644
index 0000000000..4c9a79f6fd
--- /dev/null
+++ b/lib/spack/external/nose/plugins/logcapture.py
@@ -0,0 +1,245 @@
+"""
+This plugin captures logging statements issued during test execution. When an
+error or failure occurs, the captured log messages are attached to the running
+test in the test.capturedLogging attribute, and displayed with the error or failure
+output. It is enabled by default but can be turned off with the option
+``--nologcapture``.
+
+You can filter captured logging statements with the ``--logging-filter`` option.
+If set, it specifies which logger(s) will be captured; records from loggers
+that do not match are discarded. Matching is hierarchical, by exact name or
+dotted prefix: specifying ``--logging-filter=sqlalchemy,myapp`` ensures that
+only statements logged via the sqlalchemy (including sqlalchemy.engine),
+myapp, or myapp.foo.bar loggers are captured.
+
+You can remove other installed logging handlers with the
+``--logging-clear-handlers`` option.
+"""
+
+import logging
+from logging import Handler
+import threading
+
+from nose.plugins.base import Plugin
+from nose.util import anyp, ln, safe_str
+
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from StringIO import StringIO
+
+log = logging.getLogger(__name__)
+
+class FilterSet(object):
+ def __init__(self, filter_components):
+ self.inclusive, self.exclusive = self._partition(filter_components)
+
+ # @staticmethod
+ def _partition(components):
+ inclusive, exclusive = [], []
+ for component in components:
+ if component.startswith('-'):
+ exclusive.append(component[1:])
+ else:
+ inclusive.append(component)
+ return inclusive, exclusive
+ _partition = staticmethod(_partition)
+
+ def allow(self, record):
+ """returns whether this record should be printed"""
+ if not self:
+ # nothing to filter
+ return True
+ return self._allow(record) and not self._deny(record)
+
+ # @staticmethod
+ def _any_match(matchers, record):
+        """return whether `record` matches any item in `matchers`,
+        either exactly or as a dotted child of it"""
+ def record_matches_key(key):
+ return record == key or record.startswith(key + '.')
+ return anyp(bool, map(record_matches_key, matchers))
+ _any_match = staticmethod(_any_match)
+
+ def _allow(self, record):
+ if not self.inclusive:
+ return True
+ return self._any_match(self.inclusive, record)
+
+ def _deny(self, record):
+ if not self.exclusive:
+ return False
+ return self._any_match(self.exclusive, record)
+
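+# Illustrative sketch (not part of upstream nose): FilterSet matches logger
+# names hierarchically, exactly or by dotted prefix. For example:
+#
+#   fs = FilterSet(['myapp', '-myapp.noisy'])
+#   fs.allow('myapp.db')         # True: 'myapp.db' is under 'myapp'
+#   fs.allow('myapp.noisy.sub')  # False: excluded by '-myapp.noisy'
+#   fs.allow('otherlib')         # False: matches no inclusive filter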
+
+class MyMemoryHandler(Handler):
+ def __init__(self, logformat, logdatefmt, filters):
+ Handler.__init__(self)
+ fmt = logging.Formatter(logformat, logdatefmt)
+ self.setFormatter(fmt)
+ self.filterset = FilterSet(filters)
+ self.buffer = []
+ def emit(self, record):
+ self.buffer.append(self.format(record))
+ def flush(self):
+ pass # do nothing
+ def truncate(self):
+ self.buffer = []
+ def filter(self, record):
+ if self.filterset.allow(record.name):
+ return Handler.filter(self, record)
+ def __getstate__(self):
+ state = self.__dict__.copy()
+ del state['lock']
+ return state
+ def __setstate__(self, state):
+ self.__dict__.update(state)
+ self.lock = threading.RLock()
+
+
+class LogCapture(Plugin):
+ """
+ Log capture plugin. Enabled by default. Disable with --nologcapture.
+ This plugin captures logging statements issued during test execution,
+ appending any output captured to the error or failure output,
+ should the test fail or raise an error.
+ """
+ enabled = True
+ env_opt = 'NOSE_NOLOGCAPTURE'
+ name = 'logcapture'
+ score = 500
+ logformat = '%(name)s: %(levelname)s: %(message)s'
+ logdatefmt = None
+ clear = False
+ filters = ['-nose']
+
+ def options(self, parser, env):
+ """Register commandline options.
+ """
+ parser.add_option(
+ "--nologcapture", action="store_false",
+ default=not env.get(self.env_opt), dest="logcapture",
+ help="Disable logging capture plugin. "
+ "Logging configuration will be left intact."
+ " [NOSE_NOLOGCAPTURE]")
+ parser.add_option(
+ "--logging-format", action="store", dest="logcapture_format",
+ default=env.get('NOSE_LOGFORMAT') or self.logformat,
+ metavar="FORMAT",
+ help="Specify custom format to print statements. "
+ "Uses the same format as used by standard logging handlers."
+ " [NOSE_LOGFORMAT]")
+ parser.add_option(
+ "--logging-datefmt", action="store", dest="logcapture_datefmt",
+ default=env.get('NOSE_LOGDATEFMT') or self.logdatefmt,
+ metavar="FORMAT",
+ help="Specify custom date/time format to print statements. "
+ "Uses the same format as used by standard logging handlers."
+ " [NOSE_LOGDATEFMT]")
+ parser.add_option(
+ "--logging-filter", action="store", dest="logcapture_filters",
+ default=env.get('NOSE_LOGFILTER'),
+ metavar="FILTER",
+ help="Specify which statements to filter in/out. "
+ "By default, everything is captured. If the output is too"
+ " verbose,\nuse this option to filter out needless output.\n"
+ "Example: filter=foo will capture statements issued ONLY to\n"
+ " foo or foo.what.ever.sub but not foobar or other logger.\n"
+ "Specify multiple loggers with comma: filter=foo,bar,baz.\n"
+ "If any logger name is prefixed with a minus, eg filter=-foo,\n"
+ "it will be excluded rather than included. Default: "
+ "exclude logging messages from nose itself (-nose)."
+ " [NOSE_LOGFILTER]\n")
+ parser.add_option(
+ "--logging-clear-handlers", action="store_true",
+ default=False, dest="logcapture_clear",
+ help="Clear all other logging handlers")
+ parser.add_option(
+ "--logging-level", action="store",
+ default='NOTSET', dest="logcapture_level",
+ help="Set the log level to capture")
+
+ def configure(self, options, conf):
+ """Configure plugin.
+ """
+ self.conf = conf
+ # Disable if explicitly disabled, or if logging is
+ # configured via logging config file
+ if not options.logcapture or conf.loggingConfig:
+ self.enabled = False
+ self.logformat = options.logcapture_format
+ self.logdatefmt = options.logcapture_datefmt
+ self.clear = options.logcapture_clear
+ self.loglevel = options.logcapture_level
+ if options.logcapture_filters:
+ self.filters = options.logcapture_filters.split(',')
+
+ def setupLoghandler(self):
+ # setup our handler with root logger
+ root_logger = logging.getLogger()
+ if self.clear:
+ if hasattr(root_logger, "handlers"):
+ for handler in root_logger.handlers:
+ root_logger.removeHandler(handler)
+ for logger in logging.Logger.manager.loggerDict.values():
+ if hasattr(logger, "handlers"):
+ for handler in logger.handlers:
+ logger.removeHandler(handler)
+ # make sure there isn't one already
+ # you can't simply use "if self.handler not in root_logger.handlers"
+ # since at least in unit tests this doesn't work --
+ # LogCapture() is instantiated for each test case while root_logger
+ # is module global
+ # so we always add new MyMemoryHandler instance
+ for handler in root_logger.handlers[:]:
+ if isinstance(handler, MyMemoryHandler):
+ root_logger.handlers.remove(handler)
+ root_logger.addHandler(self.handler)
+ # to make sure everything gets captured
+ loglevel = getattr(self, "loglevel", "NOTSET")
+ root_logger.setLevel(getattr(logging, loglevel))
+
+ def begin(self):
+ """Set up logging handler before test run begins.
+ """
+ self.start()
+
+ def start(self):
+ self.handler = MyMemoryHandler(self.logformat, self.logdatefmt,
+ self.filters)
+ self.setupLoghandler()
+
+ def end(self):
+ pass
+
+ def beforeTest(self, test):
+ """Clear buffers and handlers before test.
+ """
+ self.setupLoghandler()
+
+ def afterTest(self, test):
+ """Clear buffers after test.
+ """
+ self.handler.truncate()
+
+ def formatFailure(self, test, err):
+ """Add captured log messages to failure output.
+ """
+ return self.formatError(test, err)
+
+ def formatError(self, test, err):
+ """Add captured log messages to error output.
+ """
+ # logic flow copied from Capture.formatError
+ test.capturedLogging = records = self.formatLogRecords()
+ if not records:
+ return err
+ ec, ev, tb = err
+ return (ec, self.addCaptureToErr(ev, records), tb)
+
+ def formatLogRecords(self):
+ return map(safe_str, self.handler.buffer)
+
+ def addCaptureToErr(self, ev, records):
+ return '\n'.join([safe_str(ev), ln('>> begin captured logging <<')] + \
+ records + \
+ [ln('>> end captured logging <<')])
diff --git a/lib/spack/external/nose/plugins/manager.py b/lib/spack/external/nose/plugins/manager.py
new file mode 100644
index 0000000000..4d2ed22b6f
--- /dev/null
+++ b/lib/spack/external/nose/plugins/manager.py
@@ -0,0 +1,460 @@
+"""
+Plugin Manager
+--------------
+
+A plugin manager class is used to load plugins, manage the list of
+loaded plugins, and proxy calls to those plugins.
+
+The plugin managers provided with nose are:
+
+:class:`PluginManager`
+ This manager doesn't implement loadPlugins, so it can only work
+ with a static list of plugins.
+
+:class:`BuiltinPluginManager`
+ This manager loads plugins referenced in ``nose.plugins.builtin``.
+
+:class:`EntryPointPluginManager`
+ This manager uses setuptools entrypoints to load plugins.
+
+:class:`ExtraPluginsPluginManager`
+ This manager loads extra plugins specified with the keyword
+ `addplugins`.
+
+:class:`DefaultPluginManager`
+ This is the manager class that will be used by default. If
+ setuptools is installed, it is a subclass of
+ :class:`EntryPointPluginManager` and :class:`BuiltinPluginManager`;
+ otherwise, an alias to :class:`BuiltinPluginManager`.
+
+:class:`RestrictedPluginManager`
+ This manager is for use in test runs where some plugin calls are
+ not available, such as runs started with ``python setup.py test``,
+ where the test runner is the default unittest :class:`TextTestRunner`. It
+ is a subclass of :class:`DefaultPluginManager`.
+
+Writing a plugin manager
+========================
+
+If you want to load plugins via some other means, you can write a
+plugin manager and pass an instance of your plugin manager class when
+instantiating the :class:`nose.config.Config` instance that you pass to
+:class:`TestProgram` (or :func:`main` or :func:`run`).
+
+To implement your plugin loading scheme, implement ``loadPlugins()``,
+and in that method, call ``addPlugin()`` with an instance of each plugin
+you wish to make available. Make sure to also call
+``super(YourManager, self).loadPlugins()`` if you have subclassed a manager
+other than ``PluginManager``.
+
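+For example, a minimal manager that makes a single, hard-coded plugin
+available might look like this (an illustrative sketch; ``MyPlugin`` stands
+in for your own plugin class)::
+
+    class MyPluginManager(PluginManager):
+        def loadPlugins(self):
+            # make my plugin available to nose
+            self.addPlugin(MyPlugin())
+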
+"""
+import inspect
+import logging
+import os
+import sys
+from itertools import chain as iterchain
+from warnings import warn
+import nose.config
+from nose.failure import Failure
+from nose.plugins.base import IPluginInterface
+from nose.pyversion import sort_list
+
+try:
+ import cPickle as pickle
+except:
+ import pickle
+try:
+ from cStringIO import StringIO
+except:
+ from StringIO import StringIO
+
+
+__all__ = ['DefaultPluginManager', 'PluginManager', 'EntryPointPluginManager',
+ 'BuiltinPluginManager', 'RestrictedPluginManager']
+
+log = logging.getLogger(__name__)
+
+
+class PluginProxy(object):
+ """Proxy for plugin calls. Essentially a closure bound to the
+ given call and plugin list.
+
+ The plugin proxy also must be bound to a particular plugin
+ interface specification, so that it knows what calls are available
+ and any special handling that is required for each call.
+ """
+ interface = IPluginInterface
+ def __init__(self, call, plugins):
+ try:
+ self.method = getattr(self.interface, call)
+ except AttributeError:
+ raise AttributeError("%s is not a valid %s method"
+ % (call, self.interface.__name__))
+ self.call = self.makeCall(call)
+ self.plugins = []
+ for p in plugins:
+ self.addPlugin(p, call)
+
+ def __call__(self, *arg, **kw):
+ return self.call(*arg, **kw)
+
+ def addPlugin(self, plugin, call):
+ """Add plugin to my list of plugins to call, if it has the attribute
+ I'm bound to.
+ """
+ meth = getattr(plugin, call, None)
+ if meth is not None:
+ if call == 'loadTestsFromModule' and \
+ len(inspect.getargspec(meth)[0]) == 2:
+ orig_meth = meth
+ meth = lambda module, path, **kwargs: orig_meth(module)
+ self.plugins.append((plugin, meth))
+
+ def makeCall(self, call):
+ if call == 'loadTestsFromNames':
+ # special case -- load tests from names behaves somewhat differently
+ # from other chainable calls, because plugins return a tuple, only
+ # part of which can be chained to the next plugin.
+ return self._loadTestsFromNames
+
+ meth = self.method
+ if getattr(meth, 'generative', False):
+ # call all plugins and yield a flattened iterator of their results
+ return lambda *arg, **kw: list(self.generate(*arg, **kw))
+ elif getattr(meth, 'chainable', False):
+ return self.chain
+ else:
+ # return a value from the first plugin that returns non-None
+ return self.simple
+
+ def chain(self, *arg, **kw):
+ """Call plugins in a chain, where the result of each plugin call is
+ sent to the next plugin as input. The final output result is returned.
+ """
+ result = None
+ # extract the static arguments (if any) from arg so they can
+ # be passed to each plugin call in the chain
+ static = [a for (static, a)
+ in zip(getattr(self.method, 'static_args', []), arg)
+ if static]
+ for p, meth in self.plugins:
+ result = meth(*arg, **kw)
+ arg = static[:]
+ arg.append(result)
+ return result
+
+ def generate(self, *arg, **kw):
+ """Call all plugins, yielding each item in each non-None result.
+ """
+ for p, meth in self.plugins:
+ result = None
+ try:
+ result = meth(*arg, **kw)
+ if result is not None:
+ for r in result:
+ yield r
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except:
+ exc = sys.exc_info()
+ yield Failure(*exc)
+ continue
+
+ def simple(self, *arg, **kw):
+ """Call all plugins, returning the first non-None result.
+ """
+ for p, meth in self.plugins:
+ result = meth(*arg, **kw)
+ if result is not None:
+ return result
+
+ def _loadTestsFromNames(self, names, module=None):
+ """Chainable but not quite normal. Plugins return a tuple of
+ (tests, names) after processing the names. The tests are added
+ to a suite that is accumulated throughout the full call, while
+ names are input for the next plugin in the chain.
+ """
+ suite = []
+ for p, meth in self.plugins:
+ result = meth(names, module=module)
+ if result is not None:
+ suite_part, names = result
+ if suite_part:
+ suite.extend(suite_part)
+ return suite, names
+
+
+class NoPlugins(object):
+ """Null Plugin manager that has no plugins."""
+ interface = IPluginInterface
+ def __init__(self):
+ self._plugins = self.plugins = ()
+
+ def __iter__(self):
+        # a bare tuple is not an iterator; wrap it so iter() works
+        return iter(())
+
+ def _doNothing(self, *args, **kwds):
+ pass
+
+ def _emptyIterator(self, *args, **kwds):
+ return ()
+
+ def __getattr__(self, call):
+ method = getattr(self.interface, call)
+ if getattr(method, "generative", False):
+ return self._emptyIterator
+ else:
+ return self._doNothing
+
+ def addPlugin(self, plug):
+ raise NotImplementedError()
+
+ def addPlugins(self, plugins):
+ raise NotImplementedError()
+
+ def configure(self, options, config):
+ pass
+
+ def loadPlugins(self):
+ pass
+
+ def sort(self):
+ pass
+
+
+class PluginManager(object):
+ """Base class for plugin managers. PluginManager is intended to be
+ used only with a static list of plugins. The loadPlugins() implementation
+ only reloads plugins from _extraplugins to prevent those from being
+ overridden by a subclass.
+
+ The basic functionality of a plugin manager is to proxy all unknown
+ attributes through a ``PluginProxy`` to a list of plugins.
+
+ Note that the list of plugins *may not* be changed after the first plugin
+ call.
+ """
+ proxyClass = PluginProxy
+
+ def __init__(self, plugins=(), proxyClass=None):
+ self._plugins = []
+ self._extraplugins = ()
+ self._proxies = {}
+ if plugins:
+ self.addPlugins(plugins)
+ if proxyClass is not None:
+ self.proxyClass = proxyClass
+
+ def __getattr__(self, call):
+ try:
+ return self._proxies[call]
+ except KeyError:
+ proxy = self.proxyClass(call, self._plugins)
+ self._proxies[call] = proxy
+ return proxy
+
+ def __iter__(self):
+ return iter(self.plugins)
+
+ def addPlugin(self, plug):
+ # allow, for instance, plugins loaded via entry points to
+ # supplant builtin plugins.
+ new_name = getattr(plug, 'name', object())
+ self._plugins[:] = [p for p in self._plugins
+ if getattr(p, 'name', None) != new_name]
+ self._plugins.append(plug)
+
+ def addPlugins(self, plugins=(), extraplugins=()):
+ """extraplugins are maintained in a separate list and
+ re-added by loadPlugins() to prevent their being overwritten
+ by plugins added by a subclass of PluginManager
+ """
+ self._extraplugins = extraplugins
+ for plug in iterchain(plugins, extraplugins):
+ self.addPlugin(plug)
+
+ def configure(self, options, config):
+ """Configure the set of plugins with the given options
+ and config instance. After configuration, disabled plugins
+ are removed from the plugins list.
+ """
+ log.debug("Configuring plugins")
+ self.config = config
+ cfg = PluginProxy('configure', self._plugins)
+ cfg(options, config)
+ enabled = [plug for plug in self._plugins if plug.enabled]
+ self.plugins = enabled
+ self.sort()
+ log.debug("Plugins enabled: %s", enabled)
+
+ def loadPlugins(self):
+ for plug in self._extraplugins:
+ self.addPlugin(plug)
+
+ def sort(self):
+ return sort_list(self._plugins, lambda x: getattr(x, 'score', 1), reverse=True)
+
+ def _get_plugins(self):
+ return self._plugins
+
+ def _set_plugins(self, plugins):
+ self._plugins = []
+ self.addPlugins(plugins)
+
+ plugins = property(_get_plugins, _set_plugins, None,
+ """Access the list of plugins managed by
+ this plugin manager""")
+
+
+class ZeroNinePlugin:
+ """Proxy for 0.9 plugins, adapts 0.10 calls to 0.9 standard.
+ """
+ def __init__(self, plugin):
+ self.plugin = plugin
+
+ def options(self, parser, env=os.environ):
+ self.plugin.add_options(parser, env)
+
+ def addError(self, test, err):
+ if not hasattr(self.plugin, 'addError'):
+ return
+ # switch off to addSkip, addDeprecated if those types
+ from nose.exc import SkipTest, DeprecatedTest
+ ec, ev, tb = err
+ if issubclass(ec, SkipTest):
+ if not hasattr(self.plugin, 'addSkip'):
+ return
+ return self.plugin.addSkip(test.test)
+ elif issubclass(ec, DeprecatedTest):
+ if not hasattr(self.plugin, 'addDeprecated'):
+ return
+ return self.plugin.addDeprecated(test.test)
+ # add capt
+ capt = test.capturedOutput
+ return self.plugin.addError(test.test, err, capt)
+
+ def loadTestsFromFile(self, filename):
+ if hasattr(self.plugin, 'loadTestsFromPath'):
+ return self.plugin.loadTestsFromPath(filename)
+
+ def addFailure(self, test, err):
+ if not hasattr(self.plugin, 'addFailure'):
+ return
+ # add capt and tbinfo
+ capt = test.capturedOutput
+ tbinfo = test.tbinfo
+ return self.plugin.addFailure(test.test, err, capt, tbinfo)
+
+ def addSuccess(self, test):
+ if not hasattr(self.plugin, 'addSuccess'):
+ return
+ capt = test.capturedOutput
+ self.plugin.addSuccess(test.test, capt)
+
+ def startTest(self, test):
+ if not hasattr(self.plugin, 'startTest'):
+ return
+ return self.plugin.startTest(test.test)
+
+ def stopTest(self, test):
+ if not hasattr(self.plugin, 'stopTest'):
+ return
+ return self.plugin.stopTest(test.test)
+
+ def __getattr__(self, val):
+ return getattr(self.plugin, val)
+
+
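+# A third-party plugin is typically made discoverable by declaring an entry
+# point in its setup.py, which EntryPointPluginManager then iterates. An
+# illustrative sketch (the project and class names below are placeholders,
+# not part of nose):
+#
+#   setup(
+#       name='mynoseplugin',
+#       entry_points={
+#           'nose.plugins.0.10': [
+#               'myplugin = mynoseplugin:MyPlugin',
+#           ],
+#       },
+#   )
+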
+class EntryPointPluginManager(PluginManager):
+ """Plugin manager that loads plugins from the `nose.plugins` and
+ `nose.plugins.0.10` entry points.
+ """
+ entry_points = (('nose.plugins.0.10', None),
+ ('nose.plugins', ZeroNinePlugin))
+
+ def loadPlugins(self):
+ """Load plugins by iterating the `nose.plugins` entry point.
+ """
+ from pkg_resources import iter_entry_points
+ loaded = {}
+ for entry_point, adapt in self.entry_points:
+ for ep in iter_entry_points(entry_point):
+ if ep.name in loaded:
+ continue
+ loaded[ep.name] = True
+ log.debug('%s load plugin %s', self.__class__.__name__, ep)
+ try:
+ plugcls = ep.load()
+ except KeyboardInterrupt:
+ raise
+ except Exception, e:
+ # never want a plugin load to kill the test run
+ # but we can't log here because the logger is not yet
+ # configured
+ warn("Unable to load plugin %s: %s" % (ep, e),
+ RuntimeWarning)
+ continue
+ if adapt:
+ plug = adapt(plugcls())
+ else:
+ plug = plugcls()
+ self.addPlugin(plug)
+ super(EntryPointPluginManager, self).loadPlugins()
+
+
+class BuiltinPluginManager(PluginManager):
+ """Plugin manager that loads plugins from the list in
+ `nose.plugins.builtin`.
+ """
+ def loadPlugins(self):
+ """Load plugins in nose.plugins.builtin
+ """
+ from nose.plugins import builtin
+ for plug in builtin.plugins:
+ self.addPlugin(plug())
+ super(BuiltinPluginManager, self).loadPlugins()
+
+try:
+ import pkg_resources
+ class DefaultPluginManager(EntryPointPluginManager, BuiltinPluginManager):
+ pass
+
+except ImportError:
+ class DefaultPluginManager(BuiltinPluginManager):
+ pass
+
+class RestrictedPluginManager(DefaultPluginManager):
+ """Plugin manager that restricts the plugin list to those not
+ excluded by a list of exclude methods. Any plugin that implements
+ an excluded method will be removed from the manager's plugin list
+ after plugins are loaded.
+ """
+ def __init__(self, plugins=(), exclude=(), load=True):
+ DefaultPluginManager.__init__(self, plugins)
+ self.load = load
+ self.exclude = exclude
+ self.excluded = []
+ self._excludedOpts = None
+
+ def excludedOption(self, name):
+ if self._excludedOpts is None:
+ from optparse import OptionParser
+ self._excludedOpts = OptionParser(add_help_option=False)
+ for plugin in self.excluded:
+ plugin.options(self._excludedOpts, env={})
+ return self._excludedOpts.get_option('--' + name)
+
+ def loadPlugins(self):
+ if self.load:
+ DefaultPluginManager.loadPlugins(self)
+ allow = []
+ for plugin in self.plugins:
+ ok = True
+ for method in self.exclude:
+ if hasattr(plugin, method):
+ ok = False
+ self.excluded.append(plugin)
+ break
+ if ok:
+ allow.append(plugin)
+ self.plugins = allow
diff --git a/lib/spack/external/nose/plugins/multiprocess.py b/lib/spack/external/nose/plugins/multiprocess.py
new file mode 100644
index 0000000000..2cae744a11
--- /dev/null
+++ b/lib/spack/external/nose/plugins/multiprocess.py
@@ -0,0 +1,835 @@
+"""
+Overview
+========
+
+The multiprocess plugin enables you to distribute your test run among a set of
+worker processes that run tests in parallel. This can speed up CPU-bound test
+runs (as long as the number of worker processes is around the number of
+processors or cores available), but is mainly useful for IO-bound tests that
+spend most of their time waiting for data to arrive from someplace else.
+
+.. note ::
+
+ See :doc:`../doc_tests/test_multiprocess/multiprocess` for
+ additional documentation and examples. Use of this plugin on python
+ 2.5 or earlier requires the multiprocessing_ module, also available
+ from PyPI.
+
+.. _multiprocessing : http://code.google.com/p/python-multiprocessing/
+
+How tests are distributed
+=========================
+
+The ideal case would be to dispatch each test to a worker process
+separately. This ideal is not attainable in all cases, however, because many
+test suites depend on context (class, module or package) fixtures.
+
+The plugin can't know (unless you tell it -- see below!) if a context fixture
+can be called many times concurrently (is re-entrant), or if it can be shared
+among tests running in different processes. Therefore, if a context has
+fixtures, the default behavior is to dispatch the entire suite to a worker as
+a unit.
+
+Controlling distribution
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+There are two context-level variables that you can use to control this default
+behavior.
+
+If a context's fixtures are re-entrant, set ``_multiprocess_can_split_ = True``
+in the context, and the plugin will dispatch tests in suites bound to that
+context as if the context had no fixtures. This means that the fixtures will
+execute concurrently and multiple times, typically once per test.
+
+If a context's fixtures can be shared by tests running in different processes
+-- such as a package-level fixture that starts an external http server or
+initializes a shared database -- then set ``_multiprocess_shared_ = True`` in
+the context. These fixtures will then execute in the primary nose process, and
+tests in those contexts will be individually dispatched to run in parallel.
+
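+For example (an illustrative sketch; ``start_shared_server`` is a
+hypothetical helper), a package whose fixture starts one http server for all
+workers would mark itself shared in its ``__init__.py``::
+
+    _multiprocess_shared_ = True
+
+    def setup_package():
+        start_shared_server()
+
+while a module whose fixtures are re-entrant can allow its tests to be
+dispatched individually::
+
+    _multiprocess_can_split_ = True
+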
+How results are collected and reported
+======================================
+
+As each test or suite executes in a worker process, results (failures, errors,
+and specially handled exceptions like SkipTest) are collected in that
+process. When the worker process finishes, it returns results to the main
+nose process. There, any progress output is printed (dots!), and the
+results from the test run are combined into a consolidated result
+set. When results have been received for all dispatched tests, or all
+workers have died, the result summary is output as normal.
+
+Beware!
+=======
+
+Not all test suites will benefit from, or even operate correctly using, this
+plugin. For example, CPU-bound tests will run more slowly if you don't have
+multiple processors. There are also some differences in plugin
+interactions and behaviors due to the way in which tests are dispatched and
+loaded. In general, test loading under this plugin operates as if it were
+always in directed mode instead of discovered mode. For instance, doctests
+in test modules will always be found when using this plugin with the doctest
+plugin.
+
+But the biggest issue you will face is probably concurrency. Unless you
+have kept your tests as religiously pure unit tests, with no side-effects, no
+ordering issues, and no external dependencies, chances are you will experience
+odd, intermittent and unexplainable failures and errors when using this
+plugin. This doesn't necessarily mean the plugin is broken; it may mean that
+your test suite is not safe for concurrency.
+
+New Features in 1.1.0
+=====================
+
+* functions generated by test generators are now added to the worker queue,
+  so they are distributed among the worker processes.
+* fixed timeout functionality: tests are now terminated with a
+  TimedOutException when they exceed their execution time; the worker
+  processes themselves are not terminated.
+* added the ``--process-restartworker`` option to restart workers once they
+  are done; this helps control memory usage when leaks accumulate during
+  long runs.
+* added the global _instantiate_plugins to configure which plugins are
+  instantiated in the worker processes.
+
+"""
+
+import logging
+import os
+import sys
+import time
+import traceback
+import unittest
+import pickle
+import signal
+import nose.case
+from nose.core import TextTestRunner
+from nose import failure
+from nose import loader
+from nose.plugins.base import Plugin
+from nose.pyversion import bytes_
+from nose.result import TextTestResult
+from nose.suite import ContextSuite
+from nose.util import test_address
+try:
+ # 2.7+
+ from unittest.runner import _WritelnDecorator
+except ImportError:
+ from unittest import _WritelnDecorator
+from Queue import Empty
+from warnings import warn
+try:
+ from cStringIO import StringIO
+except ImportError:
+    from StringIO import StringIO
+
+# this is a list of plugin classes that will be checked for and created inside
+# each worker process
+_instantiate_plugins = None
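+# e.g. (illustrative sketch): to have every worker instantiate the builtin
+# attribute-selector plugin, a wrapper script could set, before tests run:
+#
+#   from nose.plugins.attrib import AttributeSelector
+#   import nose.plugins.multiprocess as mp
+#   mp._instantiate_plugins = [AttributeSelector]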
+
+log = logging.getLogger(__name__)
+
+Process = Queue = Pool = Event = Value = Array = None
+
+# have to inherit from KeyboardInterrupt so it will interrupt the process properly
+class TimedOutException(KeyboardInterrupt):
+ def __init__(self, value = "Timed Out"):
+ self.value = value
+ def __str__(self):
+ return repr(self.value)
+
+def _import_mp():
+ global Process, Queue, Pool, Event, Value, Array
+ try:
+ from multiprocessing import Manager, Process
+        # The manager creates a server process that holds Python objects
+        # and lets other processes manipulate them through proxies. Keep
+        # that process from being interrupted by SIGINT (KeyboardInterrupt)
+        # so the communication channel between subprocesses and the main
+        # process stays usable after ctrl+C is received in the main process.
+ old=signal.signal(signal.SIGINT, signal.SIG_IGN)
+ m = Manager()
+ #reset it back so main process will receive a KeyboardInterrupt
+ #exception on ctrl+c
+ signal.signal(signal.SIGINT, old)
+ Queue, Pool, Event, Value, Array = (
+ m.Queue, m.Pool, m.Event, m.Value, m.Array
+ )
+ except ImportError:
+ warn("multiprocessing module is not available, multiprocess plugin "
+ "cannot be used", RuntimeWarning)
+
+
+class TestLet:
+ def __init__(self, case):
+ try:
+ self._id = case.id()
+ except AttributeError:
+ pass
+ self._short_description = case.shortDescription()
+ self._str = str(case)
+
+ def id(self):
+ return self._id
+
+ def shortDescription(self):
+ return self._short_description
+
+ def __str__(self):
+ return self._str
+
+class MultiProcess(Plugin):
+ """
+    Run tests in multiple processes. Requires the multiprocessing module.
+ """
+ score = 1000
+ status = {}
+
+ def options(self, parser, env):
+ """
+ Register command-line options.
+ """
+ parser.add_option("--processes", action="store",
+ default=env.get('NOSE_PROCESSES', 0),
+ dest="multiprocess_workers",
+ metavar="NUM",
+ help="Spread test run among this many processes. "
+ "Set a number equal to the number of processors "
+ "or cores in your machine for best results. "
+ "Pass a negative number to have the number of "
+ "processes automatically set to the number of "
+ "cores. Passing 0 means to disable parallel "
+ "testing. Default is 0 unless NOSE_PROCESSES is "
+ "set. "
+ "[NOSE_PROCESSES]")
+ parser.add_option("--process-timeout", action="store",
+ default=env.get('NOSE_PROCESS_TIMEOUT', 10),
+ dest="multiprocess_timeout",
+ metavar="SECONDS",
+ help="Set timeout for return of results from each "
+ "test runner process. Default is 10. "
+ "[NOSE_PROCESS_TIMEOUT]")
+ parser.add_option("--process-restartworker", action="store_true",
+ default=env.get('NOSE_PROCESS_RESTARTWORKER', False),
+ dest="multiprocess_restartworker",
+ help="If set, will restart each worker process once"
+                          " its tests are done; this helps keep memory "
+                          "leaks from killing the system. "
+ "[NOSE_PROCESS_RESTARTWORKER]")
+
+ def configure(self, options, config):
+ """
+ Configure plugin.
+ """
+ try:
+ self.status.pop('active')
+ except KeyError:
+ pass
+ if not hasattr(options, 'multiprocess_workers'):
+ self.enabled = False
+ return
+ # don't start inside of a worker process
+ if config.worker:
+ return
+ self.config = config
+ try:
+ workers = int(options.multiprocess_workers)
+ except (TypeError, ValueError):
+ workers = 0
+ if workers:
+ _import_mp()
+ if Process is None:
+ self.enabled = False
+ return
+ # Negative number of workers will cause multiprocessing to hang.
+ # Set the number of workers to the CPU count to avoid this.
+ if workers < 0:
+ try:
+ import multiprocessing
+ workers = multiprocessing.cpu_count()
+ except NotImplementedError:
+ self.enabled = False
+ return
+ self.enabled = True
+ self.config.multiprocess_workers = workers
+ t = float(options.multiprocess_timeout)
+ self.config.multiprocess_timeout = t
+ r = int(options.multiprocess_restartworker)
+ self.config.multiprocess_restartworker = r
+ self.status['active'] = True
+
+ def prepareTestLoader(self, loader):
+ """Remember loader class so MultiProcessTestRunner can instantiate
+ the right loader.
+ """
+ self.loaderClass = loader.__class__
+
+ def prepareTestRunner(self, runner):
+ """Replace test runner with MultiProcessTestRunner.
+ """
+ # replace with our runner class
+ return MultiProcessTestRunner(stream=runner.stream,
+ verbosity=self.config.verbosity,
+ config=self.config,
+ loaderClass=self.loaderClass)
+
+def signalhandler(sig, frame):
+ raise TimedOutException()
+
+class MultiProcessTestRunner(TextTestRunner):
+ waitkilltime = 5.0 # max time to wait to terminate a process that does not
+ # respond to SIGILL
+ def __init__(self, **kw):
+ self.loaderClass = kw.pop('loaderClass', loader.defaultTestLoader)
+ super(MultiProcessTestRunner, self).__init__(**kw)
+
+ def collect(self, test, testQueue, tasks, to_teardown, result):
+ # dispatch and collect results
+ # put indexes only on queue because tests aren't picklable
+ for case in self.nextBatch(test):
+ log.debug("Next batch %s (%s)", case, type(case))
+ if (isinstance(case, nose.case.Test) and
+ isinstance(case.test, failure.Failure)):
+ log.debug("Case is a Failure")
+ case(result) # run here to capture the failure
+ continue
+ # handle shared fixtures
+ if isinstance(case, ContextSuite) and case.context is failure.Failure:
+ log.debug("Case is a Failure")
+ case(result) # run here to capture the failure
+ continue
+ elif isinstance(case, ContextSuite) and self.sharedFixtures(case):
+ log.debug("%s has shared fixtures", case)
+ try:
+ case.setUp()
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except:
+ log.debug("%s setup failed", sys.exc_info())
+ result.addError(case, sys.exc_info())
+ else:
+ to_teardown.append(case)
+ if case.factory:
+ ancestors=case.factory.context.get(case, [])
+ for an in ancestors[:2]:
+ #log.debug('reset ancestor %s', an)
+ if getattr(an, '_multiprocess_shared_', False):
+ an._multiprocess_can_split_=True
+ #an._multiprocess_shared_=False
+ self.collect(case, testQueue, tasks, to_teardown, result)
+
+ else:
+ test_addr = self.addtask(testQueue,tasks,case)
+ log.debug("Queued test %s (%s) to %s",
+ len(tasks), test_addr, testQueue)
+
+ def startProcess(self, iworker, testQueue, resultQueue, shouldStop, result):
+ currentaddr = Value('c',bytes_(''))
+ currentstart = Value('d',time.time())
+ keyboardCaught = Event()
+ p = Process(target=runner,
+ args=(iworker, testQueue,
+ resultQueue,
+ currentaddr,
+ currentstart,
+ keyboardCaught,
+ shouldStop,
+ self.loaderClass,
+ result.__class__,
+ pickle.dumps(self.config)))
+ p.currentaddr = currentaddr
+ p.currentstart = currentstart
+ p.keyboardCaught = keyboardCaught
+ old = signal.signal(signal.SIGILL, signalhandler)
+ p.start()
+ signal.signal(signal.SIGILL, old)
+ return p
+
+ def run(self, test):
+ """
+ Execute the test (which may be a test suite). If the test is a suite,
+ distribute it out among as many processes as have been configured, at
+ as fine a level as is possible given the context fixtures defined in
+ the suite or any sub-suites.
+
+ """
+ log.debug("%s.run(%s) (%s)", self, test, os.getpid())
+ wrapper = self.config.plugins.prepareTest(test)
+ if wrapper is not None:
+ test = wrapper
+
+ # plugins can decorate or capture the output stream
+ wrapped = self.config.plugins.setOutputStream(self.stream)
+ if wrapped is not None:
+ self.stream = wrapped
+
+ testQueue = Queue()
+ resultQueue = Queue()
+ tasks = []
+ completed = []
+ workers = []
+ to_teardown = []
+ shouldStop = Event()
+
+ result = self._makeResult()
+ start = time.time()
+
+ self.collect(test, testQueue, tasks, to_teardown, result)
+
+ log.debug("Starting %s workers", self.config.multiprocess_workers)
+ for i in range(self.config.multiprocess_workers):
+ p = self.startProcess(i, testQueue, resultQueue, shouldStop, result)
+ workers.append(p)
+ log.debug("Started worker process %s", i+1)
+
+ total_tasks = len(tasks)
+ # need to keep track of the next time to check for timeouts in case
+ # more than one process times out at the same time.
+ nexttimeout=self.config.multiprocess_timeout
+ thrownError = None
+
+ try:
+ while tasks:
+ log.debug("Waiting for results (%s/%s tasks), next timeout=%.3fs",
+ len(completed), total_tasks,nexttimeout)
+ try:
+ iworker, addr, newtask_addrs, batch_result = resultQueue.get(
+ timeout=nexttimeout)
+ log.debug('Results received for worker %d, %s, new tasks: %d',
+ iworker,addr,len(newtask_addrs))
+ try:
+ try:
+ tasks.remove(addr)
+ except ValueError:
+ log.warn('worker %s failed to remove from tasks: %s',
+ iworker,addr)
+ total_tasks += len(newtask_addrs)
+ tasks.extend(newtask_addrs)
+ except KeyError:
+ log.debug("Got result for unknown task? %s", addr)
+ log.debug("current: %s",str(list(tasks)[0]))
+ else:
+ completed.append([addr,batch_result])
+ self.consolidate(result, batch_result)
+ if (self.config.stopOnError
+ and not result.wasSuccessful()):
+ # set the stop condition
+ shouldStop.set()
+ break
+ if self.config.multiprocess_restartworker:
+ log.debug('joining worker %s',iworker)
+                            # wait for the worker; it is not critical if it
+                            # cannot be joined, since workers that add to
+                            # testQueue will not terminate until all their
+                            # items are read
+ workers[iworker].join(timeout=1)
+ if not shouldStop.is_set() and not testQueue.empty():
+ log.debug('starting new process on worker %s',iworker)
+ workers[iworker] = self.startProcess(iworker, testQueue, resultQueue, shouldStop, result)
+ except Empty:
+ log.debug("Timed out with %s tasks pending "
+ "(empty testQueue=%r): %s",
+ len(tasks),testQueue.empty(),str(tasks))
+ any_alive = False
+ for iworker, w in enumerate(workers):
+ if w.is_alive():
+ worker_addr = bytes_(w.currentaddr.value,'ascii')
+ timeprocessing = time.time() - w.currentstart.value
+ if ( len(worker_addr) == 0
+ and timeprocessing > self.config.multiprocess_timeout-0.1):
+ log.debug('worker %d has finished its work item, '
+ 'but is not exiting? do we wait for it?',
+ iworker)
+ else:
+ any_alive = True
+ if (len(worker_addr) > 0
+ and timeprocessing > self.config.multiprocess_timeout-0.1):
+ log.debug('timed out worker %s: %s',
+ iworker,worker_addr)
+ w.currentaddr.value = bytes_('')
+                                # If the process is executing C/C++ code, a
+                                # single SIGILL may not raise a Python
+                                # KeyboardInterrupt exception; therefore,
+                                # send repeated signals until an exception
+                                # is caught. If this takes too long, then
+                                # terminate the process
+ w.keyboardCaught.clear()
+ startkilltime = time.time()
+ while not w.keyboardCaught.is_set() and w.is_alive():
+ if time.time()-startkilltime > self.waitkilltime:
+ # have to terminate...
+ log.error("terminating worker %s",iworker)
+ w.terminate()
+ # there is a small probability that the
+ # terminated process might send a result,
+ # which has to be specially handled or
+ # else processes might get orphaned.
+ workers[iworker] = w = self.startProcess(iworker, testQueue, resultQueue, shouldStop, result)
+ break
+ os.kill(w.pid, signal.SIGILL)
+ time.sleep(0.1)
+ if not any_alive and testQueue.empty():
+ log.debug("All workers dead")
+ break
+ nexttimeout=self.config.multiprocess_timeout
+ for w in workers:
+ if w.is_alive() and len(w.currentaddr.value) > 0:
+ timeprocessing = time.time()-w.currentstart.value
+ if timeprocessing <= self.config.multiprocess_timeout:
+ nexttimeout = min(nexttimeout,
+ self.config.multiprocess_timeout-timeprocessing)
+ log.debug("Completed %s tasks (%s remain)", len(completed), len(tasks))
+
+ except (KeyboardInterrupt, SystemExit), e:
+ log.info('parent received ctrl-c when waiting for test results')
+ thrownError = e
+ #resultQueue.get(False)
+
+ result.addError(test, sys.exc_info())
+
+ try:
+ for case in to_teardown:
+ log.debug("Tearing down shared fixtures for %s", case)
+ try:
+ case.tearDown()
+ except (KeyboardInterrupt, SystemExit):
+ raise
+ except:
+ result.addError(case, sys.exc_info())
+
+ stop = time.time()
+
+            # write the report first, since shutting down processes can freeze
+ result.printErrors()
+ result.printSummary(start, stop)
+ self.config.plugins.finalize(result)
+
+ if thrownError is None:
+ log.debug("Tell all workers to stop")
+ for w in workers:
+ if w.is_alive():
+ testQueue.put('STOP', block=False)
+
+ # wait for the workers to end
+ for iworker,worker in enumerate(workers):
+ if worker.is_alive():
+ log.debug('joining worker %s',iworker)
+ worker.join()
+ if worker.is_alive():
+ log.debug('failed to join worker %s',iworker)
+ except (KeyboardInterrupt, SystemExit):
+ log.info('parent received ctrl-c when shutting down: stop all processes')
+ for worker in workers:
+ if worker.is_alive():
+ worker.terminate()
+
+ if thrownError: raise thrownError
+ else: raise
+
+ return result
+
+ def addtask(testQueue,tasks,case):
+ arg = None
+ if isinstance(case,nose.case.Test) and hasattr(case.test,'arg'):
+ # this removes the top level descriptor and allows real function
+ # name to be returned
+ case.test.descriptor = None
+ arg = case.test.arg
+ test_addr = MultiProcessTestRunner.address(case)
+ testQueue.put((test_addr,arg), block=False)
+ if arg is not None:
+ test_addr += str(arg)
+ if tasks is not None:
+ tasks.append(test_addr)
+ return test_addr
+ addtask = staticmethod(addtask)
+
+ def address(case):
+ if hasattr(case, 'address'):
+ file, mod, call = case.address()
+ elif hasattr(case, 'context'):
+ file, mod, call = test_address(case.context)
+ else:
+ raise Exception("Unable to convert %s to address" % case)
+ parts = []
+ if file is None:
+ if mod is None:
+ raise Exception("Unaddressable case %s" % case)
+ else:
+ parts.append(mod)
+ else:
+ # strip __init__.py(c) from end of file part
+ # if present, having it there confuses loader
+ dirname, basename = os.path.split(file)
+ if basename.startswith('__init__'):
+ file = dirname
+ parts.append(file)
+ if call is not None:
+ parts.append(call)
+ return ':'.join(map(str, parts))
+ address = staticmethod(address)
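+    # Note: address() yields strings such as
+    # '/path/to/test_module.py:TestCase.test_method' (the file or call part
+    # may be absent), which workers feed back to loadTestsFromNames().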
+
+ def nextBatch(self, test):
+ # allows tests or suites to mark themselves as not safe
+ # for multiprocess execution
+ if hasattr(test, 'context'):
+ if not getattr(test.context, '_multiprocess_', True):
+ return
+
+ if ((isinstance(test, ContextSuite)
+ and test.hasFixtures(self.checkCanSplit))
+ or not getattr(test, 'can_split', True)
+ or not isinstance(test, unittest.TestSuite)):
+ # regular test case, or a suite with context fixtures
+
+ # special case: when run like nosetests path/to/module.py
+ # the top-level suite has only one item, and it shares
+ # the same context as that item. In that case, we want the
+ # item, not the top-level suite
+ if isinstance(test, ContextSuite):
+ contained = list(test)
+ if (len(contained) == 1
+ and getattr(contained[0],
+ 'context', None) == test.context):
+ test = contained[0]
+ yield test
+ else:
+ # Suite is without fixtures at this level; but it may have
+ # fixtures at any deeper level, so we need to examine it all
+ # the way down to the case level
+ for case in test:
+ for batch in self.nextBatch(case):
+ yield batch
+
+ def checkCanSplit(context, fixt):
+ """
+ Callback that we use to check whether the fixtures found in a
+ context or ancestor are ones we care about.
+
+ Contexts can tell us that their fixtures are reentrant by setting
+ _multiprocess_can_split_. So if we see that, we return False to
+ disregard those fixtures.
+ """
+ if not fixt:
+ return False
+ if getattr(context, '_multiprocess_can_split_', False):
+ return False
+ return True
+ checkCanSplit = staticmethod(checkCanSplit)
+
+ def sharedFixtures(self, case):
+ context = getattr(case, 'context', None)
+ if not context:
+ return False
+ return getattr(context, '_multiprocess_shared_', False)
+
+ def consolidate(self, result, batch_result):
+ log.debug("batch result is %s" , batch_result)
+ try:
+ output, testsRun, failures, errors, errorClasses = batch_result
+ except ValueError:
+ log.debug("result in unexpected format %s", batch_result)
+ failure.Failure(*sys.exc_info())(result)
+ return
+ self.stream.write(output)
+ result.testsRun += testsRun
+ result.failures.extend(failures)
+ result.errors.extend(errors)
+ for key, (storage, label, isfail) in errorClasses.items():
+ if key not in result.errorClasses:
+ # Ordinarily storage is result attribute
+ # but it's only processed through the errorClasses
+ # dict, so it's ok to fake it here
+ result.errorClasses[key] = ([], label, isfail)
+ mystorage, _junk, _junk = result.errorClasses[key]
+ mystorage.extend(storage)
+ log.debug("Ran %s tests (total: %s)", testsRun, result.testsRun)
+
+
+def runner(ix, testQueue, resultQueue, currentaddr, currentstart,
+ keyboardCaught, shouldStop, loaderClass, resultClass, config):
+ try:
+ try:
+ return __runner(ix, testQueue, resultQueue, currentaddr, currentstart,
+ keyboardCaught, shouldStop, loaderClass, resultClass, config)
+ except KeyboardInterrupt:
+ log.debug('Worker %s keyboard interrupt, stopping',ix)
+ except Empty:
+ log.debug("Worker %s timed out waiting for tasks", ix)
+
+def __runner(ix, testQueue, resultQueue, currentaddr, currentstart,
+ keyboardCaught, shouldStop, loaderClass, resultClass, config):
+
+ config = pickle.loads(config)
+ dummy_parser = config.parserClass()
+ if _instantiate_plugins is not None:
+ for pluginclass in _instantiate_plugins:
+ plugin = pluginclass()
+ plugin.addOptions(dummy_parser,{})
+ config.plugins.addPlugin(plugin)
+ config.plugins.configure(config.options,config)
+ config.plugins.begin()
+ log.debug("Worker %s executing, pid=%d", ix,os.getpid())
+ loader = loaderClass(config=config)
+ loader.suiteClass.suiteClass = NoSharedFixtureContextSuite
+
+ def get():
+ return testQueue.get(timeout=config.multiprocess_timeout)
+
+ def makeResult():
+ stream = _WritelnDecorator(StringIO())
+ result = resultClass(stream, descriptions=1,
+ verbosity=config.verbosity,
+ config=config)
+ plug_result = config.plugins.prepareTestResult(result)
+ if plug_result:
+ return plug_result
+ return result
+
+ def batch(result):
+ failures = [(TestLet(c), err) for c, err in result.failures]
+ errors = [(TestLet(c), err) for c, err in result.errors]
+ errorClasses = {}
+ for key, (storage, label, isfail) in result.errorClasses.items():
+ errorClasses[key] = ([(TestLet(c), err) for c, err in storage],
+ label, isfail)
+ return (
+ result.stream.getvalue(),
+ result.testsRun,
+ failures,
+ errors,
+ errorClasses)
+ for test_addr, arg in iter(get, 'STOP'):
+ if shouldStop.is_set():
+ log.exception('Worker %d STOPPED',ix)
+ break
+ result = makeResult()
+ test = loader.loadTestsFromNames([test_addr])
+ test.testQueue = testQueue
+ test.tasks = []
+ test.arg = arg
+ log.debug("Worker %s Test is %s (%s)", ix, test_addr, test)
+ try:
+ if arg is not None:
+ test_addr = test_addr + str(arg)
+ currentaddr.value = bytes_(test_addr)
+ currentstart.value = time.time()
+ test(result)
+ currentaddr.value = bytes_('')
+ resultQueue.put((ix, test_addr, test.tasks, batch(result)))
+ except KeyboardInterrupt, e: #TimedOutException:
+ timeout = isinstance(e, TimedOutException)
+ if timeout:
+ keyboardCaught.set()
+ if len(currentaddr.value):
+ if timeout:
+ msg = 'Worker %s timed out, failing current test %s'
+ else:
+ msg = 'Worker %s keyboard interrupt, failing current test %s'
+ log.exception(msg,ix,test_addr)
+ currentaddr.value = bytes_('')
+ failure.Failure(*sys.exc_info())(result)
+ resultQueue.put((ix, test_addr, test.tasks, batch(result)))
+ else:
+ if timeout:
+ msg = 'Worker %s test %s timed out'
+ else:
+ msg = 'Worker %s test %s keyboard interrupt'
+ log.debug(msg,ix,test_addr)
+ resultQueue.put((ix, test_addr, test.tasks, batch(result)))
+ if not timeout:
+ raise
+ except SystemExit:
+ currentaddr.value = bytes_('')
+ log.exception('Worker %s system exit',ix)
+ raise
+ except:
+ currentaddr.value = bytes_('')
+ log.exception("Worker %s error running test or returning "
+ "results",ix)
+ failure.Failure(*sys.exc_info())(result)
+ resultQueue.put((ix, test_addr, test.tasks, batch(result)))
+ if config.multiprocess_restartworker:
+ break
+ log.debug("Worker %s ending", ix)
+
+
+class NoSharedFixtureContextSuite(ContextSuite):
+ """
+ Context suite that never fires shared fixtures.
+
+ When a context sets _multiprocess_shared_, fixtures in that context
+ are executed by the main process. Using this suite class prevents them
+ from executing in the runner process as well.
+
+ """
+ testQueue = None
+ tasks = None
+ arg = None
+ def setupContext(self, context):
+ if getattr(context, '_multiprocess_shared_', False):
+ return
+ super(NoSharedFixtureContextSuite, self).setupContext(context)
+
+ def teardownContext(self, context):
+ if getattr(context, '_multiprocess_shared_', False):
+ return
+ super(NoSharedFixtureContextSuite, self).teardownContext(context)
+ def run(self, result):
+ """Run tests in suite inside of suite fixtures.
+ """
+ # proxy the result for myself
+ log.debug("suite %s (%s) run called, tests: %s",
+ id(self), self, self._tests)
+ if self.resultProxy:
+ result, orig = self.resultProxy(result, self), result
+ else:
+ result, orig = result, result
+ try:
+ #log.debug('setUp for %s', id(self));
+ self.setUp()
+ except KeyboardInterrupt:
+ raise
+ except:
+ self.error_context = 'setup'
+ result.addError(self, self._exc_info())
+ return
+ try:
+ for test in self._tests:
+ if (isinstance(test,nose.case.Test)
+ and self.arg is not None):
+ test.test.arg = self.arg
+ else:
+ test.arg = self.arg
+ test.testQueue = self.testQueue
+ test.tasks = self.tasks
+ if result.shouldStop:
+ log.debug("stopping")
+ break
+ # each nose.case.Test will create its own result proxy
+ # so the cases need the original result, to avoid proxy
+ # chains
+ #log.debug('running test %s in suite %s', test, self);
+ try:
+ test(orig)
+ except KeyboardInterrupt, e:
+ timeout = isinstance(e, TimedOutException)
+ if timeout:
+ msg = 'Timeout when running test %s in suite %s'
+ else:
+ msg = 'KeyboardInterrupt when running test %s in suite %s'
+ log.debug(msg, test, self)
+ err = (TimedOutException,TimedOutException(str(test)),
+ sys.exc_info()[2])
+ test.config.plugins.addError(test,err)
+ orig.addError(test,err)
+ if not timeout:
+ raise
+ finally:
+ self.has_run = True
+ try:
+ #log.debug('tearDown for %s', id(self));
+ self.tearDown()
+ except KeyboardInterrupt:
+ raise
+ except:
+ self.error_context = 'teardown'
+ result.addError(self, self._exc_info())
diff --git a/lib/spack/external/nose/plugins/plugintest.py b/lib/spack/external/nose/plugins/plugintest.py
new file mode 100644
index 0000000000..76d0d2c48c
--- /dev/null
+++ b/lib/spack/external/nose/plugins/plugintest.py
@@ -0,0 +1,416 @@
+"""
+Testing Plugins
+===============
+
+The plugin interface is well-tested enough to safely unit test your
+use of its hooks with some level of confidence. However, there is also
+a mixin for unittest.TestCase called PluginTester that's designed to
+test plugins in their native runtime environment.
+
+Here's a simple example with a do-nothing plugin and a composed suite.
+
+ >>> import unittest
+ >>> from nose.plugins import Plugin, PluginTester
+ >>> class FooPlugin(Plugin):
+ ... pass
+ >>> class TestPluginFoo(PluginTester, unittest.TestCase):
+ ... activate = '--with-foo'
+ ... plugins = [FooPlugin()]
+ ... def test_foo(self):
+ ... for line in self.output:
+ ... # i.e. check for patterns
+ ... pass
+ ...
+ ... # or check for a line containing ...
+ ... assert "ValueError" in self.output
+ ... def makeSuite(self):
+ ... class TC(unittest.TestCase):
+ ... def runTest(self):
+ ... raise ValueError("I hate foo")
+ ... return [TC('runTest')]
+ ...
+ >>> res = unittest.TestResult()
+ >>> case = TestPluginFoo('test_foo')
+ >>> _ = case(res)
+ >>> res.errors
+ []
+ >>> res.failures
+ []
+ >>> res.wasSuccessful()
+ True
+ >>> res.testsRun
+ 1
+
+And here is a more complex example of testing a plugin that has extra
+arguments and reads environment variables.
+
+ >>> import unittest, os
+ >>> from nose.plugins import Plugin, PluginTester
+ >>> class FancyOutputter(Plugin):
+ ... name = "fancy"
+ ... def configure(self, options, conf):
+ ... Plugin.configure(self, options, conf)
+ ... if not self.enabled:
+ ... return
+ ... self.fanciness = 1
+ ... if options.more_fancy:
+ ... self.fanciness = 2
+ ... if 'EVEN_FANCIER' in self.env:
+ ... self.fanciness = 3
+ ...
+ ... def options(self, parser, env=os.environ):
+ ... self.env = env
+ ... parser.add_option('--more-fancy', action='store_true')
+ ... Plugin.options(self, parser, env=env)
+ ...
+ ... def report(self, stream):
+ ... stream.write("FANCY " * self.fanciness)
+ ...
+ >>> class TestFancyOutputter(PluginTester, unittest.TestCase):
+ ... activate = '--with-fancy' # enables the plugin
+ ... plugins = [FancyOutputter()]
+ ... args = ['--more-fancy']
+ ... env = {'EVEN_FANCIER': '1'}
+ ...
+ ... def test_fancy_output(self):
+ ... assert "FANCY FANCY FANCY" in self.output, (
+ ... "got: %s" % self.output)
+ ... def makeSuite(self):
+ ... class TC(unittest.TestCase):
+ ... def runTest(self):
+ ... raise ValueError("I hate fancy stuff")
+ ... return [TC('runTest')]
+ ...
+ >>> res = unittest.TestResult()
+ >>> case = TestFancyOutputter('test_fancy_output')
+ >>> _ = case(res)
+ >>> res.errors
+ []
+ >>> res.failures
+ []
+ >>> res.wasSuccessful()
+ True
+ >>> res.testsRun
+ 1
+
+"""
+
+import re
+import sys
+from warnings import warn
+
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from StringIO import StringIO
+
+__all__ = ['PluginTester', 'run']
+
+from os import getpid
+class MultiProcessFile(object):
+ """
+ helper for testing multiprocessing
+
+ multiprocessing poses a problem for doctests, since the strategy
+ of replacing sys.stdout/stderr with file-like objects then
+ inspecting the results won't work: the child processes will
+ write to the objects, but the data will not be reflected
+ in the parent doctest-ing process.
+
+ The solution is to create file-like objects which will interact with
+ multiprocessing in a more desirable way.
+
+ All processes can write to this object, but only the creator can read.
+ This allows the testing system to see a unified picture of I/O.
+ """
+ def __init__(self):
+ # per advice at:
+ # http://docs.python.org/library/multiprocessing.html#all-platforms
+ self.__master = getpid()
+ self.__queue = Manager().Queue()
+ self.__buffer = StringIO()
+ self.softspace = 0
+
+ def buffer(self):
+ if getpid() != self.__master:
+ return
+
+ from Queue import Empty
+ from collections import defaultdict
+ cache = defaultdict(str)
+ while True:
+ try:
+ pid, data = self.__queue.get_nowait()
+ except Empty:
+ break
+ if pid == ():
+ #show parent output after children
+ #this is what users see, usually
+ pid = ( 1e100, ) # googol!
+ cache[pid] += data
+ for pid in sorted(cache):
+ #self.__buffer.write( '%s wrote: %r\n' % (pid, cache[pid]) ) #DEBUG
+ self.__buffer.write( cache[pid] )
+ def write(self, data):
+ # note that these pids are in the form of current_process()._identity
+ # rather than OS pids
+ from multiprocessing import current_process
+ pid = current_process()._identity
+ self.__queue.put((pid, data))
+ def __iter__(self):
+        "__getattr__ is not consulted by iter(), so define __iter__ explicitly"
+ self.buffer()
+ return self.__buffer
+ def seek(self, offset, whence=0):
+ self.buffer()
+ return self.__buffer.seek(offset, whence)
+ def getvalue(self):
+ self.buffer()
+ return self.__buffer.getvalue()
+ def __getattr__(self, attr):
+ return getattr(self.__buffer, attr)
+
+try:
+ from multiprocessing import Manager
+ Buffer = MultiProcessFile
+except ImportError:
+ Buffer = StringIO
+
+class PluginTester(object):
+ """A mixin for testing nose plugins in their runtime environment.
+
+ Subclass this and mix in unittest.TestCase to run integration/functional
+ tests on your plugin. When setUp() is called, the stub test suite is
+ executed with your plugin so that during an actual test you can inspect the
+ artifacts of how your plugin interacted with the stub test suite.
+
+ - activate
+
+ - the argument to send nosetests to activate the plugin
+
+ - suitepath
+
+ - if set, this is the path of the suite to test. Otherwise, you
+ will need to use the hook, makeSuite()
+
+ - plugins
+
+ - the list of plugins to make available during the run. Note
+ that this does not mean these plugins will be *enabled* during
+ the run -- only the plugins enabled by the activate argument
+ or other settings in argv or env will be enabled.
+
+ - args
+
+ - a list of arguments to add to the nosetests command, in addition to
+ the activate argument
+
+ - env
+
+ - optional dict of environment variables to send nosetests
+
+ """
+ activate = None
+ suitepath = None
+ args = None
+ env = {}
+ argv = None
+ plugins = []
+ ignoreFiles = None
+
+ def makeSuite(self):
+ """returns a suite object of tests to run (unittest.TestSuite())
+
+ If self.suitepath is None, this must be implemented. The returned suite
+ object will be executed with all plugins activated. It may return
+ None.
+
+ Here is an example of a basic suite object you can return ::
+
+ >>> import unittest
+ >>> class SomeTest(unittest.TestCase):
+ ... def runTest(self):
+ ... raise ValueError("Now do something, plugin!")
+ ...
+ >>> unittest.TestSuite([SomeTest()]) # doctest: +ELLIPSIS
+ <unittest...TestSuite tests=[<...SomeTest testMethod=runTest>]>
+
+ """
+ raise NotImplementedError
+
+ def _execPlugin(self):
+ """execute the plugin on the internal test suite.
+ """
+ from nose.config import Config
+ from nose.core import TestProgram
+ from nose.plugins.manager import PluginManager
+
+ suite = None
+ stream = Buffer()
+ conf = Config(env=self.env,
+ stream=stream,
+ plugins=PluginManager(plugins=self.plugins))
+ if self.ignoreFiles is not None:
+ conf.ignoreFiles = self.ignoreFiles
+ if not self.suitepath:
+ suite = self.makeSuite()
+
+ self.nose = TestProgram(argv=self.argv, config=conf, suite=suite,
+ exit=False)
+ self.output = AccessDecorator(stream)
+
+ def setUp(self):
+ """runs nosetests with the specified test suite, all plugins
+ activated.
+ """
+ self.argv = ['nosetests', self.activate]
+ if self.args:
+ self.argv.extend(self.args)
+ if self.suitepath:
+ self.argv.append(self.suitepath)
+
+ self._execPlugin()
+
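+# Illustrative sketch, not from the original source: a minimal PluginTester
+# subclass. ``MyPlugin`` and its ``--with-myplugin`` switch are hypothetical.
+#
+#     class TestMyPlugin(PluginTester, unittest.TestCase):
+#         activate = '--with-myplugin'
+#         plugins = [MyPlugin()]
+#
+#         def makeSuite(self):
+#             class TC(unittest.TestCase):
+#                 def runTest(self):
+#                     raise ValueError("Now do something, plugin!")
+#             return unittest.TestSuite([TC()])
+#
+#         def test_err_reported(self):
+#             assert 'FAILED' in self.output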
+
+class AccessDecorator(object):
+ stream = None
+ _buf = None
+ def __init__(self, stream):
+ self.stream = stream
+ stream.seek(0)
+ self._buf = stream.read()
+ stream.seek(0)
+ def __contains__(self, val):
+ return val in self._buf
+ def __iter__(self):
+ return iter(self.stream)
+ def __str__(self):
+ return self._buf
+
+
+def blankline_separated_blocks(text):
+ "a bunch of === characters is also considered a blank line"
+ block = []
+ for line in text.splitlines(True):
+ block.append(line)
+ line = line.strip()
+ if not line or line.startswith('===') and not line.strip('='):
+ yield "".join(block)
+ block = []
+ if block:
+ yield "".join(block)
+
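+# For example (illustrative, not part of the original source):
+#
+#     >>> list(blankline_separated_blocks("a\nb\n\nc\n"))
+#     ['a\nb\n\n', 'c\n']
+#
+# A line consisting only of === characters ends a block the same way.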
+
+def remove_stack_traces(out):
+ # this regexp taken from Python 2.5's doctest
+ traceback_re = re.compile(r"""
+ # Grab the traceback header. Different versions of Python have
+ # said different things on the first traceback line.
+ ^(?P<hdr> Traceback\ \(
+ (?: most\ recent\ call\ last
+ | innermost\ last
+ ) \) :
+ )
+ \s* $ # toss trailing whitespace on the header.
+ (?P<stack> .*?) # don't blink: absorb stuff until...
+ ^(?=\w) # a line *starts* with alphanum.
+ .*?(?P<exception> \w+ ) # exception name
+ (?P<msg> [:\n] .*) # the rest
+ """, re.VERBOSE | re.MULTILINE | re.DOTALL)
+ blocks = []
+ for block in blankline_separated_blocks(out):
+ blocks.append(traceback_re.sub(r"\g<hdr>\n...\n\g<exception>\g<msg>", block))
+ return "".join(blocks)
+
+
+def simplify_warnings(out):
+ warn_re = re.compile(r"""
+ # Cut the file and line no, up to the warning name
+ ^.*:\d+:\s
+ (?P<category>\w+): \s+ # warning category
+ (?P<detail>.+) $ \n? # warning message
+ ^ .* $ # stack frame
+ """, re.VERBOSE | re.MULTILINE)
+ return warn_re.sub(r"\g<category>: \g<detail>", out)
+
+
+def remove_timings(out):
+ return re.sub(
+ r"Ran (\d+ tests?) in [0-9.]+s", r"Ran \1 in ...s", out)
+
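+# e.g. (illustrative): remove_timings("Ran 3 tests in 0.012s") returns
+# "Ran 3 tests in ...s", so doctest output no longer depends on timing.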
+
+def munge_nose_output_for_doctest(out):
+ """Modify nose output to make it easy to use in doctests."""
+ out = remove_stack_traces(out)
+ out = simplify_warnings(out)
+ out = remove_timings(out)
+ return out.strip()
+
+
+def run(*arg, **kw):
+ """
+ Specialized version of nose.run for use inside of doctests that
+ test test runs.
+
+ This version of run() prints the result output to stdout. Before
+ printing, the output is processed by replacing the timing
+ information with an ellipsis (...), removing traceback stacks, and
+ removing trailing whitespace.
+
+ Use this version of run wherever you are writing a doctest that
+ tests nose (or unittest) test result output.
+
+ Note: do not use doctest: +ELLIPSIS when testing nose output,
+ since ellipses ("test_foo ... ok") in your expected test runner
+ output may match multiple lines of output, causing spurious test
+ passes!
+ """
+ from nose import run
+ from nose.config import Config
+ from nose.plugins.manager import PluginManager
+
+ buffer = Buffer()
+ if 'config' not in kw:
+ plugins = kw.pop('plugins', [])
+ if isinstance(plugins, list):
+ plugins = PluginManager(plugins=plugins)
+ env = kw.pop('env', {})
+ kw['config'] = Config(env=env, plugins=plugins)
+ if 'argv' not in kw:
+ kw['argv'] = ['nosetests', '-v']
+ kw['config'].stream = buffer
+
+ # Set up buffering so that all output goes to our buffer,
+ # or warn user if deprecated behavior is active. If this is not
+ # done, prints and warnings will either be out of place or
+ # disappear.
+ stderr = sys.stderr
+ stdout = sys.stdout
+ if kw.pop('buffer_all', False):
+ sys.stdout = sys.stderr = buffer
+ restore = True
+ else:
+ restore = False
+ warn("The behavior of nose.plugins.plugintest.run() will change in "
+ "the next release of nose. The current behavior does not "
+ "correctly account for output to stdout and stderr. To enable "
+ "correct behavior, use run_buffered() instead, or pass "
+ "the keyword argument buffer_all=True to run().",
+ DeprecationWarning, stacklevel=2)
+ try:
+ run(*arg, **kw)
+ finally:
+ if restore:
+ sys.stderr = stderr
+ sys.stdout = stdout
+ out = buffer.getvalue()
+ print munge_nose_output_for_doctest(out)
+
+
+def run_buffered(*arg, **kw):
+ kw['buffer_all'] = True
+ run(*arg, **kw)
+
+if __name__ == '__main__':
+ import doctest
+ doctest.testmod()
diff --git a/lib/spack/external/nose/plugins/prof.py b/lib/spack/external/nose/plugins/prof.py
new file mode 100644
index 0000000000..4d304a934b
--- /dev/null
+++ b/lib/spack/external/nose/plugins/prof.py
@@ -0,0 +1,154 @@
+"""This plugin will run tests using the hotshot profiler, which is part
+of the standard library. To turn it on, use the ``--with-profile`` option
+or set the NOSE_WITH_PROFILE environment variable. Profiler output can be
+controlled with the ``--profile-sort`` and ``--profile-restrict`` options,
+and the profiler output file may be changed with ``--profile-stats-file``.
+
+See the `hotshot documentation`_ in the standard library documentation for
+more details on the various output options.
+
+.. _hotshot documentation: http://docs.python.org/library/hotshot.html
+"""
+
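+# Typical invocation (illustrative; the options are registered below, and the
+# sort/restrict values are passed straight through to pstats):
+#
+#     nosetests --with-profile --profile-sort=time \
+#         --profile-restrict=10 --profile-stats-file=prof.out
+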
+try:
+ import hotshot
+ from hotshot import stats
+except ImportError:
+ hotshot, stats = None, None
+import logging
+import os
+import sys
+import tempfile
+from nose.plugins.base import Plugin
+from nose.util import tolist
+
+log = logging.getLogger('nose.plugins')
+
+class Profile(Plugin):
+ """
+ Use this plugin to run tests using the hotshot profiler.
+ """
+ pfile = None
+ clean_stats_file = False
+ def options(self, parser, env):
+ """Register commandline options.
+ """
+ if not self.available():
+ return
+ Plugin.options(self, parser, env)
+ parser.add_option('--profile-sort', action='store', dest='profile_sort',
+ default=env.get('NOSE_PROFILE_SORT', 'cumulative'),
+ metavar="SORT",
+ help="Set sort order for profiler output")
+ parser.add_option('--profile-stats-file', action='store',
+ dest='profile_stats_file',
+ metavar="FILE",
+ default=env.get('NOSE_PROFILE_STATS_FILE'),
+ help='Profiler stats file; default is a new '
+ 'temp file on each run')
+ parser.add_option('--profile-restrict', action='append',
+ dest='profile_restrict',
+ metavar="RESTRICT",
+ default=env.get('NOSE_PROFILE_RESTRICT'),
+ help="Restrict profiler output. See help for "
+ "pstats.Stats for details")
+
+ def available(cls):
+ return hotshot is not None
+ available = classmethod(available)
+
+ def begin(self):
+ """Create profile stats file and load profiler.
+ """
+ if not self.available():
+ return
+ self._create_pfile()
+ self.prof = hotshot.Profile(self.pfile)
+
+ def configure(self, options, conf):
+ """Configure plugin.
+ """
+ if not self.available():
+ self.enabled = False
+ return
+ Plugin.configure(self, options, conf)
+ self.conf = conf
+ if options.profile_stats_file:
+ self.pfile = options.profile_stats_file
+ self.clean_stats_file = False
+ else:
+ self.pfile = None
+ self.clean_stats_file = True
+ self.fileno = None
+ self.sort = options.profile_sort
+ self.restrict = tolist(options.profile_restrict)
+
+ def prepareTest(self, test):
+ """Wrap entire test run in :func:`prof.runcall`.
+ """
+ if not self.available():
+ return
+ log.debug('preparing test %s' % test)
+ def run_and_profile(result, prof=self.prof, test=test):
+ self._create_pfile()
+ prof.runcall(test, result)
+ return run_and_profile
+
+ def report(self, stream):
+ """Output profiler report.
+ """
+ log.debug('printing profiler report')
+ self.prof.close()
+ prof_stats = stats.load(self.pfile)
+ prof_stats.sort_stats(self.sort)
+
+ # 2.5 has completely different stream handling from 2.4 and earlier.
+ # Before 2.5, stats objects have no stream attribute; in 2.5 and later
+ # a reference to sys.stdout is stored before we can tweak it.
+ compat_25 = hasattr(prof_stats, 'stream')
+ if compat_25:
+ tmp = prof_stats.stream
+ prof_stats.stream = stream
+ else:
+ tmp = sys.stdout
+ sys.stdout = stream
+ try:
+ if self.restrict:
+ log.debug('setting profiler restriction to %s', self.restrict)
+ prof_stats.print_stats(*self.restrict)
+ else:
+ prof_stats.print_stats()
+ finally:
+ if compat_25:
+ prof_stats.stream = tmp
+ else:
+ sys.stdout = tmp
+
+ def finalize(self, result):
+ """Clean up stats file, if configured to do so.
+ """
+ if not self.available():
+ return
+ try:
+ self.prof.close()
+ except AttributeError:
+ # TODO: is this trying to catch just the case where not
+ # hasattr(self.prof, "close")? If so, the function call should be
+ # moved out of the try: suite.
+ pass
+ if self.clean_stats_file:
+ if self.fileno:
+ try:
+ os.close(self.fileno)
+ except OSError:
+ pass
+ try:
+ os.unlink(self.pfile)
+ except OSError:
+ pass
+ return None
+
+ def _create_pfile(self):
+ if not self.pfile:
+ self.fileno, self.pfile = tempfile.mkstemp()
+ self.clean_stats_file = True
diff --git a/lib/spack/external/nose/plugins/skip.py b/lib/spack/external/nose/plugins/skip.py
new file mode 100644
index 0000000000..9d1ac8f604
--- /dev/null
+++ b/lib/spack/external/nose/plugins/skip.py
@@ -0,0 +1,63 @@
+"""
+This plugin installs a SKIP error class for the SkipTest exception.
+When SkipTest is raised, the exception will be logged in the skipped
+attribute of the result, 'S' or 'SKIP' (verbose) will be output, and
+the exception will not be counted as an error or failure. This plugin
+is enabled by default but may be disabled with the ``--no-skip`` option.
+"""
+
+from nose.plugins.errorclass import ErrorClass, ErrorClassPlugin
+
+
+# on SkipTest:
+# - unittest SkipTest is first preference, but it's only available
+# for >= 2.7
+# - unittest2 SkipTest is second preference for older pythons. This
+# mirrors logic for choosing SkipTest exception in testtools
+# - if none of the above, provide custom class
+try:
+ from unittest.case import SkipTest
+except ImportError:
+ try:
+ from unittest2.case import SkipTest
+ except ImportError:
+ class SkipTest(Exception):
+ """Raise this exception to mark a test as skipped.
+ """
+ pass
+
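+# Illustrative, not part of the original source: a test can skip itself by
+# raising SkipTest; with this plugin enabled the run reports 'S' (or 'SKIP'
+# in verbose mode) instead of an error:
+#
+#     def test_needs_feature():
+#         raise SkipTest("feature not available on this platform")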
+
+class Skip(ErrorClassPlugin):
+ """
+ Plugin that installs a SKIP error class for the SkipTest
+ exception. When SkipTest is raised, the exception will be logged
+ in the skipped attribute of the result, 'S' or 'SKIP' (verbose)
+ will be output, and the exception will not be counted as an error
+ or failure.
+ """
+ enabled = True
+ skipped = ErrorClass(SkipTest,
+ label='SKIP',
+ isfailure=False)
+
+ def options(self, parser, env):
+ """
+ Add my options to command line.
+ """
+ env_opt = 'NOSE_WITHOUT_SKIP'
+ parser.add_option('--no-skip', action='store_true',
+ dest='noSkip', default=env.get(env_opt, False),
+ help="Disable special handling of SkipTest "
+ "exceptions.")
+
+ def configure(self, options, conf):
+ """
+ Configure plugin. Skip plugin is enabled by default.
+ """
+ if not self.can_configure:
+ return
+ self.conf = conf
+ disable = getattr(options, 'noSkip', False)
+ if disable:
+ self.enabled = False
+
diff --git a/lib/spack/external/nose/plugins/testid.py b/lib/spack/external/nose/plugins/testid.py
new file mode 100644
index 0000000000..ae8119bd01
--- /dev/null
+++ b/lib/spack/external/nose/plugins/testid.py
@@ -0,0 +1,311 @@
+"""
+This plugin adds a test id (like #1) to each test name output. After
+you've run once to generate test ids, you can re-run individual
+tests by activating the plugin and passing the ids (with or
+without the # prefix) instead of test names.
+
+For example, if your normal test run looks like::
+
+ % nosetests -v
+ tests.test_a ... ok
+ tests.test_b ... ok
+ tests.test_c ... ok
+
+When adding ``--with-id`` you'll see::
+
+ % nosetests -v --with-id
+ #1 tests.test_a ... ok
+ #2 tests.test_b ... ok
+ #3 tests.test_c ... ok
+
+Then you can re-run individual tests by supplying just an id number::
+
+ % nosetests -v --with-id 2
+ #2 tests.test_b ... ok
+
+You can also pass multiple id numbers::
+
+ % nosetests -v --with-id 2 3
+ #2 tests.test_b ... ok
+ #3 tests.test_c ... ok
+
+Since most shells consider '#' a special character, you can leave it out when
+specifying a test id.
+
+Note that when run without the -v switch, no special output is displayed, but
+the ids file is still written.
+
+Looping over failed tests
+-------------------------
+
+This plugin also adds a mode that will direct the test runner to record
+failed tests. Subsequent test runs will then run only the tests that failed
+last time. Activate this mode with the ``--failed`` switch::
+
+ % nosetests -v --failed
+ #1 test.test_a ... ok
+ #2 test.test_b ... ERROR
+ #3 test.test_c ... FAILED
+ #4 test.test_d ... ok
+
+On the second run, only tests #2 and #3 will run::
+
+ % nosetests -v --failed
+ #2 test.test_b ... ERROR
+ #3 test.test_c ... FAILED
+
+As you correct errors and tests pass, they'll drop out of subsequent runs.
+
+First::
+
+ % nosetests -v --failed
+ #2 test.test_b ... ok
+ #3 test.test_c ... FAILED
+
+Second::
+
+ % nosetests -v --failed
+ #3 test.test_c ... FAILED
+
+When all tests pass, the full set will run on the next invocation.
+
+First::
+
+ % nosetests -v --failed
+ #3 test.test_c ... ok
+
+Second::
+
+ % nosetests -v --failed
+ #1 test.test_a ... ok
+ #2 test.test_b ... ok
+ #3 test.test_c ... ok
+ #4 test.test_d ... ok
+
+.. note ::
+
+ If you expect to use ``--failed`` regularly, it's a good idea to always run
+ using the ``--with-id`` option. This will ensure that an id file is always
+ created, allowing you to add ``--failed`` to the command line as soon as
+ you have failing tests. Otherwise, your first run using ``--failed`` will
+ (perhaps surprisingly) run *all* tests, because there won't be an id file
+ containing the record of failed tests from your previous run.
+
+"""
+__test__ = False
+
+import logging
+import os
+from nose.plugins import Plugin
+from nose.util import src, set
+
+try:
+ from cPickle import dump, load
+except ImportError:
+ from pickle import dump, load
+
+log = logging.getLogger(__name__)
+
+
+class TestId(Plugin):
+ """
+ Activate to add a test id (like #1) to each test name output. Activate
+ with --failed to rerun failing tests only.
+ """
+ name = 'id'
+ idfile = None
+ collecting = True
+ loopOnFailed = False
+
+ def options(self, parser, env):
+ """Register commandline options.
+ """
+ Plugin.options(self, parser, env)
+ parser.add_option('--id-file', action='store', dest='testIdFile',
+ default='.noseids', metavar="FILE",
+ help="Store test ids found in test runs in this "
+ "file. Default is the file .noseids in the "
+ "working directory.")
+ parser.add_option('--failed', action='store_true',
+ dest='failed', default=False,
+ help="Run the tests that failed in the last "
+ "test run.")
+
+ def configure(self, options, conf):
+ """Configure plugin.
+ """
+ Plugin.configure(self, options, conf)
+ if options.failed:
+ self.enabled = True
+ self.loopOnFailed = True
+ log.debug("Looping on failed tests")
+ self.idfile = os.path.expanduser(options.testIdFile)
+ if not os.path.isabs(self.idfile):
+ self.idfile = os.path.join(conf.workingDir, self.idfile)
+ self.id = 1
+ # Ids and tests are mirror images: ids are {id: test address} and
+ # tests are {test address: id}
+ self.ids = {}
+ self.tests = {}
+ self.failed = []
+ self.source_names = []
+ # used to track ids seen when tests is filled from
+ # loaded ids file
+ self._seen = {}
+ self._write_hashes = conf.verbosity >= 2
+
+ def finalize(self, result):
+ """Save new ids file, if needed.
+ """
+ if result.wasSuccessful():
+ self.failed = []
+ if self.collecting:
+ ids = dict(list(zip(list(self.tests.values()), list(self.tests.keys()))))
+ else:
+ ids = self.ids
+ fh = open(self.idfile, 'wb')
+ dump({'ids': ids,
+ 'failed': self.failed,
+ 'source_names': self.source_names}, fh)
+ fh.close()
+ log.debug('Saved test ids: %s, failed %s to %s',
+ ids, self.failed, self.idfile)
+
+ def loadTestsFromNames(self, names, module=None):
+ """Translate ids in the list of requested names into their
+ test addresses, if they are found in my dict of tests.
+ """
+ log.debug('ltfn %s %s', names, module)
+ try:
+ fh = open(self.idfile, 'rb')
+ data = load(fh)
+ if 'ids' in data:
+ self.ids = data['ids']
+ self.failed = data['failed']
+ self.source_names = data['source_names']
+ else:
+ # old ids field
+ self.ids = data
+ self.failed = []
+ self.source_names = names
+ if self.ids:
+ self.id = max(self.ids) + 1
+ self.tests = dict(list(zip(list(self.ids.values()), list(self.ids.keys()))))
+ else:
+ self.id = 1
+ log.debug(
+ 'Loaded test ids %s tests %s failed %s sources %s from %s',
+ self.ids, self.tests, self.failed, self.source_names,
+ self.idfile)
+ fh.close()
+ except ValueError, e:
+ # load() may throw a ValueError when reading the ids file, if it
+ # was generated with a newer version of Python than we are currently
+ # running.
+ log.debug('Error loading %s : %s', self.idfile, str(e))
+ except IOError:
+ log.debug('IO error reading %s', self.idfile)
+
+ if self.loopOnFailed and self.failed:
+ self.collecting = False
+ names = self.failed
+ self.failed = []
+ # I don't load any tests myself, only translate names like '#2'
+ # into the associated test addresses
+ translated = []
+ new_source = []
+ really_new = []
+ for name in names:
+ trans = self.tr(name)
+ if trans != name:
+ translated.append(trans)
+ else:
+ new_source.append(name)
+ # names that are not ids and that are not in the current
+ # list of source names go into the list for next time
+ if new_source:
+ new_set = set(new_source)
+ old_set = set(self.source_names)
+ log.debug("old: %s new: %s", old_set, new_set)
+ really_new = [s for s in new_source
+ if not s in old_set]
+ if really_new:
+ # remember new sources
+ self.source_names.extend(really_new)
+ if not translated:
+ # new set of source names, no translations
+ # means "run the requested tests"
+ names = new_source
+ else:
+ # no new names to translate and add to id set
+ self.collecting = False
+ log.debug("translated: %s new sources %s names %s",
+ translated, really_new, names)
+ return (None, translated + really_new or names)
+
+ def makeName(self, addr):
+ log.debug("Make name %s", addr)
+ filename, module, call = addr
+ if filename is not None:
+ head = src(filename)
+ else:
+ head = module
+ if call is not None:
+ return "%s:%s" % (head, call)
+ return head
+
+ def setOutputStream(self, stream):
+ """Get handle on output stream so the plugin can print id #s
+ """
+ self.stream = stream
+
+ def startTest(self, test):
+ """Maybe output an id # before the test name.
+
+ Example output::
+
+ #1 test.test ... ok
+ #2 test.test_two ... ok
+
+ """
+ adr = test.address()
+ log.debug('start test %s (%s)', adr, adr in self.tests)
+ if adr in self.tests:
+ if adr in self._seen:
+ self.write(' ')
+ else:
+ self.write('#%s ' % self.tests[adr])
+ self._seen[adr] = 1
+ return
+ self.tests[adr] = self.id
+ self.write('#%s ' % self.id)
+ self.id += 1
+
+ def afterTest(self, test):
+ # None means test never ran, False means failed/err
+ if test.passed is False:
+ try:
+ key = str(self.tests[test.address()])
+ except KeyError:
+ # never saw this test -- startTest didn't run
+ pass
+ else:
+ if key not in self.failed:
+ self.failed.append(key)
+
+ def tr(self, name):
+ log.debug("tr '%s'", name)
+ try:
+ key = int(name.replace('#', ''))
+ except ValueError:
+ return name
+ log.debug("Got key %s", key)
+ # I'm running tests mapped from the ids file,
+ # not collecting new ones
+ if key in self.ids:
+ return self.makeName(self.ids[key])
+ return name
+
+ def write(self, output):
+ if self._write_hashes:
+ self.stream.write(output)
diff --git a/lib/spack/external/nose/plugins/xunit.py b/lib/spack/external/nose/plugins/xunit.py
new file mode 100644
index 0000000000..90b52f5f61
--- /dev/null
+++ b/lib/spack/external/nose/plugins/xunit.py
@@ -0,0 +1,341 @@
+"""This plugin provides test results in the standard XUnit XML format.
+
+It's designed for the `Jenkins`_ (previously Hudson) continuous build
+system, but will probably work for anything else that understands an
+XUnit-formatted XML representation of test results.
+
+Add this shell command to your builder ::
+
+ nosetests --with-xunit
+
+And by default a file named nosetests.xml will be written to the
+working directory.
+
+In a Jenkins builder, tick the box named "Publish JUnit test result report"
+under the Post-build Actions and enter this value for Test report XMLs::
+
+ **/nosetests.xml
+
+If you need to change the name or location of the file, you can set the
+``--xunit-file`` option.
+
+If you need to change the name of the test suite, you can set the
+``--xunit-testsuite-name`` option.
+
+Here is an abbreviated version of what an XML test report might look like::
+
+ <?xml version="1.0" encoding="UTF-8"?>
+ <testsuite name="nosetests" tests="1" errors="1" failures="0" skip="0">
+ <testcase classname="path_to_test_suite.TestSomething"
+ name="test_it" time="0">
+ <error type="exceptions.TypeError" message="oops, wrong type">
+ Traceback (most recent call last):
+ ...
+ TypeError: oops, wrong type
+ </error>
+ </testcase>
+ </testsuite>
+
+.. _Jenkins: http://jenkins-ci.org/
+
+"""
+import codecs
+import doctest
+import os
+import sys
+import traceback
+import re
+import inspect
+from StringIO import StringIO
+from time import time
+from xml.sax import saxutils
+
+from nose.plugins.base import Plugin
+from nose.exc import SkipTest
+from nose.pyversion import force_unicode, format_exception
+
+# Invalid XML characters, control characters 0-31 sans \t, \n and \r
+CONTROL_CHARACTERS = re.compile(r"[\000-\010\013\014\016-\037]")
+
+TEST_ID = re.compile(r'^(.*?)(\(.*\))$')
+
+def xml_safe(value):
+ """Replaces invalid XML characters with '?'."""
+ return CONTROL_CHARACTERS.sub('?', value)
+
+def escape_cdata(cdata):
+ """Escape a string for an XML CDATA section."""
+ return xml_safe(cdata).replace(']]>', ']]>]]&gt;<![CDATA[')
+
+def id_split(idval):
+ m = TEST_ID.match(idval)
+ if m:
+ name, fargs = m.groups()
+ head, tail = name.rsplit(".", 1)
+ return [head, tail+fargs]
+ else:
+ return idval.rsplit(".", 1)
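+
+# e.g. (illustrative):
+#   id_split('pkg.mod.TestCase.test_it')  -> ['pkg.mod.TestCase', 'test_it']
+#   id_split('pkg.mod.test_gen(1, 2)')    -> ['pkg.mod', 'test_gen(1, 2)']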
+
+def nice_classname(obj):
+ """Returns a nice name for class object or class instance.
+
+ >>> nice_classname(Exception()) # doctest: +ELLIPSIS
+ '...Exception'
+ >>> nice_classname(Exception) # doctest: +ELLIPSIS
+ '...Exception'
+
+ """
+ if inspect.isclass(obj):
+ cls_name = obj.__name__
+ else:
+ cls_name = obj.__class__.__name__
+ mod = inspect.getmodule(obj)
+ if mod:
+ name = mod.__name__
+ # jython
+ if name.startswith('org.python.core.'):
+ name = name[len('org.python.core.'):]
+ return "%s.%s" % (name, cls_name)
+ else:
+ return cls_name
+
+def exc_message(exc_info):
+ """Return the exception's message."""
+ exc = exc_info[1]
+ if exc is None:
+ # str exception
+ result = exc_info[0]
+ else:
+ try:
+ result = str(exc)
+ except UnicodeEncodeError:
+ try:
+ result = unicode(exc)
+ except UnicodeError:
+ # Fallback to args as neither str nor
+ # unicode(Exception(u'\xe6')) work in Python < 2.6
+ result = exc.args[0]
+ result = force_unicode(result, 'UTF-8')
+ return xml_safe(result)
+
+class Tee(object):
+ def __init__(self, encoding, *args):
+ self._encoding = encoding
+ self._streams = args
+
+ def write(self, data):
+ data = force_unicode(data, self._encoding)
+ for s in self._streams:
+ s.write(data)
+
+ def writelines(self, lines):
+ for line in lines:
+ self.write(line)
+
+ def flush(self):
+ for s in self._streams:
+ s.flush()
+
+ def isatty(self):
+ return False
+
+
+class Xunit(Plugin):
+ """This plugin provides test results in the standard XUnit XML format."""
+ name = 'xunit'
+ score = 1500
+ encoding = 'UTF-8'
+ error_report_file = None
+
+ def __init__(self):
+ super(Xunit, self).__init__()
+ self._capture_stack = []
+ self._currentStdout = None
+ self._currentStderr = None
+
+ def _timeTaken(self):
+ if hasattr(self, '_timer'):
+ taken = time() - self._timer
+ else:
+ # the test died before it ran (probably an error in setup()), or a
+ # success/failure was added before the test started, probably due
+ # to custom TestResult munging
+ taken = 0.0
+ return taken
+
+ def _quoteattr(self, attr):
+ """Escape an XML attribute. Value can be unicode."""
+ attr = xml_safe(attr)
+ return saxutils.quoteattr(attr)
+
+ def options(self, parser, env):
+ """Sets additional command line options."""
+ Plugin.options(self, parser, env)
+ parser.add_option(
+ '--xunit-file', action='store',
+ dest='xunit_file', metavar="FILE",
+ default=env.get('NOSE_XUNIT_FILE', 'nosetests.xml'),
+ help=("Path to xml file to store the xunit report in. "
+ "Default is nosetests.xml in the working directory "
+ "[NOSE_XUNIT_FILE]"))
+
+ parser.add_option(
+ '--xunit-testsuite-name', action='store',
+ dest='xunit_testsuite_name', metavar="PACKAGE",
+ default=env.get('NOSE_XUNIT_TESTSUITE_NAME', 'nosetests'),
+ help=("Name of the testsuite in the xunit xml, generated by plugin. "
+ "Default test suite name is nosetests."))
+
+ def configure(self, options, config):
+ """Configures the xunit plugin."""
+ Plugin.configure(self, options, config)
+ self.config = config
+ if self.enabled:
+ self.stats = {'errors': 0,
+ 'failures': 0,
+ 'passes': 0,
+ 'skipped': 0
+ }
+ self.errorlist = []
+ self.error_report_file_name = os.path.realpath(options.xunit_file)
+ self.xunit_testsuite_name = options.xunit_testsuite_name
+
+ def report(self, stream):
+ """Writes an Xunit-formatted XML file
+
+ The file includes a report of test errors and failures.
+
+ """
+ self.error_report_file = codecs.open(self.error_report_file_name, 'w',
+ self.encoding, 'replace')
+ self.stats['encoding'] = self.encoding
+ self.stats['testsuite_name'] = self.xunit_testsuite_name
+ self.stats['total'] = (self.stats['errors'] + self.stats['failures']
+ + self.stats['passes'] + self.stats['skipped'])
+ self.error_report_file.write(
+ u'<?xml version="1.0" encoding="%(encoding)s"?>'
+ u'<testsuite name="%(testsuite_name)s" tests="%(total)d" '
+ u'errors="%(errors)d" failures="%(failures)d" '
+ u'skip="%(skipped)d">' % self.stats)
+ self.error_report_file.write(u''.join([force_unicode(e, self.encoding)
+ for e in self.errorlist]))
+ self.error_report_file.write(u'</testsuite>')
+ self.error_report_file.close()
+ if self.config.verbosity > 1:
+ stream.writeln("-" * 70)
+ stream.writeln("XML: %s" % self.error_report_file.name)
+
+ def _startCapture(self):
+ self._capture_stack.append((sys.stdout, sys.stderr))
+ self._currentStdout = StringIO()
+ self._currentStderr = StringIO()
+ sys.stdout = Tee(self.encoding, self._currentStdout, sys.stdout)
+ sys.stderr = Tee(self.encoding, self._currentStderr, sys.stderr)
+
+ def startContext(self, context):
+ self._startCapture()
+
+ def stopContext(self, context):
+ self._endCapture()
+
+ def beforeTest(self, test):
+ """Initializes a timer before starting a test."""
+ self._timer = time()
+ self._startCapture()
+
+ def _endCapture(self):
+ if self._capture_stack:
+ sys.stdout, sys.stderr = self._capture_stack.pop()
+
+ def afterTest(self, test):
+ self._endCapture()
+ self._currentStdout = None
+ self._currentStderr = None
+
+ def finalize(self, test):
+ while self._capture_stack:
+ self._endCapture()
+
+ def _getCapturedStdout(self):
+ if self._currentStdout:
+ value = self._currentStdout.getvalue()
+ if value:
+ return '<system-out><![CDATA[%s]]></system-out>' % escape_cdata(
+ value)
+ return ''
+
+ def _getCapturedStderr(self):
+ if self._currentStderr:
+ value = self._currentStderr.getvalue()
+ if value:
+ return '<system-err><![CDATA[%s]]></system-err>' % escape_cdata(
+ value)
+ return ''
+
+ def addError(self, test, err, capt=None):
+ """Add error output to Xunit report.
+ """
+ taken = self._timeTaken()
+
+ if issubclass(err[0], SkipTest):
+ type = 'skipped'
+ self.stats['skipped'] += 1
+ else:
+ type = 'error'
+ self.stats['errors'] += 1
+
+ tb = format_exception(err, self.encoding)
+ id = test.id()
+
+ self.errorlist.append(
+ u'<testcase classname=%(cls)s name=%(name)s time="%(taken).3f">'
+ u'<%(type)s type=%(errtype)s message=%(message)s><![CDATA[%(tb)s]]>'
+ u'</%(type)s>%(systemout)s%(systemerr)s</testcase>' %
+ {'cls': self._quoteattr(id_split(id)[0]),
+ 'name': self._quoteattr(id_split(id)[-1]),
+ 'taken': taken,
+ 'type': type,
+ 'errtype': self._quoteattr(nice_classname(err[0])),
+ 'message': self._quoteattr(exc_message(err)),
+ 'tb': escape_cdata(tb),
+ 'systemout': self._getCapturedStdout(),
+ 'systemerr': self._getCapturedStderr(),
+ })
+
+ def addFailure(self, test, err, capt=None, tb_info=None):
+ """Add failure output to Xunit report.
+ """
+ taken = self._timeTaken()
+ tb = format_exception(err, self.encoding)
+ self.stats['failures'] += 1
+ id = test.id()
+
+ self.errorlist.append(
+ u'<testcase classname=%(cls)s name=%(name)s time="%(taken).3f">'
+ u'<failure type=%(errtype)s message=%(message)s><![CDATA[%(tb)s]]>'
+ u'</failure>%(systemout)s%(systemerr)s</testcase>' %
+ {'cls': self._quoteattr(id_split(id)[0]),
+ 'name': self._quoteattr(id_split(id)[-1]),
+ 'taken': taken,
+ 'errtype': self._quoteattr(nice_classname(err[0])),
+ 'message': self._quoteattr(exc_message(err)),
+ 'tb': escape_cdata(tb),
+ 'systemout': self._getCapturedStdout(),
+ 'systemerr': self._getCapturedStderr(),
+ })
+
+ def addSuccess(self, test, capt=None):
+ """Add success output to Xunit report.
+ """
+ taken = self._timeTaken()
+ self.stats['passes'] += 1
+ id = test.id()
+ self.errorlist.append(
+ '<testcase classname=%(cls)s name=%(name)s '
+ 'time="%(taken).3f">%(systemout)s%(systemerr)s</testcase>' %
+ {'cls': self._quoteattr(id_split(id)[0]),
+ 'name': self._quoteattr(id_split(id)[-1]),
+ 'taken': taken,
+ 'systemout': self._getCapturedStdout(),
+ 'systemerr': self._getCapturedStderr(),
+ })
diff --git a/lib/spack/external/nose/proxy.py b/lib/spack/external/nose/proxy.py
new file mode 100644
index 0000000000..c2676cb195
--- /dev/null
+++ b/lib/spack/external/nose/proxy.py
@@ -0,0 +1,188 @@
+"""
+Result Proxy
+------------
+
+The result proxy wraps the result instance given to each test. It
+performs two functions: enabling extended error/failure reporting
+and calling plugins.
+
+As each result event is fired, plugins are called with the same event;
+however, plugins are called with the nose.case.Test instance that
+wraps the actual test. So when a test fails and calls
+result.addFailure(self, err), the result proxy calls
+addFailure(self.test, err) for each plugin. This allows plugins to
+have a single stable interface for all test types, and also to
+manipulate the test object itself by setting the `test` attribute of
+the nose.case.Test that they receive.
+"""
+import logging
+from nose.config import Config
+
+
+log = logging.getLogger(__name__)
+
+
+def proxied_attribute(local_attr, proxied_attr, doc):
+ """Create a property that proxies attribute ``proxied_attr`` through
+ the local attribute ``local_attr``.
+ """
+ def fget(self):
+ return getattr(getattr(self, local_attr), proxied_attr)
+ def fset(self, value):
+ setattr(getattr(self, local_attr), proxied_attr, value)
+ def fdel(self):
+ delattr(getattr(self, local_attr), proxied_attr)
+ return property(fget, fset, fdel, doc)
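+
+# For example (illustrative, not from the original source), mirroring how
+# ResultProxy exposes attributes of the wrapped result below:
+#
+#     class Wrapper(object):
+#         def __init__(self, result):
+#             self.result = result
+#         shouldStop = proxied_attribute('result', 'shouldStop',
+#                                        "proxied to self.result")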
+
+
+class ResultProxyFactory(object):
+ """Factory for result proxies. Generates a ResultProxy bound to each test
+ and the result passed to the test.
+ """
+ def __init__(self, config=None):
+ if config is None:
+ config = Config()
+ self.config = config
+ self.__prepared = False
+ self.__result = None
+
+ def __call__(self, result, test):
+ """Return a ResultProxy for the current test.
+
+ On first call, plugins are given a chance to replace the
+ result used for the remaining tests. If a plugin returns a
+ value from prepareTestResult, that object will be used as the
+ result for all tests.
+ """
+ if not self.__prepared:
+ self.__prepared = True
+ plug_result = self.config.plugins.prepareTestResult(result)
+ if plug_result is not None:
+ self.__result = result = plug_result
+ if self.__result is not None:
+ result = self.__result
+ return ResultProxy(result, test, config=self.config)
+
+
+class ResultProxy(object):
+ """Proxy to TestResults (or other results handler).
+
+ One ResultProxy is created for each nose.case.Test. The result
+ proxy calls plugins with the nose.case.Test instance (instead of
+ the wrapped test case) as each result call is made. Finally, the
+ real result method is called, also with the nose.case.Test
+ instance as the test parameter.
+
+ """
+ def __init__(self, result, test, config=None):
+ if config is None:
+ config = Config()
+ self.config = config
+ self.plugins = config.plugins
+ self.result = result
+ self.test = test
+
+ def __repr__(self):
+ return repr(self.result)
+
+ def _prepareErr(self, err):
+ if not isinstance(err[1], Exception) and isinstance(err[0], type):
+ # Turn value back into an Exception (required in Python 3.x).
+ # Plugins do all sorts of crazy things with exception values.
+ # Convert it to a custom subclass of Exception with the same
+ # name as the actual exception to make it print correctly.
+ value = type(err[0].__name__, (Exception,), {})(err[1])
+ err = (err[0], value, err[2])
+ return err
+
+ def assertMyTest(self, test):
+ # The test I was called with must be self.test, self.test's wrapped
+ # .test, or that wrapped test's ._nose_case
+
+ case = getattr(self.test, 'test', None)
+ assert (test is self.test
+ or test is case
+ or test is getattr(case, '_nose_case', None)), (
+ "ResultProxy for %r (%s) was called with test %r (%s)"
+ % (self.test, id(self.test), test, id(test)))
+
+ def afterTest(self, test):
+ self.assertMyTest(test)
+ self.plugins.afterTest(self.test)
+ if hasattr(self.result, "afterTest"):
+ self.result.afterTest(self.test)
+
+ def beforeTest(self, test):
+ self.assertMyTest(test)
+ self.plugins.beforeTest(self.test)
+ if hasattr(self.result, "beforeTest"):
+ self.result.beforeTest(self.test)
+
+ def addError(self, test, err):
+ self.assertMyTest(test)
+ plugins = self.plugins
+ plugin_handled = plugins.handleError(self.test, err)
+ if plugin_handled:
+ return
+ # test.passed is set in result, to account for error classes
+ formatted = plugins.formatError(self.test, err)
+ if formatted is not None:
+ err = formatted
+ plugins.addError(self.test, err)
+ self.result.addError(self.test, self._prepareErr(err))
+ if not self.result.wasSuccessful() and self.config.stopOnError:
+ self.shouldStop = True
+
+ def addFailure(self, test, err):
+ self.assertMyTest(test)
+ plugins = self.plugins
+ plugin_handled = plugins.handleFailure(self.test, err)
+ if plugin_handled:
+ return
+ self.test.passed = False
+ formatted = plugins.formatFailure(self.test, err)
+ if formatted is not None:
+ err = formatted
+ plugins.addFailure(self.test, err)
+ self.result.addFailure(self.test, self._prepareErr(err))
+ if self.config.stopOnError:
+ self.shouldStop = True
+
+ def addSkip(self, test, reason):
+ # 2.7 compat shim
+ from nose.plugins.skip import SkipTest
+ self.assertMyTest(test)
+ plugins = self.plugins
+ if not isinstance(reason, Exception):
+ # for Python 3.2+
+ reason = Exception(reason)
+ plugins.addError(self.test, (SkipTest, reason, None))
+ self.result.addSkip(self.test, reason)
+
+ def addSuccess(self, test):
+ self.assertMyTest(test)
+ self.plugins.addSuccess(self.test)
+ self.result.addSuccess(self.test)
+
+ def startTest(self, test):
+ self.assertMyTest(test)
+ self.plugins.startTest(self.test)
+ self.result.startTest(self.test)
+
+ def stop(self):
+ self.result.stop()
+
+ def stopTest(self, test):
+ self.assertMyTest(test)
+ self.plugins.stopTest(self.test)
+ self.result.stopTest(self.test)
+
+ # proxied attributes
+ shouldStop = proxied_attribute('result', 'shouldStop',
+ """Should the test run stop?""")
+ errors = proxied_attribute('result', 'errors',
+ """Tests that raised an exception""")
+ failures = proxied_attribute('result', 'failures',
+ """Tests that failed""")
+ testsRun = proxied_attribute('result', 'testsRun',
+ """Number of tests run""")
diff --git a/lib/spack/external/nose/pyversion.py b/lib/spack/external/nose/pyversion.py
new file mode 100644
index 0000000000..091238da75
--- /dev/null
+++ b/lib/spack/external/nose/pyversion.py
@@ -0,0 +1,215 @@
+"""
+This module contains fixups for using nose under different versions of Python.
+"""
+import sys
+import os
+import traceback
+import types
+import inspect
+import nose.util
+
+__all__ = ['make_instancemethod', 'cmp_to_key', 'sort_list', 'ClassType',
+ 'TypeType', 'UNICODE_STRINGS', 'unbound_method', 'ismethod',
+ 'bytes_', 'is_base_exception', 'force_unicode', 'exc_to_unicode',
+ 'format_exception']
+
+# In Python 3.x, all strings are unicode (the call to 'unicode()' in the 2.x
+# source will be replaced with 'str()' when running 2to3, so this test will
+# then become true)
+UNICODE_STRINGS = (type(unicode()) == type(str()))
+
+if sys.version_info[:2] < (3, 0):
+ def force_unicode(s, encoding='UTF-8'):
+ try:
+ s = unicode(s)
+ except UnicodeDecodeError:
+ s = str(s).decode(encoding, 'replace')
+
+ return s
+else:
+ def force_unicode(s, encoding='UTF-8'):
+ return str(s)
+
+# new.instancemethod() is obsolete for new-style classes (Python 3.x)
+# We need to use descriptor methods instead.
+try:
+ import new
+ def make_instancemethod(function, instance):
+ return new.instancemethod(function.im_func, instance,
+ instance.__class__)
+except ImportError:
+ def make_instancemethod(function, instance):
+ return function.__get__(instance, instance.__class__)
+
+# To be forward-compatible, we do all list sorts using keys instead of cmp
+# functions. However, part of the unittest.TestLoader API involves a
+# user-providable cmp function, so we need some way to convert that.
+def cmp_to_key(mycmp):
+ 'Convert a cmp= function into a key= function'
+ class Key(object):
+ def __init__(self, obj):
+ self.obj = obj
+ def __lt__(self, other):
+ return mycmp(self.obj, other.obj) < 0
+ def __gt__(self, other):
+ return mycmp(self.obj, other.obj) > 0
+ def __eq__(self, other):
+ return mycmp(self.obj, other.obj) == 0
+ return Key
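+
+# e.g. (illustrative): adapt a legacy cmp function for key-based sorting:
+#
+#     by_len = lambda a, b: cmp(len(a), len(b))
+#     names = ['bbb', 'a', 'cc']
+#     sort_list(names, key=cmp_to_key(by_len))   # names -> ['a', 'cc', 'bbb']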
+
+# Python 2.3 also does not support list-sorting by key, so we need to convert
+# keys to cmp functions if we're running on old Python.
+if sys.version_info < (2, 4):
+ def sort_list(l, key, reverse=False):
+ if reverse:
+ return l.sort(lambda a, b: cmp(key(b), key(a)))
+ else:
+ return l.sort(lambda a, b: cmp(key(a), key(b)))
+else:
+ def sort_list(l, key, reverse=False):
+ return l.sort(key=key, reverse=reverse)
+
+# In Python 3.x, all objects are "new style" objects descended from 'type', and
+# thus types.ClassType and types.TypeType don't exist anymore. For
+# compatibility, we make sure they still work.
+if hasattr(types, 'ClassType'):
+ ClassType = types.ClassType
+ TypeType = types.TypeType
+else:
+ ClassType = type
+ TypeType = type
+
+# The following emulates the behavior we need from an 'unbound method' under
+# Python 3.x: the ability to associate a class with a function definition, so
+# that selectors and plugins can make decisions based on that class.
+class UnboundMethod:
+ def __init__(self, cls, func):
+ # Make sure we have all the same attributes as the original function,
+ # so that the AttributeSelector plugin will work correctly...
+ self.__dict__ = func.__dict__.copy()
+ self._func = func
+ self.__self__ = UnboundSelf(cls)
+ if sys.version_info < (3, 0):
+ self.im_class = cls
+ self.__doc__ = getattr(func, '__doc__', None)
+
+ def address(self):
+ cls = self.__self__.cls
+ modname = cls.__module__
+ module = sys.modules[modname]
+ filename = getattr(module, '__file__', None)
+ if filename is not None:
+ filename = os.path.abspath(filename)
+ return (nose.util.src(filename), modname, "%s.%s" % (cls.__name__,
+ self._func.__name__))
+
+ def __call__(self, *args, **kwargs):
+ return self._func(*args, **kwargs)
+
+ def __getattr__(self, attr):
+ return getattr(self._func, attr)
+
+ def __repr__(self):
+ return '<unbound method %s.%s>' % (self.__self__.cls.__name__,
+ self._func.__name__)
+
+class UnboundSelf:
+ def __init__(self, cls):
+ self.cls = cls
+
+ # We have to do this hackery because Python won't let us override the
+ # __class__ attribute...
+ def __getattribute__(self, attr):
+ if attr == '__class__':
+ return self.cls
+ else:
+ return object.__getattribute__(self, attr)
+
+def unbound_method(cls, func):
+ if inspect.ismethod(func):
+ return func
+ if not inspect.isfunction(func):
+ raise TypeError('%s is not a function' % (repr(func),))
+ return UnboundMethod(cls, func)
+
+def ismethod(obj):
+ return inspect.ismethod(obj) or isinstance(obj, UnboundMethod)
+
+
+# Make a pseudo-bytes function that can be called without the encoding arg:
+if sys.version_info >= (3, 0):
+ def bytes_(s, encoding='utf8'):
+ if isinstance(s, bytes):
+ return s
+ return bytes(s, encoding)
+else:
+ def bytes_(s, encoding=None):
+ return str(s)
+
+
+if sys.version_info[:2] >= (2, 6):
+ def isgenerator(o):
+ if isinstance(o, UnboundMethod):
+ o = o._func
+ return inspect.isgeneratorfunction(o) or inspect.isgenerator(o)
+else:
+ try:
+ from compiler.consts import CO_GENERATOR
+ except ImportError:
+ # IronPython doesn't have a compiler module
+ CO_GENERATOR=0x20
+
+ def isgenerator(func):
+ try:
+ return func.func_code.co_flags & CO_GENERATOR != 0
+ except AttributeError:
+ return False
+
+# Make a function to help check if an exception is derived from BaseException.
+# In Python 2.4, we just use Exception instead.
+if sys.version_info[:2] < (2, 5):
+ def is_base_exception(exc):
+ return isinstance(exc, Exception)
+else:
+ def is_base_exception(exc):
+ return isinstance(exc, BaseException)
+
+if sys.version_info[:2] < (3, 0):
+ def exc_to_unicode(ev, encoding='utf-8'):
+ if is_base_exception(ev):
+ if not hasattr(ev, '__unicode__'):
+ # 2.5-
+ if not hasattr(ev, 'message'):
+ # 2.4
+ msg = len(ev.args) and ev.args[0] or ''
+ else:
+ msg = ev.message
+ msg = force_unicode(msg, encoding=encoding)
+ clsname = force_unicode(ev.__class__.__name__,
+ encoding=encoding)
+ ev = u'%s: %s' % (clsname, msg)
+ elif not isinstance(ev, unicode):
+ ev = repr(ev)
+
+ return force_unicode(ev, encoding=encoding)
+else:
+ def exc_to_unicode(ev, encoding='utf-8'):
+ return str(ev)
+
+def format_exception(exc_info, encoding='UTF-8'):
+ ec, ev, tb = exc_info
+
+ # Our exception object may have been turned into a string, and Python 3's
+ # traceback.format_exception() doesn't take kindly to that (it expects an
+ # actual exception object). So we work around it, by doing the work
+ # ourselves if ev is not an exception object.
+ if not is_base_exception(ev):
+ tb_data = force_unicode(
+ ''.join(traceback.format_tb(tb)),
+ encoding)
+ ev = exc_to_unicode(ev)
+ return tb_data + ev
+ else:
+ return force_unicode(
+ ''.join(traceback.format_exception(*exc_info)),
+ encoding)
diff --git a/lib/spack/external/nose/result.py b/lib/spack/external/nose/result.py
new file mode 100644
index 0000000000..f974a14ae2
--- /dev/null
+++ b/lib/spack/external/nose/result.py
@@ -0,0 +1,200 @@
+"""
+Test Result
+-----------
+
+Provides a TextTestResult that extends unittest's _TextTestResult to
+provide support for error classes (such as the builtin skip and
+deprecated classes), and hooks for plugins to take over or extend
+reporting.
+"""
+
+import logging
+try:
+ # 2.7+
+ from unittest.runner import _TextTestResult
+except ImportError:
+ from unittest import _TextTestResult
+from nose.config import Config
+from nose.util import isclass, ln as _ln # backwards compat
+
+log = logging.getLogger('nose.result')
+
+
+def _exception_detail(exc):
+ # this is what stdlib module traceback does
+ try:
+ return str(exc)
+ except:
+ return '<unprintable %s object>' % type(exc).__name__
+
+
+class TextTestResult(_TextTestResult):
+ """Text test result that extends unittest's default test result
+ support for a configurable set of errorClasses (eg, Skip,
+ Deprecated, TODO) that extend the errors/failures/success triad.
+ """
+ def __init__(self, stream, descriptions, verbosity, config=None,
+ errorClasses=None):
+ if errorClasses is None:
+ errorClasses = {}
+ self.errorClasses = errorClasses
+ if config is None:
+ config = Config()
+ self.config = config
+ _TextTestResult.__init__(self, stream, descriptions, verbosity)
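+
+ # errorClasses maps exception class -> (storage list, label, isfail);
+ # e.g. (illustrative) {SkipTest: ([], 'SKIP', False)} collects skips
+ # under the SKIP label without failing the run (see addError below).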
+
+ def addSkip(self, test, reason):
+ # 2.7 skip compat
+ from nose.plugins.skip import SkipTest
+ if SkipTest in self.errorClasses:
+ storage, label, isfail = self.errorClasses[SkipTest]
+ storage.append((test, reason))
+ self.printLabel(label, (SkipTest, reason, None))
+
+ def addError(self, test, err):
+ """Overrides normal addError to add support for
+ errorClasses. If the exception is a registered class, the
+ error will be added to the list for that class, not errors.
+ """
+ ec, ev, tb = err
+ try:
+ exc_info = self._exc_info_to_string(err, test)
+ except TypeError:
+ # 2.3 compat
+ exc_info = self._exc_info_to_string(err)
+ for cls, (storage, label, isfail) in self.errorClasses.items():
+ #if 'Skip' in cls.__name__ or 'Skip' in ec.__name__:
+ # from nose.tools import set_trace
+ # set_trace()
+ if isclass(ec) and issubclass(ec, cls):
+ if isfail:
+ test.passed = False
+ storage.append((test, exc_info))
+ self.printLabel(label, err)
+ return
+ self.errors.append((test, exc_info))
+ test.passed = False
+ self.printLabel('ERROR')
+
+ # override to bypass changes in 2.7
+ def getDescription(self, test):
+ if self.descriptions:
+ return test.shortDescription() or str(test)
+ else:
+ return str(test)
+
+ def printLabel(self, label, err=None):
+ # Might get patched into a streamless result
+ stream = getattr(self, 'stream', None)
+ if stream is not None:
+ if self.showAll:
+ message = [label]
+ if err:
+ detail = _exception_detail(err[1])
+ if detail:
+ message.append(detail)
+ stream.writeln(": ".join(message))
+ elif self.dots:
+ stream.write(label[:1])
+
+ def printErrors(self):
+ """Overrides to print all errorClasses errors as well.
+ """
+ _TextTestResult.printErrors(self)
+ for cls in self.errorClasses.keys():
+ storage, label, isfail = self.errorClasses[cls]
+ if isfail:
+ self.printErrorList(label, storage)
+ # Might get patched into a result with no config
+ if hasattr(self, 'config'):
+ self.config.plugins.report(self.stream)
+
+ def printSummary(self, start, stop):
+ """Called by the test runner to print the final summary of test
+ run results.
+ """
+ write = self.stream.write
+ writeln = self.stream.writeln
+ taken = float(stop - start)
+ run = self.testsRun
+ plural = run != 1 and "s" or ""
+
+ writeln(self.separator2)
+ writeln("Ran %s test%s in %.3fs" % (run, plural, taken))
+ writeln()
+
+ summary = {}
+ eckeys = self.errorClasses.keys()
+ for cls in eckeys:
+ storage, label, isfail = self.errorClasses[cls]
+ count = len(storage)
+ if not count:
+ continue
+ summary[label] = count
+ if len(self.failures):
+ summary['failures'] = len(self.failures)
+ if len(self.errors):
+ summary['errors'] = len(self.errors)
+
+ if not self.wasSuccessful():
+ write("FAILED")
+ else:
+ write("OK")
+ items = summary.items()
+ if items:
+ items.sort()
+ write(" (")
+ write(", ".join(["%s=%s" % (label, count) for
+ label, count in items]))
+ writeln(")")
+ else:
+ writeln()
+
+ def wasSuccessful(self):
+ """Overrides to check that there are no errors in errorClasses
+ lists that are marked as errors and should cause a run to
+ fail.
+ """
+ if self.errors or self.failures:
+ return False
+ for cls in self.errorClasses.keys():
+ storage, label, isfail = self.errorClasses[cls]
+ if not isfail:
+ continue
+ if storage:
+ return False
+ return True
+
+ def _addError(self, test, err):
+ try:
+ exc_info = self._exc_info_to_string(err, test)
+ except TypeError:
+ # 2.3: does not take test arg
+ exc_info = self._exc_info_to_string(err)
+ self.errors.append((test, exc_info))
+ if self.showAll:
+ self.stream.write('ERROR')
+ elif self.dots:
+ self.stream.write('E')
+
+ def _exc_info_to_string(self, err, test=None):
+ # 2.7 skip compat
+ from nose.plugins.skip import SkipTest
+ if isclass(err[0]) and issubclass(err[0], SkipTest):
+ return str(err[1])
+ # 2.3/2.4 -- 2.4 passes test, 2.3 does not
+ try:
+ return _TextTestResult._exc_info_to_string(self, err, test)
+ except TypeError:
+ # 2.3: does not take test arg
+ return _TextTestResult._exc_info_to_string(self, err)
+
+
+def ln(*arg, **kw):
+ from warnings import warn
+ warn("ln() has moved to nose.util from nose.result and will be removed "
+ "from nose.result in a future release. Please update your imports ",
+ DeprecationWarning)
+ return _ln(*arg, **kw)
+
+
diff --git a/lib/spack/external/nose/selector.py b/lib/spack/external/nose/selector.py
new file mode 100644
index 0000000000..b63f7af0b1
--- /dev/null
+++ b/lib/spack/external/nose/selector.py
@@ -0,0 +1,251 @@
+"""
+Test Selection
+--------------
+
+Test selection is handled by a Selector. The test loader calls the
+appropriate selector method for each object it encounters that it
+thinks may be a test.
+"""
+import logging
+import os
+import unittest
+from nose.config import Config
+from nose.util import split_test_name, src, getfilename, getpackage, ispackage, is_executable
+
+log = logging.getLogger(__name__)
+
+__all__ = ['Selector', 'defaultSelector', 'TestAddress']
+
+
+# for efficiency and easier mocking
+op_join = os.path.join
+op_basename = os.path.basename
+op_exists = os.path.exists
+op_splitext = os.path.splitext
+op_isabs = os.path.isabs
+op_abspath = os.path.abspath
+
+
+class Selector(object):
+ """Core test selector. Examines test candidates and determines whether,
+ given the specified configuration, the test candidate should be selected
+ as a test.
+ """
+ def __init__(self, config):
+ if config is None:
+ config = Config()
+ self.configure(config)
+
+ def configure(self, config):
+ self.config = config
+ self.exclude = config.exclude
+ self.ignoreFiles = config.ignoreFiles
+ self.include = config.include
+ self.plugins = config.plugins
+ self.match = config.testMatch
+
+ def matches(self, name):
+ """Does the name match my requirements?
+
+ To match, a name must match config.testMatch OR config.include
+ and it must not match config.exclude
+ """
+ return ((self.match.search(name)
+ or (self.include and
+ filter(None,
+ [inc.search(name) for inc in self.include])))
+ and ((not self.exclude)
+ or not filter(None,
+ [exc.search(name) for exc in self.exclude])
+ ))
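+
+ # Illustrative, not from the original source: with nose's customary
+ # default testMatch of re.compile(r'(?:^|[\b_\.-])[Tt]est') and no
+ # include/exclude patterns configured, matches('test_foo') and
+ # matches('foo_test') are true, while matches('helper') is false.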
+
+ def wantClass(self, cls):
+ """Is the class a wanted test class?
+
+ A class must be a unittest.TestCase subclass, or match test name
+ requirements. Classes that start with _ are always excluded.
+ """
+ declared = getattr(cls, '__test__', None)
+ if declared is not None:
+ wanted = declared
+ else:
+ wanted = (not cls.__name__.startswith('_')
+ and (issubclass(cls, unittest.TestCase)
+ or self.matches(cls.__name__)))
+
+ plug_wants = self.plugins.wantClass(cls)
+ if plug_wants is not None:
+ log.debug("Plugin setting selection of %s to %s", cls, plug_wants)
+ wanted = plug_wants
+ log.debug("wantClass %s? %s", cls, wanted)
+ return wanted
+
+ def wantDirectory(self, dirname):
+ """Is the directory a wanted test directory?
+
+ All package directories match, so long as they do not match exclude.
+ All other directories must match test requirements.
+ """
+ tail = op_basename(dirname)
+ if ispackage(dirname):
+ wanted = (not self.exclude
+ or not filter(None,
+ [exc.search(tail) for exc in self.exclude]
+ ))
+ else:
+ wanted = (self.matches(tail)
+ or (self.config.srcDirs
+ and tail in self.config.srcDirs))
+ plug_wants = self.plugins.wantDirectory(dirname)
+ if plug_wants is not None:
+ log.debug("Plugin setting selection of %s to %s",
+ dirname, plug_wants)
+ wanted = plug_wants
+ log.debug("wantDirectory %s? %s", dirname, wanted)
+ return wanted
+
+ def wantFile(self, file):
+ """Is the file a wanted test file?
+
+ The file must be a python source file and match testMatch or
+ include, and not match exclude. Files that match ignore are *never*
+ wanted, regardless of plugin, testMatch, include or exclude settings.
+ """
+ # never, ever load files that match anything in ignore
+ # (.* _* and *setup*.py by default)
+ base = op_basename(file)
+ ignore_matches = [ ignore_this for ignore_this in self.ignoreFiles
+ if ignore_this.search(base) ]
+ if ignore_matches:
+ log.debug('%s matches ignoreFiles pattern; skipped',
+ base)
+ return False
+ if not self.config.includeExe and is_executable(file):
+ log.info('%s is executable; skipped', file)
+ return False
+ dummy, ext = op_splitext(base)
+ pysrc = ext == '.py'
+
+ wanted = pysrc and self.matches(base)
+ plug_wants = self.plugins.wantFile(file)
+ if plug_wants is not None:
+ log.debug("plugin setting want %s to %s", file, plug_wants)
+ wanted = plug_wants
+ log.debug("wantFile %s? %s", file, wanted)
+ return wanted
+
+ def wantFunction(self, function):
+ """Is the function a test function?
+ """
+ try:
+ if hasattr(function, 'compat_func_name'):
+ funcname = function.compat_func_name
+ else:
+ funcname = function.__name__
+ except AttributeError:
+ # not a function
+ return False
+ declared = getattr(function, '__test__', None)
+ if declared is not None:
+ wanted = declared
+ else:
+ wanted = not funcname.startswith('_') and self.matches(funcname)
+ plug_wants = self.plugins.wantFunction(function)
+ if plug_wants is not None:
+ wanted = plug_wants
+ log.debug("wantFunction %s? %s", function, wanted)
+ return wanted
+
+ def wantMethod(self, method):
+ """Is the method a test method?
+ """
+ try:
+ method_name = method.__name__
+ except AttributeError:
+ # not a method
+ return False
+ if method_name.startswith('_'):
+ # never collect 'private' methods
+ return False
+ declared = getattr(method, '__test__', None)
+ if declared is not None:
+ wanted = declared
+ else:
+ wanted = self.matches(method_name)
+ plug_wants = self.plugins.wantMethod(method)
+ if plug_wants is not None:
+ wanted = plug_wants
+ log.debug("wantMethod %s? %s", method, wanted)
+ return wanted
+
+ def wantModule(self, module):
+ """Is the module a test module?
+
+ The tail of the module name must match test requirements. One exception:
+ we always want __main__.
+ """
+ declared = getattr(module, '__test__', None)
+ if declared is not None:
+ wanted = declared
+ else:
+ wanted = self.matches(module.__name__.split('.')[-1]) \
+ or module.__name__ == '__main__'
+ plug_wants = self.plugins.wantModule(module)
+ if plug_wants is not None:
+ wanted = plug_wants
+ log.debug("wantModule %s? %s", module, wanted)
+ return wanted
+
+defaultSelector = Selector
+
+
+class TestAddress(object):
+ """A test address represents a user's request to run a particular
+ test. The user may specify a filename or module (or neither),
+ and/or a callable (a class, function, or method). The naming
+ format for test addresses is:
+
+ filename_or_module:callable
+
+ Filenames that are not absolute will be made absolute relative to
+ the working dir.
+
+ The filename or module part will be considered a module name if it
+ doesn't look like a file: that is, if it doesn't exist on the file
+ system, contains no directory separators, and doesn't end in .py.
+
+ Callables may be a class name, function name, method name, or
+ class.method specification.
+ """
+ def __init__(self, name, workingDir=None):
+ if workingDir is None:
+ workingDir = os.getcwd()
+ self.name = name
+ self.workingDir = workingDir
+ self.filename, self.module, self.call = split_test_name(name)
+ log.debug('Test name %s resolved to file %s, module %s, call %s',
+ name, self.filename, self.module, self.call)
+ if self.filename is None:
+ if self.module is not None:
+ self.filename = getfilename(self.module, self.workingDir)
+ if self.filename:
+ self.filename = src(self.filename)
+ if not op_isabs(self.filename):
+ self.filename = op_abspath(op_join(workingDir,
+ self.filename))
+ if self.module is None:
+ self.module = getpackage(self.filename)
+ log.debug(
+ 'Final resolution of test name %s: file %s module %s call %s',
+ name, self.filename, self.module, self.call)
+
+ def totuple(self):
+ return (self.filename, self.module, self.call)
+
+ def __str__(self):
+ return self.name
+
+ def __repr__(self):
+ return "%s: (%s, %s, %s)" % (self.name, self.filename,
+ self.module, self.call)
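
As a hedged illustration of the `filename_or_module:callable` naming format
documented in TestAddress above, the sketch below shows how
nose.util.split_test_name (added later in this diff) splits a few
representative names; the names themselves are hypothetical:

    # Hypothetical usage sketch; assumes the vendored nose package is
    # importable. split_test_name returns a (file, module, callable) tuple.
    from nose.util import split_test_name

    print(split_test_name("test.module"))
    # -> (None, 'test.module', None)
    print(split_test_name("another.test:TestCase.test_method"))
    # -> (None, 'another.test', 'TestCase.test_method')
    print(split_test_name("/path/to/test/file.py:test_function"))
    # -> ('/path/to/test/file.py', None, 'test_function')
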
diff --git a/lib/spack/external/nose/sphinx/__init__.py b/lib/spack/external/nose/sphinx/__init__.py
new file mode 100644
index 0000000000..2ae28399f5
--- /dev/null
+++ b/lib/spack/external/nose/sphinx/__init__.py
@@ -0,0 +1 @@
+pass
diff --git a/lib/spack/external/nose/sphinx/pluginopts.py b/lib/spack/external/nose/sphinx/pluginopts.py
new file mode 100644
index 0000000000..d2b284ab27
--- /dev/null
+++ b/lib/spack/external/nose/sphinx/pluginopts.py
@@ -0,0 +1,189 @@
+"""
+Adds a sphinx directive that can be used to automatically document a plugin.
+
+Used like this::
+
+ .. autoplugin :: nose.plugins.foo
+ :plugin: Pluggy
+
+produces::
+
+ .. automodule :: nose.plugins.foo
+
+ Options
+ -------
+
+ .. cmdoption :: --foo=BAR, --fooble=BAR
+
+ Do the foo thing to the new thing.
+
+ Plugin
+ ------
+
+ .. autoclass :: nose.plugins.foo.Pluggy
+ :members:
+
+ Source
+ ------
+
+ .. include :: path/to/nose/plugins/foo.py
+ :literal:
+
+"""
+import os
+try:
+ from docutils import nodes, utils
+ from docutils.statemachine import ViewList
+ from docutils.parsers.rst import directives
+except ImportError:
+ pass # won't run anyway
+
+from nose.util import resolve_name
+from nose.plugins.base import Plugin
+from nose.plugins.manager import BuiltinPluginManager
+from nose.config import Config
+from nose.core import TestProgram
+from inspect import isclass
+
+
+def autoplugin_directive(dirname, arguments, options, content, lineno,
+ content_offset, block_text, state, state_machine):
+ mod_name = arguments[0]
+ mod = resolve_name(mod_name)
+ plug_name = options.get('plugin', None)
+ if plug_name:
+ obj = getattr(mod, plug_name)
+ else:
+ for entry in dir(mod):
+ obj = getattr(mod, entry)
+ if isclass(obj) and issubclass(obj, Plugin) and obj is not Plugin:
+ plug_name = '%s.%s' % (mod_name, entry)
+ break
+
+ # mod docstring
+ rst = ViewList()
+ rst.append('.. automodule :: %s\n' % mod_name, '<autodoc>')
+ rst.append('', '<autodoc>')
+
+ # options
+ rst.append('Options', '<autodoc>')
+ rst.append('-------', '<autodoc>')
+ rst.append('', '<autodoc>')
+
+ plug = obj()
+ opts = OptBucket()
+ plug.options(opts, {})
+ for opt in opts:
+ rst.append(opt.options(), '<autodoc>')
+ rst.append(' \n', '<autodoc>')
+ rst.append(' ' + opt.help + '\n', '<autodoc>')
+ rst.append('\n', '<autodoc>')
+
+ # plugin class
+ rst.append('Plugin', '<autodoc>')
+ rst.append('------', '<autodoc>')
+ rst.append('', '<autodoc>')
+
+ rst.append('.. autoclass :: %s\n' % plug_name, '<autodoc>')
+ rst.append(' :members:\n', '<autodoc>')
+ rst.append(' :show-inheritance:\n', '<autodoc>')
+ rst.append('', '<autodoc>')
+
+ # source
+ rst.append('Source', '<autodoc>')
+ rst.append('------', '<autodoc>')
+ rst.append(
+ '.. include :: %s\n' % utils.relative_path(
+ state_machine.document['source'],
+ os.path.abspath(mod.__file__.replace('.pyc', '.py'))),
+ '<autodoc>')
+ rst.append(' :literal:\n', '<autodoc>')
+ rst.append('', '<autodoc>')
+
+ node = nodes.section()
+ node.document = state.document
+ surrounding_title_styles = state.memo.title_styles
+ surrounding_section_level = state.memo.section_level
+ state.memo.title_styles = []
+ state.memo.section_level = 0
+ state.nested_parse(rst, 0, node, match_titles=1)
+ state.memo.title_styles = surrounding_title_styles
+ state.memo.section_level = surrounding_section_level
+
+ return node.children
+
+
+def autohelp_directive(dirname, arguments, options, content, lineno,
+ content_offset, block_text, state, state_machine):
+ """produces rst from nose help"""
+ config = Config(parserClass=OptBucket,
+ plugins=BuiltinPluginManager())
+ parser = config.getParser(TestProgram.usage())
+ rst = ViewList()
+ for line in parser.format_help().split('\n'):
+ rst.append(line, '<autodoc>')
+
+ rst.append('Options', '<autodoc>')
+ rst.append('-------', '<autodoc>')
+ rst.append('', '<autodoc>')
+ for opt in parser:
+ rst.append(opt.options(), '<autodoc>')
+ rst.append(' \n', '<autodoc>')
+ rst.append(' ' + opt.help + '\n', '<autodoc>')
+ rst.append('\n', '<autodoc>')
+ node = nodes.section()
+ node.document = state.document
+ surrounding_title_styles = state.memo.title_styles
+ surrounding_section_level = state.memo.section_level
+ state.memo.title_styles = []
+ state.memo.section_level = 0
+ state.nested_parse(rst, 0, node, match_titles=1)
+ state.memo.title_styles = surrounding_title_styles
+ state.memo.section_level = surrounding_section_level
+
+ return node.children
+
+
+class OptBucket(object):
+ def __init__(self, doc=None, prog='nosetests'):
+ self.opts = []
+ self.doc = doc
+ self.prog = prog
+
+ def __iter__(self):
+ return iter(self.opts)
+
+ def format_help(self):
+ return self.doc.replace('%prog', self.prog).replace(':\n', '::\n')
+
+ def add_option(self, *arg, **kw):
+ self.opts.append(Opt(*arg, **kw))
+
+
+class Opt(object):
+ def __init__(self, *arg, **kw):
+ self.opts = arg
+ self.action = kw.pop('action', None)
+ self.default = kw.pop('default', None)
+ self.metavar = kw.pop('metavar', None)
+ self.help = kw.pop('help', None)
+
+ def options(self):
+ buf = []
+ for optstring in self.opts:
+ desc = optstring
+ if self.action not in ('store_true', 'store_false'):
+ desc += '=%s' % self.meta(optstring)
+ buf.append(desc)
+ return '.. cmdoption :: ' + ', '.join(buf)
+
+ def meta(self, optstring):
+        # FIXME use optparse's default metavar?
+ return self.metavar or 'DEFAULT'
+
+
+def setup(app):
+ app.add_directive('autoplugin',
+ autoplugin_directive, 1, (1, 0, 1),
+ plugin=directives.unchanged)
+ app.add_directive('autohelp', autohelp_directive, 0, (0, 0, 1))
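
For context, a minimal sketch of wiring these directives into a Sphinx build
with the Sphinx versions contemporary to this code (a hypothetical conf.py
fragment; the target module nose.plugins.attrib is only an example):

    # conf.py sketch; assumes the vendored nose is importable at build time.
    from nose.sphinx import pluginopts

    def setup(app):
        # Registers the 'autoplugin' and 'autohelp' directives defined above.
        pluginopts.setup(app)

A document can then use `.. autoplugin :: nose.plugins.attrib` with the
`:plugin:` option naming the plugin class, exactly as the module docstring
shows.
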
diff --git a/lib/spack/external/nose/suite.py b/lib/spack/external/nose/suite.py
new file mode 100644
index 0000000000..a831105e34
--- /dev/null
+++ b/lib/spack/external/nose/suite.py
@@ -0,0 +1,609 @@
+"""
+Test Suites
+-----------
+
+Provides a LazySuite, which is a suite whose test list is a generator
+function, and ContextSuite, which can run fixtures (setup/teardown
+functions or methods) for the context that contains its tests.
+
+"""
+from __future__ import generators
+
+import logging
+import sys
+import unittest
+from nose.case import Test
+from nose.config import Config
+from nose.proxy import ResultProxyFactory
+from nose.util import isclass, resolve_name, try_run
+
+if sys.platform == 'cli':
+ if sys.version_info[:2] < (2, 6):
+ import clr
+ clr.AddReference("IronPython")
+ from IronPython.Runtime.Exceptions import StringException
+ else:
+ class StringException(Exception):
+ pass
+
+log = logging.getLogger(__name__)
+#log.setLevel(logging.DEBUG)
+
+# Singleton for default value -- see ContextSuite.__init__ below
+_def = object()
+
+
+def _strclass(cls):
+ return "%s.%s" % (cls.__module__, cls.__name__)
+
+class MixedContextError(Exception):
+ """Error raised when a context suite sees tests from more than
+ one context.
+ """
+ pass
+
+
+class LazySuite(unittest.TestSuite):
+ """A suite that may use a generator as its list of tests
+ """
+ def __init__(self, tests=()):
+ """Initialize the suite. tests may be an iterable or a generator
+ """
+ super(LazySuite, self).__init__()
+ self._set_tests(tests)
+
+ def __iter__(self):
+ return iter(self._tests)
+
+ def __repr__(self):
+ return "<%s tests=generator (%s)>" % (
+ _strclass(self.__class__), id(self))
+
+ def __hash__(self):
+ return object.__hash__(self)
+
+ __str__ = __repr__
+
+ def addTest(self, test):
+ self._precache.append(test)
+
+ # added to bypass run changes in 2.7's unittest
+ def run(self, result):
+ for test in self._tests:
+ if result.shouldStop:
+ break
+ test(result)
+ return result
+
+ def __nonzero__(self):
+ log.debug("tests in %s?", id(self))
+ if self._precache:
+ return True
+ if self.test_generator is None:
+ return False
+ try:
+ test = self.test_generator.next()
+ if test is not None:
+ self._precache.append(test)
+ return True
+ except StopIteration:
+ pass
+ return False
+
+ def _get_tests(self):
+ log.debug("precache is %s", self._precache)
+ for test in self._precache:
+ yield test
+ if self.test_generator is None:
+ return
+ for test in self.test_generator:
+ yield test
+
+ def _set_tests(self, tests):
+ self._precache = []
+ is_suite = isinstance(tests, unittest.TestSuite)
+ if callable(tests) and not is_suite:
+ self.test_generator = tests()
+ elif is_suite:
+ # Suites need special treatment: they must be called like
+ # tests for their setup/teardown to run (if any)
+ self.addTests([tests])
+ self.test_generator = None
+ else:
+ self.addTests(tests)
+ self.test_generator = None
+
+ _tests = property(_get_tests, _set_tests, None,
+ "Access the tests in this suite. Access is through a "
+ "generator, so iteration may not be repeatable.")
+
+
+class ContextSuite(LazySuite):
+ """A suite with context.
+
+ A ContextSuite executes fixtures (setup and teardown functions or
+ methods) for the context containing its tests.
+
+ The context may be explicitly passed. If it is not, a context (or
+ nested set of contexts) will be constructed by examining the tests
+ in the suite.
+ """
+ failureException = unittest.TestCase.failureException
+ was_setup = False
+ was_torndown = False
+ classSetup = ('setup_class', 'setup_all', 'setupClass', 'setupAll',
+ 'setUpClass', 'setUpAll')
+ classTeardown = ('teardown_class', 'teardown_all', 'teardownClass',
+ 'teardownAll', 'tearDownClass', 'tearDownAll')
+ moduleSetup = ('setup_module', 'setupModule', 'setUpModule', 'setup',
+ 'setUp')
+ moduleTeardown = ('teardown_module', 'teardownModule', 'tearDownModule',
+ 'teardown', 'tearDown')
+ packageSetup = ('setup_package', 'setupPackage', 'setUpPackage')
+ packageTeardown = ('teardown_package', 'teardownPackage',
+ 'tearDownPackage')
+
+ def __init__(self, tests=(), context=None, factory=None,
+ config=None, resultProxy=None, can_split=True):
+ log.debug("Context suite for %s (%s) (%s)", tests, context, id(self))
+ self.context = context
+ self.factory = factory
+ if config is None:
+ config = Config()
+ self.config = config
+ self.resultProxy = resultProxy
+ self.has_run = False
+ self.can_split = can_split
+ self.error_context = None
+ super(ContextSuite, self).__init__(tests)
+
+ def __repr__(self):
+ return "<%s context=%s>" % (
+ _strclass(self.__class__),
+ getattr(self.context, '__name__', self.context))
+ __str__ = __repr__
+
+ def id(self):
+ if self.error_context:
+ return '%s:%s' % (repr(self), self.error_context)
+ else:
+ return repr(self)
+
+ def __hash__(self):
+ return object.__hash__(self)
+
+ # 2.3 compat -- force 2.4 call sequence
+ def __call__(self, *arg, **kw):
+ return self.run(*arg, **kw)
+
+ def exc_info(self):
+ """Hook for replacing error tuple output
+ """
+ return sys.exc_info()
+
+ def _exc_info(self):
+ """Bottleneck to fix up IronPython string exceptions
+ """
+ e = self.exc_info()
+ if sys.platform == 'cli':
+ if isinstance(e[0], StringException):
+ # IronPython throws these StringExceptions, but
+ # traceback checks type(etype) == str. Make a real
+ # string here.
+ e = (str(e[0]), e[1], e[2])
+
+ return e
+
+ def run(self, result):
+ """Run tests in suite inside of suite fixtures.
+ """
+ # proxy the result for myself
+ log.debug("suite %s (%s) run called, tests: %s", id(self), self, self._tests)
+ #import pdb
+ #pdb.set_trace()
+ if self.resultProxy:
+ result, orig = self.resultProxy(result, self), result
+ else:
+ result, orig = result, result
+ try:
+ self.setUp()
+ except KeyboardInterrupt:
+ raise
+ except:
+ self.error_context = 'setup'
+ result.addError(self, self._exc_info())
+ return
+ try:
+ for test in self._tests:
+ if result.shouldStop:
+ log.debug("stopping")
+ break
+ # each nose.case.Test will create its own result proxy
+ # so the cases need the original result, to avoid proxy
+ # chains
+ test(orig)
+ finally:
+ self.has_run = True
+ try:
+ self.tearDown()
+ except KeyboardInterrupt:
+ raise
+ except:
+ self.error_context = 'teardown'
+ result.addError(self, self._exc_info())
+
+ def hasFixtures(self, ctx_callback=None):
+ context = self.context
+ if context is None:
+ return False
+ if self.implementsAnyFixture(context, ctx_callback=ctx_callback):
+ return True
+ # My context doesn't have any, but its ancestors might
+ factory = self.factory
+ if factory:
+ ancestors = factory.context.get(self, [])
+ for ancestor in ancestors:
+ if self.implementsAnyFixture(
+ ancestor, ctx_callback=ctx_callback):
+ return True
+ return False
+
+ def implementsAnyFixture(self, context, ctx_callback):
+ if isclass(context):
+ names = self.classSetup + self.classTeardown
+ else:
+ names = self.moduleSetup + self.moduleTeardown
+ if hasattr(context, '__path__'):
+ names += self.packageSetup + self.packageTeardown
+ # If my context has any fixture attribute, I have fixtures
+ fixt = False
+ for m in names:
+ if hasattr(context, m):
+ fixt = True
+ break
+ if ctx_callback is None:
+ return fixt
+ return ctx_callback(context, fixt)
+
+ def setUp(self):
+ log.debug("suite %s setUp called, tests: %s", id(self), self._tests)
+ if not self:
+ # I have no tests
+ log.debug("suite %s has no tests", id(self))
+ return
+ if self.was_setup:
+ log.debug("suite %s already set up", id(self))
+ return
+ context = self.context
+ if context is None:
+ return
+ # before running my own context's setup, I need to
+ # ask the factory if my context's contexts' setups have been run
+ factory = self.factory
+ if factory:
+ # get a copy, since we'll be destroying it as we go
+ ancestors = factory.context.get(self, [])[:]
+ while ancestors:
+ ancestor = ancestors.pop()
+ log.debug("ancestor %s may need setup", ancestor)
+ if ancestor in factory.was_setup:
+ continue
+ log.debug("ancestor %s does need setup", ancestor)
+ self.setupContext(ancestor)
+ if not context in factory.was_setup:
+ self.setupContext(context)
+ else:
+ self.setupContext(context)
+ self.was_setup = True
+ log.debug("completed suite setup")
+
+ def setupContext(self, context):
+ self.config.plugins.startContext(context)
+ log.debug("%s setup context %s", self, context)
+ if self.factory:
+ if context in self.factory.was_setup:
+ return
+ # note that I ran the setup for this context, so that I'll run
+ # the teardown in my teardown
+ self.factory.was_setup[context] = self
+ if isclass(context):
+ names = self.classSetup
+ else:
+ names = self.moduleSetup
+ if hasattr(context, '__path__'):
+ names = self.packageSetup + names
+ try_run(context, names)
+
+ def shortDescription(self):
+ if self.context is None:
+ return "test suite"
+ return "test suite for %s" % self.context
+
+ def tearDown(self):
+ log.debug('context teardown')
+ if not self.was_setup or self.was_torndown:
+ log.debug(
+ "No reason to teardown (was_setup? %s was_torndown? %s)"
+ % (self.was_setup, self.was_torndown))
+ return
+ self.was_torndown = True
+ context = self.context
+ if context is None:
+ log.debug("No context to tear down")
+ return
+
+ # for each ancestor... if the ancestor was setup
+ # and I did the setup, I can do teardown
+ factory = self.factory
+ if factory:
+ ancestors = factory.context.get(self, []) + [context]
+ for ancestor in ancestors:
+ log.debug('ancestor %s may need teardown', ancestor)
+ if not ancestor in factory.was_setup:
+ log.debug('ancestor %s was not setup', ancestor)
+ continue
+ if ancestor in factory.was_torndown:
+ log.debug('ancestor %s already torn down', ancestor)
+ continue
+ setup = factory.was_setup[ancestor]
+ log.debug("%s setup ancestor %s", setup, ancestor)
+ if setup is self:
+ self.teardownContext(ancestor)
+ else:
+ self.teardownContext(context)
+
+ def teardownContext(self, context):
+ log.debug("%s teardown context %s", self, context)
+ if self.factory:
+ if context in self.factory.was_torndown:
+ return
+ self.factory.was_torndown[context] = self
+ if isclass(context):
+ names = self.classTeardown
+ else:
+ names = self.moduleTeardown
+ if hasattr(context, '__path__'):
+ names = self.packageTeardown + names
+ try_run(context, names)
+ self.config.plugins.stopContext(context)
+
+ # FIXME the wrapping has to move to the factory?
+ def _get_wrapped_tests(self):
+ for test in self._get_tests():
+ if isinstance(test, Test) or isinstance(test, unittest.TestSuite):
+ yield test
+ else:
+ yield Test(test,
+ config=self.config,
+ resultProxy=self.resultProxy)
+
+ _tests = property(_get_wrapped_tests, LazySuite._set_tests, None,
+ "Access the tests in this suite. Tests are returned "
+ "inside of a context wrapper.")
+
+
+class ContextSuiteFactory(object):
+ """Factory for ContextSuites. Called with a collection of tests,
+ the factory decides on a hierarchy of contexts by introspecting
+ the collection or the tests themselves to find the objects
+ containing the test objects. It always returns one suite, but that
+ suite may consist of a hierarchy of nested suites.
+ """
+ suiteClass = ContextSuite
+ def __init__(self, config=None, suiteClass=None, resultProxy=_def):
+ if config is None:
+ config = Config()
+ self.config = config
+ if suiteClass is not None:
+ self.suiteClass = suiteClass
+ # Using a singleton to represent default instead of None allows
+ # passing resultProxy=None to turn proxying off.
+ if resultProxy is _def:
+ resultProxy = ResultProxyFactory(config=config)
+ self.resultProxy = resultProxy
+ self.suites = {}
+ self.context = {}
+ self.was_setup = {}
+ self.was_torndown = {}
+
+ def __call__(self, tests, **kw):
+ """Return ``ContextSuite`` for tests. ``tests`` may either
+ be a callable (in which case the resulting ContextSuite will
+ have no parent context and be evaluated lazily) or an
+ iterable. In that case the tests will wrapped in
+ nose.case.Test, be examined and the context of each found and a
+ suite of suites returned, organized into a stack with the
+ outermost suites belonging to the outermost contexts.
+ """
+ log.debug("Create suite for %s", tests)
+ context = kw.pop('context', getattr(tests, 'context', None))
+ log.debug("tests %s context %s", tests, context)
+ if context is None:
+ tests = self.wrapTests(tests)
+ try:
+ context = self.findContext(tests)
+ except MixedContextError:
+ return self.makeSuite(self.mixedSuites(tests), None, **kw)
+ return self.makeSuite(tests, context, **kw)
+
+ def ancestry(self, context):
+ """Return the ancestry of the context (that is, all of the
+ packages and modules containing the context), in order of
+ descent with the outermost ancestor last.
+ This method is a generator.
+ """
+ log.debug("get ancestry %s", context)
+ if context is None:
+ return
+ # Methods include reference to module they are defined in, we
+ # don't want that, instead want the module the class is in now
+ # (classes are re-ancestored elsewhere).
+ if hasattr(context, 'im_class'):
+ context = context.im_class
+ elif hasattr(context, '__self__'):
+ context = context.__self__.__class__
+ if hasattr(context, '__module__'):
+ ancestors = context.__module__.split('.')
+ elif hasattr(context, '__name__'):
+ ancestors = context.__name__.split('.')[:-1]
+ else:
+ raise TypeError("%s has no ancestors?" % context)
+ while ancestors:
+ log.debug(" %s ancestors %s", context, ancestors)
+ yield resolve_name('.'.join(ancestors))
+ ancestors.pop()
+
+ def findContext(self, tests):
+ if callable(tests) or isinstance(tests, unittest.TestSuite):
+ return None
+ context = None
+ for test in tests:
+ # Don't look at suites for contexts, only tests
+ ctx = getattr(test, 'context', None)
+ if ctx is None:
+ continue
+ if context is None:
+ context = ctx
+ elif context != ctx:
+ raise MixedContextError(
+ "Tests with different contexts in same suite! %s != %s"
+ % (context, ctx))
+ return context
+
+ def makeSuite(self, tests, context, **kw):
+ suite = self.suiteClass(
+ tests, context=context, config=self.config, factory=self,
+ resultProxy=self.resultProxy, **kw)
+ if context is not None:
+ self.suites.setdefault(context, []).append(suite)
+ self.context.setdefault(suite, []).append(context)
+ log.debug("suite %s has context %s", suite,
+ getattr(context, '__name__', None))
+ for ancestor in self.ancestry(context):
+ self.suites.setdefault(ancestor, []).append(suite)
+ self.context[suite].append(ancestor)
+ log.debug("suite %s has ancestor %s", suite, ancestor.__name__)
+ return suite
+
+ def mixedSuites(self, tests):
+ """The complex case where there are tests that don't all share
+ the same context. Groups tests into suites with common ancestors,
+ according to the following (essentially tail-recursive) procedure:
+
+ Starting with the context of the first test, if it is not
+ None, look for tests in the remaining tests that share that
+ ancestor. If any are found, group into a suite with that
+ ancestor as the context, and replace the current suite with
+ that suite. Continue this process for each ancestor of the
+ first test, until all ancestors have been processed. At this
+ point if any tests remain, recurse with those tests as the
+ input, returning a list of the common suite (which may be the
+ suite or test we started with, if no common tests were found)
+ plus the results of recursion.
+ """
+ if not tests:
+ return []
+ head = tests.pop(0)
+ if not tests:
+ return [head] # short circuit when none are left to combine
+ suite = head # the common ancestry suite, so far
+ tail = tests[:]
+ context = getattr(head, 'context', None)
+ if context is not None:
+ ancestors = [context] + [a for a in self.ancestry(context)]
+ for ancestor in ancestors:
+ common = [suite] # tests with ancestor in common, so far
+ remain = [] # tests that remain to be processed
+ for test in tail:
+ found_common = False
+ test_ctx = getattr(test, 'context', None)
+ if test_ctx is None:
+ remain.append(test)
+ continue
+ if test_ctx is ancestor:
+ common.append(test)
+ continue
+ for test_ancestor in self.ancestry(test_ctx):
+ if test_ancestor is ancestor:
+ common.append(test)
+ found_common = True
+ break
+ if not found_common:
+ remain.append(test)
+ if common:
+ suite = self.makeSuite(common, ancestor)
+ tail = self.mixedSuites(remain)
+ return [suite] + tail
+
+ def wrapTests(self, tests):
+ log.debug("wrap %s", tests)
+ if callable(tests) or isinstance(tests, unittest.TestSuite):
+ log.debug("I won't wrap")
+ return tests
+ wrapped = []
+ for test in tests:
+ log.debug("wrapping %s", test)
+ if isinstance(test, Test) or isinstance(test, unittest.TestSuite):
+ wrapped.append(test)
+ elif isinstance(test, ContextList):
+ wrapped.append(self.makeSuite(test, context=test.context))
+ else:
+ wrapped.append(
+ Test(test, config=self.config, resultProxy=self.resultProxy)
+ )
+ return wrapped
+
+
+class ContextList(object):
+ """Not quite a suite -- a group of tests in a context. This is used
+ to hint the ContextSuiteFactory about what context the tests
+ belong to, in cases where it may be ambiguous or missing.
+ """
+ def __init__(self, tests, context=None):
+ self.tests = tests
+ self.context = context
+
+ def __iter__(self):
+ return iter(self.tests)
+
+
+class FinalizingSuiteWrapper(unittest.TestSuite):
+ """Wraps suite and calls final function after suite has
+ executed. Used to call final functions in cases (like running in
+ the standard test runner) where test running is not under nose's
+ control.
+ """
+ def __init__(self, suite, finalize):
+ super(FinalizingSuiteWrapper, self).__init__()
+ self.suite = suite
+ self.finalize = finalize
+
+ def __call__(self, *arg, **kw):
+ return self.run(*arg, **kw)
+
+ # 2.7 compat
+ def __iter__(self):
+ return iter(self.suite)
+
+ def run(self, *arg, **kw):
+ try:
+ return self.suite(*arg, **kw)
+ finally:
+ self.finalize(*arg, **kw)
+
+
+# backwards compat -- sort of
+class TestDir:
+ def __init__(*arg, **kw):
+ raise NotImplementedError(
+ "TestDir is not usable with nose 0.10. The class is present "
+ "in nose.suite for backwards compatibility purposes but it "
+ "may not be used.")
+
+
+class TestModule:
+ def __init__(*arg, **kw):
+ raise NotImplementedError(
+ "TestModule is not usable with nose 0.10. The class is present "
+ "in nose.suite for backwards compatibility purposes but it "
+ "may not be used.")
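
A hedged sketch of the lazy test-list behavior described in the module
docstring above (the test class here is hypothetical; assumes the vendored
nose and the standard unittest module):

    import unittest
    from nose.suite import LazySuite

    class Passing(unittest.TestCase):
        def runTest(self):
            pass

    def gen_tests():
        # LazySuite pulls tests from this generator on demand, so the
        # full test list is never materialized up front.
        for _ in range(3):
            yield Passing()

    suite = LazySuite(gen_tests)   # pass the callable itself, not gen_tests()
    result = unittest.TestResult()
    suite.run(result)
    print(result.testsRun)         # 3
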
diff --git a/lib/spack/external/nose/tools/__init__.py b/lib/spack/external/nose/tools/__init__.py
new file mode 100644
index 0000000000..74dab16a74
--- /dev/null
+++ b/lib/spack/external/nose/tools/__init__.py
@@ -0,0 +1,15 @@
+"""
+Tools for testing
+-----------------
+
+nose.tools provides a few convenience functions to make writing tests
+easier. You don't have to use them; nothing in the rest of nose depends
+on any of these methods.
+
+"""
+from nose.tools.nontrivial import *
+from nose.tools.nontrivial import __all__ as nontrivial_all
+from nose.tools.trivial import *
+from nose.tools.trivial import __all__ as trivial_all
+
+__all__ = trivial_all + nontrivial_all
diff --git a/lib/spack/external/nose/tools/nontrivial.py b/lib/spack/external/nose/tools/nontrivial.py
new file mode 100644
index 0000000000..283973245b
--- /dev/null
+++ b/lib/spack/external/nose/tools/nontrivial.py
@@ -0,0 +1,151 @@
+"""Tools not exempt from being descended into in tracebacks"""
+
+import time
+
+
+__all__ = ['make_decorator', 'raises', 'set_trace', 'timed', 'with_setup',
+ 'TimeExpired', 'istest', 'nottest']
+
+
+class TimeExpired(AssertionError):
+ pass
+
+
+def make_decorator(func):
+ """
+ Wraps a test decorator so as to properly replicate metadata
+ of the decorated function, including nose's additional stuff
+ (namely, setup and teardown).
+ """
+ def decorate(newfunc):
+ if hasattr(func, 'compat_func_name'):
+ name = func.compat_func_name
+ else:
+ name = func.__name__
+ newfunc.__dict__ = func.__dict__
+ newfunc.__doc__ = func.__doc__
+ newfunc.__module__ = func.__module__
+ if not hasattr(newfunc, 'compat_co_firstlineno'):
+ newfunc.compat_co_firstlineno = func.func_code.co_firstlineno
+ try:
+ newfunc.__name__ = name
+ except TypeError:
+ # can't set func name in 2.3
+ newfunc.compat_func_name = name
+ return newfunc
+ return decorate
+
+
+def raises(*exceptions):
+ """Test must raise one of expected exceptions to pass.
+
+ Example use::
+
+ @raises(TypeError, ValueError)
+ def test_raises_type_error():
+ raise TypeError("This test passes")
+
+ @raises(Exception)
+ def test_that_fails_by_passing():
+ pass
+
+ If you want to test many assertions about exceptions in a single test,
+ you may want to use `assert_raises` instead.
+ """
+ valid = ' or '.join([e.__name__ for e in exceptions])
+ def decorate(func):
+ name = func.__name__
+ def newfunc(*arg, **kw):
+ try:
+ func(*arg, **kw)
+ except exceptions:
+ pass
+ except:
+ raise
+ else:
+ message = "%s() did not raise %s" % (name, valid)
+ raise AssertionError(message)
+ newfunc = make_decorator(func)(newfunc)
+ return newfunc
+ return decorate
+
+
+def set_trace():
+ """Call pdb.set_trace in the calling frame, first restoring
+ sys.stdout to the real output stream. Note that sys.stdout is NOT
+ reset to whatever it was before the call once pdb is done!
+ """
+ import pdb
+ import sys
+ stdout = sys.stdout
+ sys.stdout = sys.__stdout__
+ pdb.Pdb().set_trace(sys._getframe().f_back)
+
+
+def timed(limit):
+ """Test must finish within specified time limit to pass.
+
+ Example use::
+
+ @timed(.1)
+ def test_that_fails():
+ time.sleep(.2)
+ """
+ def decorate(func):
+ def newfunc(*arg, **kw):
+ start = time.time()
+ result = func(*arg, **kw)
+ end = time.time()
+ if end - start > limit:
+ raise TimeExpired("Time limit (%s) exceeded" % limit)
+ return result
+ newfunc = make_decorator(func)(newfunc)
+ return newfunc
+ return decorate
+
+
+def with_setup(setup=None, teardown=None):
+ """Decorator to add setup and/or teardown methods to a test function::
+
+ @with_setup(setup, teardown)
+ def test_something():
+ " ... "
+
+ Note that `with_setup` is useful *only* for test functions, not for test
+ methods or inside of TestCase subclasses.
+ """
+ def decorate(func, setup=setup, teardown=teardown):
+ if setup:
+ if hasattr(func, 'setup'):
+ _old_s = func.setup
+ def _s():
+ setup()
+ _old_s()
+ func.setup = _s
+ else:
+ func.setup = setup
+ if teardown:
+ if hasattr(func, 'teardown'):
+ _old_t = func.teardown
+ def _t():
+ _old_t()
+ teardown()
+ func.teardown = _t
+ else:
+ func.teardown = teardown
+ return func
+ return decorate
+
+
+def istest(func):
+ """Decorator to mark a function or method as a test
+ """
+ func.__test__ = True
+ return func
+
+
+def nottest(func):
+ """Decorator to mark a function or method as *not* a test
+ """
+ func.__test__ = False
+ return func
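
A short usage sketch combining the decorators above (hypothetical test
functions; note that the setup/teardown attributes set by with_setup are
honored when the functions are collected by nose, not by unittest alone):

    from nose.tools import with_setup, raises, timed

    _state = {}

    def _setup():
        _state['db'] = 'connected'

    def _teardown():
        _state.clear()

    @with_setup(_setup, _teardown)
    def test_uses_fixture():
        assert _state['db'] == 'connected'

    @raises(KeyError)
    def test_expected_error():
        {}['missing']        # passes because KeyError is expected

    @timed(1.0)
    def test_fast_enough():
        pass                 # fails only if it takes longer than 1 second
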
diff --git a/lib/spack/external/nose/tools/trivial.py b/lib/spack/external/nose/tools/trivial.py
new file mode 100644
index 0000000000..cf83efeda5
--- /dev/null
+++ b/lib/spack/external/nose/tools/trivial.py
@@ -0,0 +1,54 @@
+"""Tools so trivial that tracebacks should not descend into them
+
+We define the ``__unittest`` symbol in their module namespace so unittest will
+skip them when printing tracebacks, just as it does for their corresponding
+methods in ``unittest`` proper.
+
+"""
+import re
+import unittest
+
+
+__all__ = ['ok_', 'eq_']
+
+# Use the same flag as unittest itself to prevent descent into these functions:
+__unittest = 1
+
+
+def ok_(expr, msg=None):
+ """Shorthand for assert. Saves 3 whole characters!
+ """
+ if not expr:
+ raise AssertionError(msg)
+
+
+def eq_(a, b, msg=None):
+ """Shorthand for 'assert a == b, "%r != %r" % (a, b)
+ """
+ if not a == b:
+ raise AssertionError(msg or "%r != %r" % (a, b))
+
+
+#
+# Expose assert* from unittest.TestCase
+# - give them pep8 style names
+#
+caps = re.compile('([A-Z])')
+
+def pep8(name):
+ return caps.sub(lambda m: '_' + m.groups()[0].lower(), name)
+
+class Dummy(unittest.TestCase):
+ def nop():
+ pass
+_t = Dummy('nop')
+
+for at in [ at for at in dir(_t)
+ if at.startswith('assert') and not '_' in at ]:
+ pepd = pep8(at)
+ vars()[pepd] = getattr(_t, at)
+ __all__.append(pepd)
+
+del Dummy
+del _t
+del pep8
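
To illustrate the renaming loop above: every assert* method of
unittest.TestCase is exposed as a pep8-style module function alongside ok_
and eq_ (a hedged sketch; assumes the re-exports in nose/tools/__init__.py
shown earlier in this diff):

    from nose.tools import ok_, eq_, assert_equal, assert_true

    ok_(1 + 1 == 2, "arithmetic is broken")   # bare boolean check
    eq_("spam", "spam")                       # reports '%r != %r' on mismatch
    assert_equal([1, 2], [1, 2])              # pep8 alias for assertEqual
    assert_true(bool("non-empty"))            # pep8 alias for assertTrue
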
diff --git a/lib/spack/external/nose/twistedtools.py b/lib/spack/external/nose/twistedtools.py
new file mode 100644
index 0000000000..8d9c6ffe9b
--- /dev/null
+++ b/lib/spack/external/nose/twistedtools.py
@@ -0,0 +1,173 @@
+"""
+Twisted integration
+-------------------
+
+This module provides a very simple way to integrate your tests with the
+Twisted_ event loop.
+
+You must import this module *before* importing anything from Twisted itself!
+
+Example::
+
+ from nose.twistedtools import reactor, deferred
+
+ @deferred()
+ def test_resolve():
+ return reactor.resolve("www.python.org")
+
+Or, more realistically::
+
+ @deferred(timeout=5.0)
+ def test_resolve():
+ d = reactor.resolve("www.python.org")
+ def check_ip(ip):
+ assert ip == "67.15.36.43"
+ d.addCallback(check_ip)
+ return d
+
+.. _Twisted: http://twistedmatrix.com/trac/
+"""
+
+import sys
+from Queue import Queue, Empty
+from nose.tools import make_decorator, TimeExpired
+
+__all__ = [
+ 'threaded_reactor', 'reactor', 'deferred', 'TimeExpired',
+ 'stop_reactor'
+]
+
+_twisted_thread = None
+
+def threaded_reactor():
+ """
+ Start the Twisted reactor in a separate thread, if not already done.
+ Returns the reactor.
+ The thread will automatically be destroyed when all the tests are done.
+ """
+ global _twisted_thread
+ try:
+ from twisted.internet import reactor
+ except ImportError:
+ return None, None
+ if not _twisted_thread:
+ from twisted.python import threadable
+ from threading import Thread
+ _twisted_thread = Thread(target=lambda: reactor.run( \
+ installSignalHandlers=False))
+ _twisted_thread.setDaemon(True)
+ _twisted_thread.start()
+ return reactor, _twisted_thread
+
+# Export global reactor variable, as Twisted does
+reactor, reactor_thread = threaded_reactor()
+
+
+def stop_reactor():
+ """Stop the reactor and join the reactor thread until it stops.
+ Call this function in teardown at the module or package level to
+ reset the twisted system after your tests. You *must* do this if
+ you mix tests using these tools and tests using twisted.trial.
+ """
+ global _twisted_thread
+
+ def stop_reactor():
+        '''Helper for calling stop from within the thread.'''
+ reactor.stop()
+
+ reactor.callFromThread(stop_reactor)
+ reactor_thread.join()
+ for p in reactor.getDelayedCalls():
+ if p.active():
+ p.cancel()
+ _twisted_thread = None
+
+
+def deferred(timeout=None):
+ """
+ By wrapping a test function with this decorator, you can return a
+ twisted Deferred and the test will wait for the deferred to be triggered.
+ The whole test function will run inside the Twisted event loop.
+
+ The optional timeout parameter specifies the maximum duration of the test.
+ The difference with timed() is that timed() will still wait for the test
+ to end, while deferred() will stop the test when its timeout has expired.
+ The latter is more desireable when dealing with network tests, because
+ the result may actually never arrive.
+
+ If the callback is triggered, the test has passed.
+ If the errback is triggered or the timeout expires, the test has failed.
+
+ Example::
+
+ @deferred(timeout=5.0)
+ def test_resolve():
+ return reactor.resolve("www.python.org")
+
+ Attention! If you combine this decorator with other decorators (like
+ "raises"), deferred() must be called *first*!
+
+ In other words, this is good::
+
+ @raises(DNSLookupError)
+ @deferred()
+ def test_error():
+ return reactor.resolve("xxxjhjhj.biz")
+
+ and this is bad::
+
+ @deferred()
+ @raises(DNSLookupError)
+ def test_error():
+ return reactor.resolve("xxxjhjhj.biz")
+ """
+ reactor, reactor_thread = threaded_reactor()
+ if reactor is None:
+ raise ImportError("twisted is not available or could not be imported")
+ # Check for common syntax mistake
+ # (otherwise, tests can be silently ignored
+ # if one writes "@deferred" instead of "@deferred()")
+ try:
+ timeout is None or timeout + 0
+ except TypeError:
+ raise TypeError("'timeout' argument must be a number or None")
+
+ def decorate(func):
+ def wrapper(*args, **kargs):
+ q = Queue()
+ def callback(value):
+ q.put(None)
+ def errback(failure):
+ # Retrieve and save full exception info
+ try:
+ failure.raiseException()
+ except:
+ q.put(sys.exc_info())
+ def g():
+ try:
+ d = func(*args, **kargs)
+ try:
+ d.addCallbacks(callback, errback)
+ # Check for a common mistake and display a nice error
+ # message
+ except AttributeError:
+ raise TypeError("you must return a twisted Deferred "
+ "from your test case!")
+ # Catch exceptions raised in the test body (from the
+ # Twisted thread)
+ except:
+ q.put(sys.exc_info())
+ reactor.callFromThread(g)
+ try:
+ error = q.get(timeout=timeout)
+ except Empty:
+ raise TimeExpired("timeout expired before end of test (%f s.)"
+ % timeout)
+ # Re-raise all exceptions
+ if error is not None:
+ exc_type, exc_value, tb = error
+ raise exc_type, exc_value, tb
+ wrapper = make_decorator(func)(wrapper)
+ return wrapper
+ return decorate
+
diff --git a/lib/spack/external/nose/usage.txt b/lib/spack/external/nose/usage.txt
new file mode 100644
index 0000000000..bc96894ab7
--- /dev/null
+++ b/lib/spack/external/nose/usage.txt
@@ -0,0 +1,115 @@
+nose collects tests automatically from python source files,
+directories and packages found in its working directory (which
+defaults to the current working directory). Any python source file,
+directory or package that matches the testMatch regular expression
+(by default: `(?:^|[\b_\.-])[Tt]est`) will be collected as a test (or
+source for collection of tests). In addition, all other packages
+found in the working directory will be examined for python source files
+or directories that match testMatch. Package discovery descends all
+the way down the tree, so package.tests and package.sub.tests and
+package.sub.sub2.tests will all be collected.
+
+Within a test directory or package, any python source file matching
+testMatch will be examined for test cases. Within a test module,
+functions and classes whose names match testMatch and TestCase
+subclasses with any name will be loaded and executed as tests. Tests
+may use the assert keyword or raise AssertionErrors to indicate test
+failure. TestCase subclasses may do the same or use the various
+TestCase methods available.
+
+**It is important to note that the default behavior of nose is to
+not include tests from files which are executable.** To include
+tests from such files, remove their executable bit or use
+the --exe flag (see 'Options' section below).
+
+Selecting Tests
+---------------
+
+To specify which tests to run, pass test names on the command line:
+
+ %prog only_test_this.py
+
+Test names specified may be file or module names, and may optionally
+indicate the test case to run by separating the module or file name
+from the test case name with a colon. Filenames may be relative or
+absolute. Examples:
+
+ %prog test.module
+ %prog another.test:TestCase.test_method
+ %prog a.test:TestCase
+ %prog /path/to/test/file.py:test_function
+
+You may also change the working directory where nose looks for tests
+by using the -w switch:
+
+ %prog -w /path/to/tests
+
+Note, however, that support for multiple -w arguments is now deprecated
+and will be removed in a future release. As of nose 0.10, you can get
+the same behavior by specifying the target directories *without*
+the -w switch:
+
+ %prog /path/to/tests /another/path/to/tests
+
+Further customization of test selection and loading is possible
+through the use of plugins.
+
+Test result output is identical to that of unittest, except for
+the additional features (error classes, and plugin-supplied
+features such as output capture and assert introspection) detailed
+in the options below.
+
+Configuration
+-------------
+
+In addition to passing command-line options, you may also put
+configuration options in your project's *setup.cfg* file, or a .noserc
+or nose.cfg file in your home directory. In any of these standard
+ini-style config files, you put your nosetests configuration in a
+``[nosetests]`` section. Options are the same as on the command line,
+with the -- prefix removed. For options that are simple switches, you
+must supply a value:
+
+ [nosetests]
+ verbosity=3
+ with-doctest=1
+
+All configuration files that are found will be loaded and their
+options combined. You can override the standard config file loading
+with the ``-c`` option.
+
+Using Plugins
+-------------
+
+There are numerous nose plugins available via easy_install and
+elsewhere. To use a plugin, just install it. The plugin will add
+command line options to nosetests. To verify that the plugin is installed,
+run:
+
+ nosetests --plugins
+
+You can add -v or -vv to that command to show more information
+about each plugin.
+
+If you are running nose.main() or nose.run() from a script, you
+can specify a list of plugins to use by passing a list of plugins
+with the plugins keyword argument.
+
+0.9 plugins
+-----------
+
+nose 1.0 can use SOME plugins that were written for nose 0.9. The
+default plugin manager inserts a compatibility wrapper around 0.9
+plugins that adapts the changed plugin api calls. However, plugins
+that access nose internals are likely to fail, especially if they
+attempt to access test case or test suite classes. For example,
+plugins that try to determine if a test passed to startTest is an
+individual test or a suite will fail, partly because suites are no
+longer passed to startTest and partly because it's likely that the
+plugin is trying to find out if the test is an instance of a class
+that no longer exists.
+
+0.10 and 0.11 plugins
+---------------------
+
+All plugins written for nose 0.10 and 0.11 should work with nose 1.0.
diff --git a/lib/spack/external/nose/util.py b/lib/spack/external/nose/util.py
new file mode 100644
index 0000000000..bfe16589ea
--- /dev/null
+++ b/lib/spack/external/nose/util.py
@@ -0,0 +1,668 @@
+"""Utility functions and classes used by nose internally.
+"""
+import inspect
+import itertools
+import logging
+import stat
+import os
+import re
+import sys
+import types
+import unittest
+from nose.pyversion import ClassType, TypeType, isgenerator, ismethod
+
+
+log = logging.getLogger('nose')
+
+ident_re = re.compile(r'^[A-Za-z_][A-Za-z0-9_.]*$')
+class_types = (ClassType, TypeType)
+skip_pattern = r"(?:\.svn)|(?:[^.]+\.py[co])|(?:.*~)|(?:.*\$py\.class)|(?:__pycache__)"
+
+try:
+ set()
+ set = set # make from nose.util import set happy
+except NameError:
+ try:
+ from sets import Set as set
+ except ImportError:
+ pass
+
+
+def ls_tree(dir_path="",
+ skip_pattern=skip_pattern,
+ indent="|-- ", branch_indent="| ",
+ last_indent="`-- ", last_branch_indent=" "):
+ # TODO: empty directories look like non-directory files
+ return "\n".join(_ls_tree_lines(dir_path, skip_pattern,
+ indent, branch_indent,
+ last_indent, last_branch_indent))
+
+
+def _ls_tree_lines(dir_path, skip_pattern,
+ indent, branch_indent, last_indent, last_branch_indent):
+ if dir_path == "":
+ dir_path = os.getcwd()
+
+ lines = []
+
+ names = os.listdir(dir_path)
+ names.sort()
+ dirs, nondirs = [], []
+ for name in names:
+ if re.match(skip_pattern, name):
+ continue
+ if os.path.isdir(os.path.join(dir_path, name)):
+ dirs.append(name)
+ else:
+ nondirs.append(name)
+
+ # list non-directories first
+ entries = list(itertools.chain([(name, False) for name in nondirs],
+ [(name, True) for name in dirs]))
+ def ls_entry(name, is_dir, ind, branch_ind):
+ if not is_dir:
+ yield ind + name
+ else:
+ path = os.path.join(dir_path, name)
+ if not os.path.islink(path):
+ yield ind + name
+ subtree = _ls_tree_lines(path, skip_pattern,
+ indent, branch_indent,
+ last_indent, last_branch_indent)
+ for x in subtree:
+ yield branch_ind + x
+ for name, is_dir in entries[:-1]:
+ for line in ls_entry(name, is_dir, indent, branch_indent):
+ yield line
+ if entries:
+ name, is_dir = entries[-1]
+ for line in ls_entry(name, is_dir, last_indent, last_branch_indent):
+ yield line
+
+
+def absdir(path):
+ """Return absolute, normalized path to directory, if it exists; None
+ otherwise.
+ """
+ if not os.path.isabs(path):
+ path = os.path.normpath(os.path.abspath(os.path.join(os.getcwd(),
+ path)))
+ if path is None or not os.path.isdir(path):
+ return None
+ return path
+
+
+def absfile(path, where=None):
+ """Return absolute, normalized path to file (optionally in directory
+ where), or None if the file can't be found either in where or the current
+ working directory.
+ """
+ orig = path
+ if where is None:
+ where = os.getcwd()
+ if isinstance(where, list) or isinstance(where, tuple):
+ for maybe_path in where:
+ maybe_abs = absfile(path, maybe_path)
+ if maybe_abs is not None:
+ return maybe_abs
+ return None
+ if not os.path.isabs(path):
+ path = os.path.normpath(os.path.abspath(os.path.join(where, path)))
+ if path is None or not os.path.exists(path):
+ if where != os.getcwd():
+ # try the cwd instead
+ path = os.path.normpath(os.path.abspath(os.path.join(os.getcwd(),
+ orig)))
+ if path is None or not os.path.exists(path):
+ return None
+ if os.path.isdir(path):
+        # might want an __init__.py from package
+ init = os.path.join(path,'__init__.py')
+ if os.path.isfile(init):
+ return init
+ elif os.path.isfile(path):
+ return path
+ return None
+
+
+def anyp(predicate, iterable):
+ for item in iterable:
+ if predicate(item):
+ return True
+ return False
+
+
+def file_like(name):
+ """A name is file-like if it is a path that exists, or it has a
+ directory part, or it ends in .py, or it isn't a legal python
+ identifier.
+ """
+ return (os.path.exists(name)
+ or os.path.dirname(name)
+ or name.endswith('.py')
+ or not ident_re.match(os.path.splitext(name)[0]))
+
+
+def func_lineno(func):
+ """Get the line number of a function. First looks for
+ compat_co_firstlineno, then func_code.co_first_lineno.
+ """
+ try:
+ return func.compat_co_firstlineno
+ except AttributeError:
+ try:
+ return func.func_code.co_firstlineno
+ except AttributeError:
+ return -1
+
+
+def isclass(obj):
+ """Is obj a class? Inspect's isclass is too liberal and returns True
+ for objects that can't be subclasses of anything.
+ """
+ obj_type = type(obj)
+ return obj_type in class_types or issubclass(obj_type, type)
+
+
+# backwards compat (issue #64)
+is_generator = isgenerator
+
+
+def ispackage(path):
+ """
+ Is this path a package directory?
+
+ >>> ispackage('nose')
+ True
+ >>> ispackage('unit_tests')
+ False
+ >>> ispackage('nose/plugins')
+ True
+ >>> ispackage('nose/loader.py')
+ False
+ """
+ if os.path.isdir(path):
+ # at least the end of the path must be a legal python identifier
+ # and __init__.py[co] must exist
+ end = os.path.basename(path)
+ if ident_re.match(end):
+ for init in ('__init__.py', '__init__.pyc', '__init__.pyo'):
+ if os.path.isfile(os.path.join(path, init)):
+ return True
+ if sys.platform.startswith('java') and \
+ os.path.isfile(os.path.join(path, '__init__$py.class')):
+ return True
+ return False
+
+
+def isproperty(obj):
+ """
+ Is this a property?
+
+ >>> class Foo:
+ ... def got(self):
+ ... return 2
+ ... def get(self):
+ ... return 1
+ ... get = property(get)
+
+ >>> isproperty(Foo.got)
+ False
+ >>> isproperty(Foo.get)
+ True
+ """
+ return type(obj) == property
+
+
+def getfilename(package, relativeTo=None):
+ """Find the python source file for a package, relative to a
+ particular directory (defaults to current working directory if not
+ given).
+ """
+ if relativeTo is None:
+ relativeTo = os.getcwd()
+ path = os.path.join(relativeTo, os.sep.join(package.split('.')))
+ if os.path.exists(path + '/__init__.py'):
+ return path
+ filename = path + '.py'
+ if os.path.exists(filename):
+ return filename
+ return None
+
+
+def getpackage(filename):
+ """
+ Find the full dotted package name for a given python source file
+ name. Returns None if the file is not a python source file.
+
+ >>> getpackage('foo.py')
+ 'foo'
+ >>> getpackage('biff/baf.py')
+ 'baf'
+ >>> getpackage('nose/util.py')
+ 'nose.util'
+
+ Works for directories too.
+
+ >>> getpackage('nose')
+ 'nose'
+ >>> getpackage('nose/plugins')
+ 'nose.plugins'
+
+ And __init__ files stuck onto directories
+
+ >>> getpackage('nose/plugins/__init__.py')
+ 'nose.plugins'
+
+ Absolute paths also work.
+
+ >>> path = os.path.abspath(os.path.join('nose', 'plugins'))
+ >>> getpackage(path)
+ 'nose.plugins'
+ """
+ src_file = src(filename)
+ if (os.path.isdir(src_file) or not src_file.endswith('.py')) and not ispackage(src_file):
+ return None
+ base, ext = os.path.splitext(os.path.basename(src_file))
+ if base == '__init__':
+ mod_parts = []
+ else:
+ mod_parts = [base]
+ path, part = os.path.split(os.path.split(src_file)[0])
+ while part:
+ if ispackage(os.path.join(path, part)):
+ mod_parts.append(part)
+ else:
+ break
+ path, part = os.path.split(path)
+ mod_parts.reverse()
+ return '.'.join(mod_parts)
+
+
+def ln(label):
+ """Draw a 70-char-wide divider, with label in the middle.
+
+ >>> ln('hello there')
+ '---------------------------- hello there -----------------------------'
+ """
+ label_len = len(label) + 2
+ chunk = (70 - label_len) // 2
+ out = '%s %s %s' % ('-' * chunk, label, '-' * chunk)
+ pad = 70 - len(out)
+ if pad > 0:
+ out = out + ('-' * pad)
+ return out
+
+
+def resolve_name(name, module=None):
+ """Resolve a dotted name to a module and its parts. This is stolen
+    wholesale from unittest.TestLoader.loadTestsFromName.
+
+ >>> resolve_name('nose.util') #doctest: +ELLIPSIS
+ <module 'nose.util' from...>
+ >>> resolve_name('nose.util.resolve_name') #doctest: +ELLIPSIS
+ <function resolve_name at...>
+ """
+ parts = name.split('.')
+ parts_copy = parts[:]
+ if module is None:
+ while parts_copy:
+ try:
+ log.debug("__import__ %s", name)
+ module = __import__('.'.join(parts_copy))
+ break
+ except ImportError:
+ del parts_copy[-1]
+ if not parts_copy:
+ raise
+ parts = parts[1:]
+ obj = module
+ log.debug("resolve: %s, %s, %s, %s", parts, name, obj, module)
+ for part in parts:
+ obj = getattr(obj, part)
+ return obj
+
+
+def split_test_name(test):
+ """Split a test name into a 3-tuple containing file, module, and callable
+ names, any of which (but not all) may be blank.
+
+ Test names are in the form:
+
+ file_or_module:callable
+
+ Either side of the : may be dotted. To change the splitting behavior, you
+ can alter nose.util.split_test_re.
+ """
+ norm = os.path.normpath
+ file_or_mod = test
+ fn = None
+ if not ':' in test:
+ # only a file or mod part
+ if file_like(test):
+ return (norm(test), None, None)
+ else:
+ return (None, test, None)
+
+ # could be path|mod:callable, or a : in the file path someplace
+ head, tail = os.path.split(test)
+ if not head:
+ # this is a case like 'foo:bar' -- generally a module
+ # name followed by a callable, but also may be a windows
+ # drive letter followed by a path
+ try:
+ file_or_mod, fn = test.split(':')
+ if file_like(fn):
+ # must be a funny path
+ file_or_mod, fn = test, None
+ except ValueError:
+ # more than one : in the test
+ # this is a case like c:\some\path.py:a_test
+ parts = test.split(':')
+ if len(parts[0]) == 1:
+ file_or_mod, fn = ':'.join(parts[:-1]), parts[-1]
+ else:
+ # nonsense like foo:bar:baz
+ raise ValueError("Test name '%s' could not be parsed. Please "
+ "format test names as path:callable or "
+ "module:callable." % (test,))
+ elif not tail:
+ # this is a case like 'foo:bar/'
+ # : must be part of the file path, so ignore it
+ file_or_mod = test
+ else:
+ if ':' in tail:
+ file_part, fn = tail.split(':')
+ else:
+ file_part = tail
+ file_or_mod = os.sep.join([head, file_part])
+ if file_or_mod:
+ if file_like(file_or_mod):
+ return (norm(file_or_mod), None, fn)
+ else:
+ return (None, file_or_mod, fn)
+ else:
+ return (None, None, fn)
+split_test_name.__test__ = False # do not collect
+
+
+def test_address(test):
+ """Find the test address for a test, which may be a module, filename,
+ class, method or function.
+ """
+ if hasattr(test, "address"):
+ return test.address()
+ # type-based polymorphism sucks in general, but I believe is
+ # appropriate here
+ t = type(test)
+ file = module = call = None
+ if t == types.ModuleType:
+ file = getattr(test, '__file__', None)
+ module = getattr(test, '__name__', None)
+ return (src(file), module, call)
+ if t == types.FunctionType or issubclass(t, type) or t == types.ClassType:
+ module = getattr(test, '__module__', None)
+ if module is not None:
+ m = sys.modules[module]
+ file = getattr(m, '__file__', None)
+ if file is not None:
+ file = os.path.abspath(file)
+ call = getattr(test, '__name__', None)
+ return (src(file), module, call)
+ if t == types.MethodType:
+ cls_adr = test_address(test.im_class)
+ return (src(cls_adr[0]), cls_adr[1],
+ "%s.%s" % (cls_adr[2], test.__name__))
+ # handle unittest.TestCase instances
+ if isinstance(test, unittest.TestCase):
+ if (hasattr(test, '_FunctionTestCase__testFunc') # pre 2.7
+ or hasattr(test, '_testFunc')): # 2.7
+ # unittest FunctionTestCase
+ try:
+ return test_address(test._FunctionTestCase__testFunc)
+ except AttributeError:
+ return test_address(test._testFunc)
+ # regular unittest.TestCase
+ cls_adr = test_address(test.__class__)
+ # 2.5 compat: __testMethodName changed to _testMethodName
+ try:
+ method_name = test._TestCase__testMethodName
+ except AttributeError:
+ method_name = test._testMethodName
+ return (src(cls_adr[0]), cls_adr[1],
+ "%s.%s" % (cls_adr[2], method_name))
+ if (hasattr(test, '__class__') and
+ test.__class__.__module__ not in ('__builtin__', 'builtins')):
+ return test_address(test.__class__)
+ raise TypeError("I don't know what %s is (%s)" % (test, t))
+test_address.__test__ = False # do not collect
+
+
+def try_run(obj, names):
+ """Given a list of possible method names, try to run them with the
+ provided object. Keep going until something works. Used to run
+ setup/teardown methods for module, package, and function tests.
+ """
+ for name in names:
+ func = getattr(obj, name, None)
+ if func is not None:
+ if type(obj) == types.ModuleType:
+ # py.test compatibility
+ if isinstance(func, types.FunctionType):
+ args, varargs, varkw, defaults = \
+ inspect.getargspec(func)
+ else:
+ # Not a function. If it's callable, call it anyway
+ if hasattr(func, '__call__') and not inspect.ismethod(func):
+ func = func.__call__
+ try:
+ args, varargs, varkw, defaults = \
+ inspect.getargspec(func)
+ args.pop(0) # pop the self off
+ except TypeError:
+ raise TypeError("Attribute %s of %r is not a python "
+ "function. Only functions or callables"
+ " may be used as fixtures." %
+ (name, obj))
+ if len(args):
+ log.debug("call fixture %s.%s(%s)", obj, name, obj)
+ return func(obj)
+ log.debug("call fixture %s.%s", obj, name)
+ return func()
+
+
+def src(filename):
+ """Find the python source file for a .pyc, .pyo or $py.class file on
+ jython. Returns the filename provided if it is not a python source
+ file.
+ """
+ if filename is None:
+ return filename
+ if sys.platform.startswith('java') and filename.endswith('$py.class'):
+ return '.'.join((filename[:-9], 'py'))
+ base, ext = os.path.splitext(filename)
+ if ext in ('.pyc', '.pyo', '.py'):
+ return '.'.join((base, 'py'))
+ return filename
+
+
+def regex_last_key(regex):
+ """Sort key function factory that puts items that match a
+ regular expression last.
+
+ >>> from nose.config import Config
+ >>> from nose.pyversion import sort_list
+ >>> c = Config()
+ >>> regex = c.testMatch
+ >>> entries = ['.', '..', 'a_test', 'src', 'lib', 'test', 'foo.py']
+ >>> sort_list(entries, regex_last_key(regex))
+ >>> entries
+ ['.', '..', 'foo.py', 'lib', 'src', 'a_test', 'test']
+ """
+ def k(obj):
+ if regex.search(obj):
+ return (1, obj)
+ return (0, obj)
+ return k
+
+
+def tolist(val):
+ """Convert a value that may be a list or a (possibly comma-separated)
+ string into a list. The exception: None is returned as None, not [None].
+
+ >>> tolist(["one", "two"])
+ ['one', 'two']
+ >>> tolist("hello")
+ ['hello']
+ >>> tolist("separate,values, with, commas, spaces , are ,ok")
+ ['separate', 'values', 'with', 'commas', 'spaces', 'are', 'ok']
+ """
+ if val is None:
+ return None
+ try:
+ # might already be a list
+ val.extend([])
+ return val
+ except AttributeError:
+ pass
+ # might be a string
+ try:
+ return re.split(r'\s*,\s*', val)
+ except TypeError:
+ # who knows...
+ return list(val)
+
+
+class odict(dict):
+ """Simple ordered dict implementation, based on:
+
+ http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/107747
+ """
+ def __init__(self, *arg, **kw):
+ self._keys = []
+ super(odict, self).__init__(*arg, **kw)
+
+ def __delitem__(self, key):
+ super(odict, self).__delitem__(key)
+ self._keys.remove(key)
+
+ def __setitem__(self, key, item):
+ super(odict, self).__setitem__(key, item)
+ if key not in self._keys:
+ self._keys.append(key)
+
+ def __str__(self):
+ return "{%s}" % ', '.join(["%r: %r" % (k, v) for k, v in self.items()])
+
+ def clear(self):
+ super(odict, self).clear()
+ self._keys = []
+
+    def copy(self):
+        # dict.copy() would return a plain dict that cannot carry the
+        # _keys attribute, so build a new odict and preserve key order.
+        d = odict()
+        d.update(self)
+        return d
+
+ def items(self):
+ return zip(self._keys, self.values())
+
+ def keys(self):
+ return self._keys[:]
+
+ def setdefault(self, key, failobj=None):
+ item = super(odict, self).setdefault(key, failobj)
+ if key not in self._keys:
+ self._keys.append(key)
+ return item
+
+ def update(self, dict):
+ super(odict, self).update(dict)
+ for key in dict.keys():
+ if key not in self._keys:
+ self._keys.append(key)
+
+ def values(self):
+ return map(self.get, self._keys)
+
+
+def transplant_func(func, module):
+ """
+ Make a function imported from module A appear as if it is located
+ in module B.
+
+ >>> from pprint import pprint
+ >>> pprint.__module__
+ 'pprint'
+ >>> pp = transplant_func(pprint, __name__)
+ >>> pp.__module__
+ 'nose.util'
+
+ The original function is not modified.
+
+ >>> pprint.__module__
+ 'pprint'
+
+ Calling the transplanted function calls the original.
+
+ >>> pp([1, 2])
+ [1, 2]
+ >>> pprint([1,2])
+ [1, 2]
+
+ """
+ from nose.tools import make_decorator
+ if isgenerator(func):
+ def newfunc(*arg, **kw):
+ for v in func(*arg, **kw):
+ yield v
+ else:
+ def newfunc(*arg, **kw):
+ return func(*arg, **kw)
+
+ newfunc = make_decorator(func)(newfunc)
+ newfunc.__module__ = module
+ return newfunc
+
+
+def transplant_class(cls, module):
+ """
+ Make a class appear to reside in `module`, rather than the module in which
+ it is actually defined.
+
+ >>> from nose.failure import Failure
+ >>> Failure.__module__
+ 'nose.failure'
+ >>> Nf = transplant_class(Failure, __name__)
+ >>> Nf.__module__
+ 'nose.util'
+ >>> Nf.__name__
+ 'Failure'
+
+ """
+ class C(cls):
+ pass
+ C.__module__ = module
+ C.__name__ = cls.__name__
+ return C
+
+
+def safe_str(val, encoding='utf-8'):
+ try:
+ return str(val)
+ except UnicodeEncodeError:
+ if isinstance(val, Exception):
+ return ' '.join([safe_str(arg, encoding)
+ for arg in val])
+ return unicode(val).encode(encoding)
+
+
+def is_executable(file):
+ if not os.path.exists(file):
+ return False
+ st = os.stat(file)
+ return bool(st.st_mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH))
+
+
+if __name__ == '__main__':
+ import doctest
+ doctest.testmod()
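A quick sketch of the ordering guarantee odict provides over a plain dict (Python 2, so keys() returns a list; the keys used here are arbitrary):

    d = odict()
    d['b'] = 1
    d['a'] = 2
    d['c'] = 3
    assert d.keys() == ['b', 'a', 'c']   # insertion order, not hash order
    del d['a']
    d['a'] = 4
    assert d.keys() == ['b', 'c', 'a']   # re-inserted keys move to the end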
diff --git a/lib/spack/external/ordereddict.py b/lib/spack/external/ordereddict_backport.py
index 8ddad1477e..8ddad1477e 100644
--- a/lib/spack/external/ordereddict.py
+++ b/lib/spack/external/ordereddict_backport.py
diff --git a/lib/spack/external/pyqver2.py b/lib/spack/external/pyqver2.py
index 4a16e2811e..4690239748 100755
--- a/lib/spack/external/pyqver2.py
+++ b/lib/spack/external/pyqver2.py
@@ -30,7 +30,8 @@ import sys
StandardModules = {
"__future__": (2, 1),
"abc": (2, 6),
- "argparse": (2, 7),
+# skip argparse now that it's in lib/spack/external
+# "argparse": (2, 7),
"ast": (2, 6),
"atexit": (2, 0),
"bz2": (2, 3),
diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py
index 029a7536df..24cfbfde71 100644
--- a/lib/spack/llnl/util/filesystem.py
+++ b/lib/spack/llnl/util/filesystem.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -222,7 +222,7 @@ def working_dir(dirname, **kwargs):
def touch(path):
"""Creates an empty file at the specified path."""
- with closing(open(path, 'a')) as file:
+ with open(path, 'a') as file:
os.utime(path, None)
diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py
index 9e1bef18ca..1c4d1ed623 100644
--- a/lib/spack/llnl/util/lang.py
+++ b/lib/spack/llnl/util/lang.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -26,6 +26,7 @@ import os
import re
import sys
import functools
+import collections
import inspect
# Ignore emacs backups when listing modules
@@ -87,10 +88,7 @@ def index_by(objects, *funcs):
result = {}
for o in objects:
key = f(o)
- if key not in result:
- result[key] = [o]
- else:
- result[key].append(o)
+ result.setdefault(key, []).append(o)
for key, objects in result.items():
result[key] = index_by(objects, *funcs[1:])
@@ -115,7 +113,7 @@ def partition_list(elements, predicate):
def caller_locals():
"""This will return the locals of the *parent* of the caller.
- This allows a fucntion to insert variables into its caller's
+ This allows a function to insert variables into its caller's
scope. Yes, this is some black magic, and yes it's useful
for implementing things like depends_on and provides.
"""
@@ -170,16 +168,32 @@ def has_method(cls, name):
return False
-def memoized(obj):
+class memoized(object):
"""Decorator that caches the results of a function, storing them
in an attribute of that function."""
- cache = obj.cache = {}
- @functools.wraps(obj)
- def memoizer(*args, **kwargs):
- if args not in cache:
- cache[args] = obj(*args, **kwargs)
- return cache[args]
- return memoizer
+ def __init__(self, func):
+ self.func = func
+ self.cache = {}
+
+
+ def __call__(self, *args):
+        if not all(isinstance(a, collections.Hashable) for a in args):
+ # Not hashable, so just call the function.
+ return self.func(*args)
+
+ if args not in self.cache:
+ self.cache[args] = self.func(*args)
+ return self.cache[args]
+
+
+ def __get__(self, obj, objtype):
+ """Support instance methods."""
+ return functools.partial(self.__call__, obj)
+
+
+ def clear(self):
+ """Expunge cache so that self.func will be called again."""
+ self.cache.clear()
def list_modules(directory, **kwargs):
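A minimal sketch of how the new class-based memoized decorator is used; fib is a hypothetical function, not part of this patch:

    @memoized
    def fib(n):
        return n if n < 2 else fib(n - 1) + fib(n - 2)

    fib(30)      # computed once; repeat calls with the same args hit the cache
    fib.clear()  # new in the class-based form: drop all cached results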
diff --git a/lib/spack/llnl/util/link_tree.py b/lib/spack/llnl/util/link_tree.py
index 583f077b79..6ae8aff75c 100644
--- a/lib/spack/llnl/util/link_tree.py
+++ b/lib/spack/llnl/util/link_tree.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/lib/spack/llnl/util/lock.py b/lib/spack/llnl/util/lock.py
new file mode 100644
index 0000000000..a7a9bf6b19
--- /dev/null
+++ b/lib/spack/llnl/util/lock.py
@@ -0,0 +1,175 @@
+##############################################################################
+# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import os
+import fcntl
+import errno
+import time
+import socket
+
+# Default timeout in seconds, after which locks will raise exceptions.
+_default_timeout = 60
+
+# Sleep time per iteration in spin loop (in seconds)
+_sleep_time = 1e-5
+
+
+class Lock(object):
+    def __init__(self, file_path):
+ self._file_path = file_path
+ self._fd = None
+ self._reads = 0
+ self._writes = 0
+
+
+ def _lock(self, op, timeout):
+        """This takes a lock using POSIX locks (``fcntl.lockf``).
+
+ The lock is implemented as a spin lock using a nonblocking
+ call to lockf().
+
+ On acquiring an exclusive lock, the lock writes this process's
+ pid and host to the lock file, in case the holding process
+ needs to be killed later.
+
+ If the lock times out, it raises a ``LockError``.
+ """
+ start_time = time.time()
+ while (time.time() - start_time) < timeout:
+ try:
+ if self._fd is None:
+ self._fd = os.open(self._file_path, os.O_RDWR)
+
+ fcntl.lockf(self._fd, op | fcntl.LOCK_NB)
+ if op == fcntl.LOCK_EX:
+ os.write(self._fd, "pid=%s,host=%s" % (os.getpid(), socket.getfqdn()))
+ return
+
+ except IOError as error:
+ if error.errno == errno.EAGAIN or error.errno == errno.EACCES:
+ pass
+ else:
+ raise
+ time.sleep(_sleep_time)
+
+ raise LockError("Timed out waiting for lock.")
+
+
+ def _unlock(self):
+ """Releases a lock using POSIX locks (``fcntl.lockf``)
+
+ Releases the lock regardless of mode. Note that read locks may
+ be masquerading as write locks, but this removes either.
+
+ """
+        fcntl.lockf(self._fd, fcntl.LOCK_UN)
+ os.close(self._fd)
+ self._fd = None
+
+
+ def acquire_read(self, timeout=_default_timeout):
+ """Acquires a recursive, shared lock for reading.
+
+ Read and write locks can be acquired and released in arbitrary
+ order, but the POSIX lock is held until all local read and
+ write locks are released.
+
+ Returns True if it is the first acquire and actually acquires
+ the POSIX lock, False if it is a nested transaction.
+
+ """
+ if self._reads == 0 and self._writes == 0:
+ self._lock(fcntl.LOCK_SH, timeout) # can raise LockError.
+ self._reads += 1
+ return True
+ else:
+ self._reads += 1
+ return False
+
+
+ def acquire_write(self, timeout=_default_timeout):
+ """Acquires a recursive, exclusive lock for writing.
+
+ Read and write locks can be acquired and released in arbitrary
+ order, but the POSIX lock is held until all local read and
+ write locks are released.
+
+ Returns True if it is the first acquire and actually acquires
+ the POSIX lock, False if it is a nested transaction.
+
+ """
+ if self._writes == 0:
+ self._lock(fcntl.LOCK_EX, timeout) # can raise LockError.
+ self._writes += 1
+ return True
+ else:
+ self._writes += 1
+ return False
+
+
+ def release_read(self):
+ """Releases a read lock.
+
+ Returns True if the last recursive lock was released, False if
+ there are still outstanding locks.
+
+ Does limited correctness checking: if a read lock is released
+ when none are held, this will raise an assertion error.
+
+ """
+ assert self._reads > 0
+
+ if self._reads == 1 and self._writes == 0:
+ self._unlock() # can raise LockError.
+ self._reads -= 1
+ return True
+ else:
+ self._reads -= 1
+ return False
+
+
+ def release_write(self):
+ """Releases a write lock.
+
+ Returns True if the last recursive lock was released, False if
+ there are still outstanding locks.
+
+        Does limited correctness checking: if a write lock is released
+        when none are held, this will raise an assertion error.
+
+ """
+ assert self._writes > 0
+
+ if self._writes == 1 and self._reads == 0:
+ self._unlock() # can raise LockError.
+ self._writes -= 1
+ return True
+ else:
+ self._writes -= 1
+ return False
+
+
+class LockError(Exception):
+ """Raised when an attempt to acquire a lock times out."""
+ pass
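A usage sketch for the new Lock class. Note that _lock() opens the file with os.O_RDWR and no O_CREAT, so the lock file must already exist; the path here is hypothetical:

    lock = Lock('/var/tmp/spack.lock')

    lock.acquire_read()    # returns True: takes the shared POSIX lock
    lock.acquire_read()    # returns False: nested, only bumps the count
    lock.release_read()    # returns False: one read lock still outstanding
    lock.release_read()    # returns True: last release drops the POSIX lock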
diff --git a/lib/spack/llnl/util/tty/__init__.py b/lib/spack/llnl/util/tty/__init__.py
index 48368543ff..3ecd3a4ac2 100644
--- a/lib/spack/llnl/util/tty/__init__.py
+++ b/lib/spack/llnl/util/tty/__init__.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -63,35 +63,46 @@ def msg(message, *args):
def info(message, *args, **kwargs):
format = kwargs.get('format', '*b')
stream = kwargs.get('stream', sys.stdout)
+ wrap = kwargs.get('wrap', False)
cprint("@%s{==>} %s" % (format, cescape(str(message))), stream=stream)
for arg in args:
- lines = textwrap.wrap(
- str(arg), initial_indent=indent, subsequent_indent=indent)
- for line in lines:
- stream.write(line + '\n')
+ if wrap:
+ lines = textwrap.wrap(
+ str(arg), initial_indent=indent, subsequent_indent=indent)
+ for line in lines:
+ stream.write(line + '\n')
+ else:
+ stream.write(indent + str(arg) + '\n')
-def verbose(message, *args):
+def verbose(message, *args, **kwargs):
if _verbose:
- info(message, *args, format='c')
+ kwargs.setdefault('format', 'c')
+ info(message, *args, **kwargs)
-def debug(message, *args):
+def debug(message, *args, **kwargs):
if _debug:
- info(message, *args, format='g', stream=sys.stderr)
+ kwargs.setdefault('format', 'g')
+ kwargs.setdefault('stream', sys.stderr)
+ info(message, *args, **kwargs)
-def error(message, *args):
- info("Error: " + str(message), *args, format='*r', stream=sys.stderr)
+def error(message, *args, **kwargs):
+ kwargs.setdefault('format', '*r')
+ kwargs.setdefault('stream', sys.stderr)
+ info("Error: " + str(message), *args, **kwargs)
-def warn(message, *args):
- info("Warning: " + str(message), *args, format='*Y', stream=sys.stderr)
+def warn(message, *args, **kwargs):
+ kwargs.setdefault('format', '*Y')
+ kwargs.setdefault('stream', sys.stderr)
+ info("Warning: " + str(message), *args, **kwargs)
-def die(message, *args):
- error(message, *args)
+def die(message, *args, **kwargs):
+ error(message, *args, **kwargs)
sys.exit(1)
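With the wrappers now forwarding keyword arguments via setdefault(), callers can override the defaults per call; a small hypothetical example:

    tty.warn("config deprecated",
             "use --scope instead of editing files directly",
             wrap=True)                      # wrap long args like info() does
    tty.debug("probing compilers", stream=sys.stdout)  # override stderr default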
diff --git a/lib/spack/llnl/util/tty/colify.py b/lib/spack/llnl/util/tty/colify.py
index 66c52c3968..47c3cc4f8f 100644
--- a/lib/spack/llnl/util/tty/colify.py
+++ b/lib/spack/llnl/util/tty/colify.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -33,8 +33,7 @@ import struct
from StringIO import StringIO
from llnl.util.tty import terminal_size
-from llnl.util.tty.color import clen
-
+from llnl.util.tty.color import clen, cextra
class ColumnConfig:
def __init__(self, cols):
@@ -42,7 +41,6 @@ class ColumnConfig:
self.line_length = 0
self.valid = True
self.widths = [0] * cols # does not include ansi colors
- self.cwidths = [0] * cols # includes ansi colors
def __repr__(self):
attrs = [(a,getattr(self, a)) for a in dir(self) if not a.startswith("__")]
@@ -66,8 +64,6 @@ def config_variable_cols(elts, console_width, padding, cols=0):
# Get a bound on the most columns we could possibly have.
# 'clen' ignores length of ansi color sequences.
lengths = [clen(e) for e in elts]
- clengths = [len(e) for e in elts]
-
max_cols = max(1, console_width / (min(lengths) + padding))
max_cols = min(len(elts), max_cols)
@@ -85,7 +81,6 @@ def config_variable_cols(elts, console_width, padding, cols=0):
if conf.widths[col] < (length + p):
conf.line_length += length + p - conf.widths[col]
conf.widths[col] = length + p
- conf.cwidths[col] = clengths[i] + p
conf.valid = (conf.line_length < console_width)
try:
@@ -118,7 +113,6 @@ def config_uniform_cols(elts, console_width, padding, cols=0):
config = ColumnConfig(cols)
config.widths = [max_len] * cols
- config.cwidths = [max_clen] * cols
return config
@@ -147,9 +141,6 @@ def colify(elts, **options):
method=<string> Method to use to fit columns. Options are variable or uniform.
Variable-width columns are tighter, uniform columns are all the
same width and fit less data on the screen.
-
- len=<func> Function to use for calculating string length.
- Useful for ignoring ansi color. Default is 'len'.
"""
# Get keyword arguments or set defaults
cols = options.pop("cols", 0)
@@ -199,9 +190,6 @@ def colify(elts, **options):
raise ValueError("method must be one of: " + allowed_methods)
cols = config.cols
- formats = ["%%-%ds" % width for width in config.cwidths[:-1]]
- formats.append("%s") # last column has no trailing space
-
rows = (len(elts) + cols - 1) / cols
rows_last_col = len(elts) % rows
@@ -209,7 +197,9 @@ def colify(elts, **options):
output.write(" " * indent)
for col in xrange(cols):
elt = col * rows + row
- output.write(formats[col] % elts[elt])
+ width = config.widths[col] + cextra(elts[elt])
+ fmt = '%%-%ds' % width
+ output.write(fmt % elts[elt])
output.write("\n")
row += 1
@@ -220,6 +210,13 @@ def colify(elts, **options):
def colify_table(table, **options):
+    """Version of colify() for data expressed in rows (list of lists).
+
+ Same as regular colify but takes a list of lists, where each
+ sub-list must be the same length, and each is interpreted as a
+ row in a table. Regular colify displays a sequential list of
+ values in columns.
+ """
if table is None:
raise TypeError("Can't call colify_table on NoneType")
elif not table or not table[0]:
diff --git a/lib/spack/llnl/util/tty/color.py b/lib/spack/llnl/util/tty/color.py
index 22080a7b37..167a99d3c2 100644
--- a/lib/spack/llnl/util/tty/color.py
+++ b/lib/spack/llnl/util/tty/color.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -158,6 +158,11 @@ def clen(string):
return len(re.sub(r'\033[^m]*m', '', string))
+def cextra(string):
+    """Length of extra color characters in a string."""
+ return len(''.join(re.findall(r'\033[^m]*m', string)))
+
+
def cwrite(string, stream=sys.stdout, color=None):
"""Replace all color expressions in string with ANSI control
codes and write the result to the stream. If color is
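How clen() and cextra() decompose a colorized string; the escape sequences here are ordinary ANSI codes:

    s = '\033[1;34mhello\033[0m'
    clen(s)      # 5  -- visible characters only
    cextra(s)    # 11 -- the two escape sequences (7 + 4 characters)
    len(s)       # 16 == clen(s) + cextra(s)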
diff --git a/lib/spack/llnl/util/tty/log.py b/lib/spack/llnl/util/tty/log.py
index 5a52d45bc7..22f1087e53 100644
--- a/lib/spack/llnl/util/tty/log.py
+++ b/lib/spack/llnl/util/tty/log.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -122,6 +122,10 @@ class log_output(object):
self.force_color = force_color
self.debug = debug
+ # Default is to try file-descriptor reassignment unless the system
+ # out/err streams do not have an associated file descriptor
+ self.directAssignment = False
+
def trace(self, frame, event, arg):
"""Jumps to __exit__ on the child process."""
raise _SkipWithBlock()
@@ -185,13 +189,21 @@ class log_output(object):
# Child: redirect output, execute the with block.
os.close(read)
- # Save old stdout and stderr
- self._stdout = os.dup(sys.stdout.fileno())
- self._stderr = os.dup(sys.stderr.fileno())
-
- # redirect to the pipe.
- os.dup2(write, sys.stdout.fileno())
- os.dup2(write, sys.stderr.fileno())
+ try:
+ # Save old stdout and stderr
+ self._stdout = os.dup(sys.stdout.fileno())
+ self._stderr = os.dup(sys.stderr.fileno())
+
+ # redirect to the pipe.
+ os.dup2(write, sys.stdout.fileno())
+ os.dup2(write, sys.stderr.fileno())
+ except AttributeError:
+ self.directAssignment = True
+ self._stdout = sys.stdout
+ self._stderr = sys.stderr
+ output_redirect = os.fdopen(write, 'w')
+ sys.stdout = output_redirect
+ sys.stderr = output_redirect
if self.force_color:
color._force_color = True
@@ -218,8 +230,12 @@ class log_output(object):
#
# TODO: think about how this works outside install.
# TODO: ideally would propagate exception to parent...
- os.dup2(self._stdout, sys.stdout.fileno())
- os.dup2(self._stderr, sys.stderr.fileno())
+ if self.directAssignment:
+ sys.stdout = self._stdout
+ sys.stderr = self._stderr
+ else:
+ os.dup2(self._stdout, sys.stdout.fileno())
+ os.dup2(self._stderr, sys.stderr.fileno())
return False
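The fallback pattern in isolation: fd-level redirection is preferred, but when sys.stdout has no real file descriptor (e.g. it was replaced by a StringIO), fileno() raises AttributeError and the stream objects are reassigned directly. Names here are hypothetical:

    try:
        saved_fd = os.dup(sys.stdout.fileno())   # real fd: redirect at OS level
        os.dup2(write_fd, sys.stdout.fileno())
    except AttributeError:
        saved_obj = sys.stdout                   # no fd: swap the Python object
        sys.stdout = os.fdopen(write_fd, 'w')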
diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py
index 5783005b5b..3051d3f742 100644
--- a/lib/spack/spack/__init__.py
+++ b/lib/spack/spack/__init__.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -23,44 +23,56 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
+import sys
import tempfile
+import getpass
from llnl.util.filesystem import *
+import llnl.util.tty as tty
# This lives in $prefix/lib/spack/spack/__file__
-prefix = ancestor(__file__, 4)
+spack_root = ancestor(__file__, 4)
# The spack script itself
-spack_file = join_path(prefix, "bin", "spack")
+spack_file = join_path(spack_root, "bin", "spack")
# spack directory hierarchy
-etc_path = join_path(prefix, "etc")
-lib_path = join_path(prefix, "lib", "spack")
+lib_path = join_path(spack_root, "lib", "spack")
build_env_path = join_path(lib_path, "env")
module_path = join_path(lib_path, "spack")
compilers_path = join_path(module_path, "compilers")
test_path = join_path(module_path, "test")
hooks_path = join_path(module_path, "hooks")
-var_path = join_path(prefix, "var", "spack")
+var_path = join_path(spack_root, "var", "spack")
stage_path = join_path(var_path, "stage")
+repos_path = join_path(var_path, "repos")
+share_path = join_path(spack_root, "share", "spack")
+
+prefix = spack_root
opt_path = join_path(prefix, "opt")
install_path = join_path(opt_path, "spack")
-share_path = join_path(prefix, "share", "spack")
+etc_path = join_path(prefix, "etc")
#
-# Set up the packages database.
+# Set up the default packages database.
#
-from spack.packages import PackageDB
-packages_path = join_path(var_path, "packages")
-db = PackageDB(packages_path)
+import spack.repository
+try:
+ repo = spack.repository.RepoPath()
+ sys.meta_path.append(repo)
+except spack.error.SpackError, e:
+ tty.die('while initializing Spack RepoPath:', e.message)
#
-# Paths to mock files for testing.
+# Set up the installed packages database
#
-mock_packages_path = join_path(var_path, "mock_packages")
+from spack.database import Database
+installed_db = Database(install_path)
-mock_config_path = join_path(var_path, "mock_configs")
-mock_site_config = join_path(mock_config_path, "site_spackconfig")
-mock_user_config = join_path(mock_config_path, "user_spackconfig")
+#
+# Paths to built-in Spack repositories.
+#
+packages_path = join_path(repos_path, "builtin")
+mock_packages_path = join_path(repos_path, "builtin.mock")
#
# This controls how spack lays out install prefixes and
@@ -117,9 +129,17 @@ use_tmp_stage = True
# that it can create.
tmp_dirs = []
_default_tmp = tempfile.gettempdir()
-if _default_tmp != os.getcwd():
- tmp_dirs.append(os.path.join(_default_tmp, 'spack-stage'))
-tmp_dirs.append('/nfs/tmp2/%u/spack-stage')
+_tmp_user = getpass.getuser()
+
+_tmp_candidates = (_default_tmp, '/nfs/tmp2', '/tmp', '/var/tmp')
+for path in _tmp_candidates:
+ # don't add a second username if it's already unique by user.
+    if _tmp_user not in path:
+ tmp_dirs.append(join_path(path, '%u', 'spack-stage'))
+
+for path in _tmp_candidates:
+    if path not in tmp_dirs:
+ tmp_dirs.append(join_path(path, 'spack-stage'))
# Whether spack should allow installation of unsafe versions of
# software. "Unsafe" versions are ones it doesn't have a checksum
@@ -146,7 +166,7 @@ sys_type = None
# When packages call 'from spack import *', this extra stuff is brought in.
#
# Spack internal code should call 'import spack' and accesses other
-# variables (spack.db, paths, etc.) directly.
+# variables (spack.repo, paths, etc.) directly.
#
# TODO: maybe this should be separated out and should go in build_environment.py?
# TODO: it's not clear where all the stuff that needs to be included in packages
diff --git a/lib/spack/spack/architecture.py b/lib/spack/spack/architecture.py
index 0c4b605e91..2701fab90c 100644
--- a/lib/spack/spack/architecture.py
+++ b/lib/spack/spack/architecture.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -23,13 +23,13 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
-import platform as py_platform
+import re
+import platform
from llnl.util.lang import memoized
import spack
import spack.error as serr
-from spack.version import Version
class InvalidSysTypeError(serr.SpackError):
@@ -59,14 +59,11 @@ def get_sys_type_from_environment():
return os.environ.get('SYS_TYPE')
-def get_mac_sys_type():
- """Return a Mac OS SYS_TYPE or None if this isn't a mac."""
- mac_ver = py_platform.mac_ver()[0]
- if not mac_ver:
- return None
-
- return "macosx_%s_%s" % (
- Version(mac_ver).up_to(2), py_platform.machine())
+def get_sys_type_from_platform():
+ """Return the architecture from Python's platform module."""
+ sys_type = platform.system() + '-' + platform.machine()
+ sys_type = re.sub(r'[^\w-]', '_', sys_type)
+ return sys_type.lower()
@memoized
@@ -74,7 +71,7 @@ def sys_type():
"""Returns a SysType for the current machine."""
methods = [get_sys_type_from_spack_globals,
get_sys_type_from_environment,
- get_mac_sys_type]
+ get_sys_type_from_platform]
# search for a method that doesn't return None
sys_type = None
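What the new platform-based fallback returns on a typical 64-bit Linux machine:

    platform.system()              # 'Linux'
    platform.machine()             # 'x86_64'
    get_sys_type_from_platform()   # 'linux-x86_64'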
diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py
index a133faa629..b2db83acb7 100644
--- a/lib/spack/spack/build_environment.py
+++ b/lib/spack/spack/build_environment.py
@@ -88,10 +88,14 @@ def set_compiler_environment_variables(pkg):
compiler = pkg.compiler
# Set compiler variables used by CMake and autotools
- os.environ['CC'] = join_path(spack.build_env_path, 'cc')
- os.environ['CXX'] = join_path(spack.build_env_path, 'c++')
- os.environ['F77'] = join_path(spack.build_env_path, 'f77')
- os.environ['FC'] = join_path(spack.build_env_path, 'f90')
+ assert all(key in pkg.compiler.link_paths
+ for key in ('cc', 'cxx', 'f77', 'fc'))
+
+ link_dir = spack.build_env_path
+ os.environ['CC'] = join_path(link_dir, pkg.compiler.link_paths['cc'])
+ os.environ['CXX'] = join_path(link_dir, pkg.compiler.link_paths['cxx'])
+ os.environ['F77'] = join_path(link_dir, pkg.compiler.link_paths['f77'])
+ os.environ['FC'] = join_path(link_dir, pkg.compiler.link_paths['fc'])
# Set SPACK compiler variables so that our wrapper knows what to call
if compiler.cc:
@@ -110,11 +114,23 @@ def set_build_environment_variables(pkg):
"""This ensures a clean install environment when we build packages.
"""
# Add spack build environment path with compiler wrappers first in
- # the path. We handle case sensitivity conflicts like "CC" and
- # "cc" by putting one in the <build_env_path>/case-insensitive
+ # the path. We add both spack.env_path, which includes default
+ # wrappers (cc, c++, f77, f90), AND a subdirectory containing
+ # compiler-specific symlinks. The latter ensures that builds that
+ # are sensitive to the *name* of the compiler see the right name
+    # when we're building with the wrappers.
+ #
+ # Conflicts on case-insensitive systems (like "CC" and "cc") are
+ # handled by putting one in the <build_env_path>/case-insensitive
# directory. Add that to the path too.
- env_paths = [spack.build_env_path,
- join_path(spack.build_env_path, 'case-insensitive')]
+ env_paths = []
+ def add_env_path(path):
+ env_paths.append(path)
+ ci = join_path(path, 'case-insensitive')
+ if os.path.isdir(ci): env_paths.append(ci)
+ add_env_path(spack.build_env_path)
+ add_env_path(join_path(spack.build_env_path, pkg.compiler.name))
+
path_put_first("PATH", env_paths)
path_set(SPACK_ENV_PATH, env_paths)
@@ -129,7 +145,7 @@ def set_build_environment_variables(pkg):
# Install root prefix
os.environ[SPACK_INSTALL] = spack.install_path
- # Remove these vars from the environment during build becaus they
+ # Remove these vars from the environment during build because they
# can affect how some packages find libraries. We want to make
# sure that builds never pull in unintended external dependencies.
pop_keys(os.environ, "LD_LIBRARY_PATH", "LD_RUN_PATH", "DYLD_LIBRARY_PATH")
@@ -157,7 +173,7 @@ def set_build_environment_variables(pkg):
path_set("PKG_CONFIG_PATH", pkg_config_dirs)
-def set_module_variables_for_package(pkg):
+def set_module_variables_for_package(pkg, m):
"""Populate the module scope of install() with some useful functions.
This makes things easier for package writers.
"""
@@ -228,11 +244,32 @@ def get_rpaths(pkg):
return rpaths
+def parent_class_modules(cls):
+    """Get list of superclass modules that all descend from spack.Package."""
+ if not issubclass(cls, spack.Package) or issubclass(spack.Package, cls):
+ return []
+ result = []
+ module = sys.modules.get(cls.__module__)
+ if module:
+ result = [ module ]
+ for c in cls.__bases__:
+ result.extend(parent_class_modules(c))
+ return result
+
+
def setup_package(pkg):
"""Execute all environment setup routines."""
set_compiler_environment_variables(pkg)
set_build_environment_variables(pkg)
- set_module_variables_for_package(pkg)
+
+ # If a user makes their own package repo, e.g.
+ # spack.repos.mystuff.libelf.Libelf, and they inherit from
+ # an existing class like spack.repos.original.libelf.Libelf,
+ # then set the module variables for both classes so the
+ # parent class can still use them if it gets called.
+ modules = parent_class_modules(pkg.__class__)
+ for mod in modules:
+ set_module_variables_for_package(pkg, mod)
# Allow dependencies to set up environment as well.
for dep_spec in pkg.spec.traverse(root=False):
@@ -296,4 +333,9 @@ def fork(pkg, function):
# message. Just make the parent exit with an error code.
pid, returncode = os.waitpid(pid, 0)
if returncode != 0:
- sys.exit(1)
+        raise InstallError("Installation process had nonzero exit code {0}."
+                           .format(str(returncode)))
+
+
+class InstallError(spack.error.SpackError):
+ """Raised when a package fails to install"""
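Using the module names from the comment in setup_package() above, parent_class_modules() walks the class hierarchy until it reaches spack.Package itself:

    # class Libelf(Package)   lives in module spack.repos.original.libelf
    # class MyLibelf(Libelf)  lives in module spack.repos.mystuff.libelf
    parent_class_modules(MyLibelf)
    # -> [spack.repos.mystuff.libelf, spack.repos.original.libelf]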
diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py
index b96ac5af51..6c635a1e6c 100644
--- a/lib/spack/spack/cmd/__init__.py
+++ b/lib/spack/spack/cmd/__init__.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -31,6 +31,15 @@ from llnl.util.lang import attr_setdefault
import spack
import spack.spec
+import spack.config
+
+#
+# Settings for commands that modify configuration
+#
+# Commands that modify configuration by default modify the *highest* priority scope.
+default_modify_scope = spack.config.highest_precedence_scope().name
+# Commands that list configuration list *all* scopes by default.
+default_list_scope = None
# cmd has a submodule called "list" so preserve the python list module
python_list = list
@@ -124,7 +133,7 @@ def elide_list(line_list, max_num=10):
def disambiguate_spec(spec):
- matching_specs = spack.db.get_installed(spec)
+ matching_specs = spack.installed_db.query(spec)
if not matching_specs:
tty.die("Spec '%s' matches no installed packages." % spec)
diff --git a/lib/spack/spack/cmd/activate.py b/lib/spack/spack/cmd/activate.py
index 1004f1f8e6..bcd01f2a28 100644
--- a/lib/spack/spack/cmd/activate.py
+++ b/lib/spack/spack/cmd/activate.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -22,7 +22,7 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-from external import argparse
+import argparse
import llnl.util.tty as tty
import spack
import spack.cmd
diff --git a/lib/spack/spack/cmd/arch.py b/lib/spack/spack/cmd/arch.py
index f0e88d1849..db27544ffd 100644
--- a/lib/spack/spack/cmd/arch.py
+++ b/lib/spack/spack/cmd/arch.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/lib/spack/spack/cmd/bootstrap.py b/lib/spack/spack/cmd/bootstrap.py
index f75b68b00a..bdbd623b39 100644
--- a/lib/spack/spack/cmd/bootstrap.py
+++ b/lib/spack/spack/cmd/bootstrap.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -42,7 +42,7 @@ def get_origin_url():
git = which('git', required=True)
origin_url = git(
'--git-dir=%s' % git_dir, 'config', '--get', 'remote.origin.url',
- return_output=True)
+ output=str)
return origin_url.strip()
diff --git a/lib/spack/spack/cmd/cd.py b/lib/spack/spack/cmd/cd.py
index 24d56db7d0..16cbe6555a 100644
--- a/lib/spack/spack/cmd/cd.py
+++ b/lib/spack/spack/cmd/cd.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py
index 8a448450c2..b1ad89dbb8 100644
--- a/lib/spack/spack/cmd/checksum.py
+++ b/lib/spack/spack/cmd/checksum.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -24,7 +24,7 @@
##############################################################################
import os
import re
-from external import argparse
+import argparse
import hashlib
from pprint import pprint
from subprocess import CalledProcessError
@@ -81,7 +81,7 @@ def get_checksums(versions, urls, **kwargs):
def checksum(parser, args):
# get the package we're going to generate checksums for
- pkg = spack.db.get(args.package)
+ pkg = spack.repo.get(args.package)
# If the user asked for specific versions, use those.
if args.versions:
diff --git a/lib/spack/spack/cmd/clean.py b/lib/spack/spack/cmd/clean.py
index c20136ebe5..6e7179122c 100644
--- a/lib/spack/spack/cmd/clean.py
+++ b/lib/spack/spack/cmd/clean.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -22,7 +22,7 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-from external import argparse
+import argparse
import llnl.util.tty as tty
@@ -42,5 +42,5 @@ def clean(parser, args):
specs = spack.cmd.parse_specs(args.packages, concretize=True)
for spec in specs:
- package = spack.db.get(spec)
+ package = spack.repo.get(spec)
package.do_clean()
diff --git a/lib/spack/spack/cmd/compiler.py b/lib/spack/spack/cmd/compiler.py
index 2a64dc914e..75b51f6b49 100644
--- a/lib/spack/spack/cmd/compiler.py
+++ b/lib/spack/spack/cmd/compiler.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -22,7 +22,8 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-from external import argparse
+import sys
+import argparse
import llnl.util.tty as tty
from llnl.util.tty.color import colorize
@@ -41,17 +42,32 @@ def setup_parser(subparser):
sp = subparser.add_subparsers(
metavar='SUBCOMMAND', dest='compiler_command')
- update_parser = sp.add_parser(
- 'add', help='Add compilers to the Spack configuration.')
- update_parser.add_argument('add_paths', nargs=argparse.REMAINDER)
-
- remove_parser = sp.add_parser('remove', help='remove compiler')
- remove_parser.add_argument('path')
-
- list_parser = sp.add_parser('list', help='list available compilers')
-
- info_parser = sp.add_parser('info', help='Show compiler paths.')
+ scopes = spack.config.config_scopes
+
+ # Add
+ add_parser = sp.add_parser('add', help='Add compilers to the Spack configuration.')
+ add_parser.add_argument('add_paths', nargs=argparse.REMAINDER)
+ add_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_modify_scope,
+ help="Configuration scope to modify.")
+
+ # Remove
+ remove_parser = sp.add_parser('remove', aliases=['rm'], help='Remove compiler by spec.')
+ remove_parser.add_argument(
+ '-a', '--all', action='store_true', help='Remove ALL compilers that match spec.')
+ remove_parser.add_argument('compiler_spec')
+ remove_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_modify_scope,
+ help="Configuration scope to modify.")
+
+ # List
+ list_parser = sp.add_parser('list', help='list available compilers')
+ list_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_list_scope,
+ help="Configuration scope to read from.")
+
+ # Info
+ info_parser = sp.add_parser('info', help='Show compiler paths.')
info_parser.add_argument('compiler_spec')
+ info_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_list_scope,
+ help="Configuration scope to read from.")
def compiler_add(args):
@@ -62,26 +78,40 @@ def compiler_add(args):
paths = get_path('PATH')
compilers = [c for c in spack.compilers.find_compilers(*args.add_paths)
- if c.spec not in spack.compilers.all_compilers()]
+ if c.spec not in spack.compilers.all_compilers(scope=args.scope)]
if compilers:
- spack.compilers.add_compilers_to_config('user', *compilers)
+ spack.compilers.add_compilers_to_config(compilers, scope=args.scope)
n = len(compilers)
- tty.msg("Added %d new compiler%s to %s" % (
- n, 's' if n > 1 else '', spack.config.get_config_scope_filename('user', 'compilers')))
+ s = 's' if n > 1 else ''
+ filename = spack.config.get_config_filename(args.scope, 'compilers')
+ tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
colify(reversed(sorted(c.spec for c in compilers)), indent=4)
else:
tty.msg("Found no new compilers")
def compiler_remove(args):
- pass
+ cspec = CompilerSpec(args.compiler_spec)
+ compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope)
+
+ if not compilers:
+ tty.die("No compilers match spec %s." % cspec)
+ elif not args.all and len(compilers) > 1:
+ tty.error("Multiple compilers match spec %s. Choose one:" % cspec)
+ colify(reversed(sorted([c.spec for c in compilers])), indent=4)
+ tty.msg("Or, you can use `spack compiler remove -a` to remove all of them.")
+ sys.exit(1)
+
+ for compiler in compilers:
+ spack.compilers.remove_compiler_from_config(compiler.spec, scope=args.scope)
+ tty.msg("Removed compiler %s." % compiler.spec)
def compiler_info(args):
"""Print info about all compilers matching a spec."""
cspec = CompilerSpec(args.compiler_spec)
- compilers = spack.compilers.compilers_for_spec(cspec)
+ compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope)
if not compilers:
tty.error("No compilers match spec %s." % cspec)
@@ -96,7 +126,7 @@ def compiler_info(args):
def compiler_list(args):
tty.msg("Available compilers")
- index = index_by(spack.compilers.all_compilers(), 'name')
+ index = index_by(spack.compilers.all_compilers(scope=args.scope), 'name')
for i, (name, compilers) in enumerate(index.items()):
if i >= 1: print
@@ -108,6 +138,7 @@ def compiler_list(args):
def compiler(parser, args):
action = { 'add' : compiler_add,
'remove' : compiler_remove,
+ 'rm' : compiler_remove,
'info' : compiler_info,
'list' : compiler_list }
action[args.compiler_command](args)
diff --git a/lib/spack/spack/cmd/compilers.py b/lib/spack/spack/cmd/compilers.py
index 8d046bfd7c..7e09016f2d 100644
--- a/lib/spack/spack/cmd/compilers.py
+++ b/lib/spack/spack/cmd/compilers.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -26,9 +26,14 @@ import llnl.util.tty as tty
from llnl.util.tty.colify import colify
from llnl.util.lang import index_by
+import spack
from spack.cmd.compiler import compiler_list
description = "List available compilers. Same as 'spack compiler list'."
+def setup_parser(subparser):
+ subparser.add_argument('--scope', choices=spack.config.config_scopes,
+ help="Configuration scope to read/modify.")
+
def compilers(parser, args):
compiler_list(args)
diff --git a/lib/spack/spack/cmd/config.py b/lib/spack/spack/cmd/config.py
index 8c18f88b64..5e6d4e4d7d 100644
--- a/lib/spack/spack/cmd/config.py
+++ b/lib/spack/spack/cmd/config.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -23,7 +23,7 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
-from external import argparse
+import argparse
import llnl.util.tty as tty
@@ -44,22 +44,22 @@ def setup_parser(subparser):
sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='config_command')
get_parser = sp.add_parser('get', help='Print configuration values.')
- get_parser.add_argument('category', help="Configuration category to print.")
+ get_parser.add_argument('section', help="Configuration section to print.")
edit_parser = sp.add_parser('edit', help='Edit configuration file.')
- edit_parser.add_argument('category', help="Configuration category to edit")
+ edit_parser.add_argument('section', help="Configuration section to edit")
def config_get(args):
- spack.config.print_category(args.category)
+ spack.config.print_section(args.section)
def config_edit(args):
if not args.scope:
args.scope = 'user'
- if not args.category:
- args.category = None
- config_file = spack.config.get_config_scope_filename(args.scope, args.category)
+ if not args.section:
+ args.section = None
+ config_file = spack.config.get_config_filename(args.scope, args.section)
spack.editor(config_file)
diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py
index 46e6bcec14..edcea0718c 100644
--- a/lib/spack/spack/cmd/create.py
+++ b/lib/spack/spack/cmd/create.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -27,16 +27,18 @@ import os
import hashlib
import re
-from external.ordereddict import OrderedDict
+from ordereddict_backport import OrderedDict
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp
import spack
import spack.cmd
import spack.cmd.checksum
-import spack.package
import spack.url
+import spack.util.web
+from spack.spec import Spec
from spack.util.naming import *
+from spack.repository import Repo, RepoError
import spack.util.crypto as crypto
from spack.util.executable import which
@@ -85,18 +87,34 @@ ${versions}
""")
+def make_version_calls(ver_hash_tuples):
+ """Adds a version() call to the package for each version found."""
+ max_len = max(len(str(v)) for v, h in ver_hash_tuples)
+ format = " version(%%-%ds, '%%s')" % (max_len + 2)
+ return '\n'.join(format % ("'%s'" % v, h) for v, h in ver_hash_tuples)
+
+
def setup_parser(subparser):
subparser.add_argument('url', nargs='?', help="url of package archive")
subparser.add_argument(
- '--keep-stage', action='store_true', dest='keep_stage',
+ '--keep-stage', action='store_true',
help="Don't clean up staging area when command completes.")
subparser.add_argument(
- '-n', '--name', dest='alternate_name', default=None,
+ '-n', '--name', dest='alternate_name', default=None, metavar='NAME',
help="Override the autodetected name for the created package.")
subparser.add_argument(
+ '-r', '--repo', default=None,
+ help="Path to a repository where the package should be created.")
+ subparser.add_argument(
+ '-N', '--namespace',
+ help="Specify a namespace for the package. Must be the namespace of "
+ "a repository registered with Spack.")
+ subparser.add_argument(
'-f', '--force', action='store_true', dest='force',
help="Overwrite any existing package file with the same name.")
+ setup_parser.subparser = subparser
+
class ConfigureGuesser(object):
def __call__(self, stage):
@@ -114,7 +132,7 @@ class ConfigureGuesser(object):
# Peek inside the tarball.
tar = which('tar')
output = tar(
- "--exclude=*/*/*", "-tf", stage.archive_file, return_output=True)
+ "--exclude=*/*/*", "-tf", stage.archive_file, output=str)
lines = output.split("\n")
# Set the configure line to the one that matched.
@@ -134,16 +152,7 @@ class ConfigureGuesser(object):
self.build_system = build_system
-def make_version_calls(ver_hash_tuples):
- """Adds a version() call to the package for each version found."""
- max_len = max(len(str(v)) for v, h in ver_hash_tuples)
- format = " version(%%-%ds, '%%s')" % (max_len + 2)
- return '\n'.join(format % ("'%s'" % v, h) for v, h in ver_hash_tuples)
-
-
-def create(parser, args):
- url = args.url
-
+def guess_name_and_version(url, args):
# Try to deduce name and version of the new package from the URL
version = spack.url.parse_version(url)
if not version:
@@ -160,13 +169,53 @@ def create(parser, args):
tty.die("Couldn't guess a name for this package. Try running:", "",
"spack create --name <name> <url>")
- if not valid_module_name(name):
+ if not valid_fully_qualified_module_name(name):
tty.die("Package name can only contain A-Z, a-z, 0-9, '_' and '-'")
- tty.msg("This looks like a URL for %s version %s." % (name, version))
- tty.msg("Creating template for package %s" % name)
+ return name, version
+
- versions = spack.package.find_versions_of_archive(url)
+def find_repository(spec, args):
+ # figure out namespace for spec
+ if spec.namespace and args.namespace and spec.namespace != args.namespace:
+ tty.die("Namespaces '%s' and '%s' do not match." % (spec.namespace, args.namespace))
+
+ if not spec.namespace and args.namespace:
+ spec.namespace = args.namespace
+
+ # Figure out where the new package should live.
+ repo_path = args.repo
+ if repo_path is not None:
+ try:
+ repo = Repo(repo_path)
+ if spec.namespace and spec.namespace != repo.namespace:
+ tty.die("Can't create package with namespace %s in repo with namespace %s."
+ % (spec.namespace, repo.namespace))
+ except RepoError as e:
+ tty.die(str(e))
+ else:
+ if spec.namespace:
+ repo = spack.repo.get_repo(spec.namespace, None)
+ if not repo:
+ tty.die("Unknown namespace: %s" % spec.namespace)
+ else:
+ repo = spack.repo.first_repo()
+
+ # Set the namespace on the spec if it's not there already
+ if not spec.namespace:
+ spec.namespace = repo.namespace
+
+ return repo
+
+
+def fetch_tarballs(url, name, args):
+ """Try to find versions of the supplied archive by scraping the web.
+
+ Prompts the user to select how many to download if many are found.
+    """
+ versions = spack.util.web.find_versions_of_archive(url)
rkeys = sorted(versions.keys(), reverse=True)
versions = OrderedDict(zip(rkeys, (versions[v] for v in rkeys)))
@@ -184,13 +233,35 @@ def create(parser, args):
default=5, abort='q')
if not archives_to_fetch:
- tty.msg("Aborted.")
- return
+ tty.die("Aborted.")
+
+ sorted_versions = sorted(versions.keys(), reverse=True)
+ sorted_urls = [versions[v] for v in sorted_versions]
+ return sorted_versions[:archives_to_fetch], sorted_urls[:archives_to_fetch]
+
+
+def create(parser, args):
+ url = args.url
+ if not url:
+ setup_parser.subparser.print_help()
+ return
+
+ # Figure out a name and repo for the package.
+ name, version = guess_name_and_version(url, args)
+ spec = Spec(name)
+ name = spec.name # factors out namespace, if any
+ repo = find_repository(spec, args)
+
+ tty.msg("This looks like a URL for %s version %s." % (name, version))
+ tty.msg("Creating template for package %s" % name)
+
+ # Fetch tarballs (prompting user if necessary)
+ versions, urls = fetch_tarballs(url, name, args)
+ # Try to guess what configure system is used.
guesser = ConfigureGuesser()
ver_hash_tuples = spack.cmd.checksum.get_checksums(
- versions.keys()[:archives_to_fetch],
- [versions[v] for v in versions.keys()[:archives_to_fetch]],
+ versions, urls,
first_stage_function=guesser,
keep_stage=args.keep_stage)
@@ -202,7 +273,7 @@ def create(parser, args):
name = 'py-%s' % name
# Create a directory for the new package.
- pkg_path = spack.db.filename_for_package_name(name)
+ pkg_path = repo.filename_for_package_name(name)
if os.path.exists(pkg_path) and not args.force:
tty.die("%s already exists." % pkg_path)
else:
diff --git a/lib/spack/spack/cmd/deactivate.py b/lib/spack/spack/cmd/deactivate.py
index e44be41029..d6b23d6a08 100644
--- a/lib/spack/spack/cmd/deactivate.py
+++ b/lib/spack/spack/cmd/deactivate.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -22,7 +22,7 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-from external import argparse
+import argparse
import llnl.util.tty as tty
import spack
@@ -37,7 +37,7 @@ def setup_parser(subparser):
help="Run deactivation even if spec is NOT currently activated.")
subparser.add_argument(
'-a', '--all', action='store_true',
- help="Deactivate all extensions of an extendable pacakge, or "
+ help="Deactivate all extensions of an extendable package, or "
"deactivate an extension AND its dependencies.")
subparser.add_argument(
'spec', nargs=argparse.REMAINDER, help="spec of package extension to deactivate.")
@@ -54,7 +54,7 @@ def deactivate(parser, args):
if args.all:
if pkg.extendable:
tty.msg("Deactivating all extensions of %s" % pkg.spec.short_spec)
- ext_pkgs = spack.db.installed_extensions_for(spec)
+ ext_pkgs = spack.installed_db.installed_extensions_for(spec)
for ext_pkg in ext_pkgs:
ext_pkg.spec.normalize()
diff --git a/lib/spack/spack/cmd/dependents.py b/lib/spack/spack/cmd/dependents.py
index 652f243b98..de76098d2f 100644
--- a/lib/spack/spack/cmd/dependents.py
+++ b/lib/spack/spack/cmd/dependents.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -22,7 +22,7 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-from external import argparse
+import argparse
import llnl.util.tty as tty
diff --git a/lib/spack/spack/cmd/diy.py b/lib/spack/spack/cmd/diy.py
index 6e7f10fba6..9df53312f8 100644
--- a/lib/spack/spack/cmd/diy.py
+++ b/lib/spack/spack/cmd/diy.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -24,7 +24,7 @@
##############################################################################
import sys
import os
-from external import argparse
+import argparse
import llnl.util.tty as tty
@@ -58,36 +58,38 @@ def diy(self, args):
if len(specs) > 1:
tty.die("spack diy only takes one spec.")
- spec = specs[0]
- if not spack.db.exists(spec.name):
- tty.warn("No such package: %s" % spec.name)
- create = tty.get_yes_or_no("Create this package?", default=False)
- if not create:
- tty.msg("Exiting without creating.")
- sys.exit(1)
- else:
- tty.msg("Running 'spack edit -f %s'" % spec.name)
- edit_package(spec.name, True)
- return
+ # Take a write lock before checking for existence.
+ with spack.installed_db.write_transaction():
+ spec = specs[0]
+ if not spack.repo.exists(spec.name):
+ tty.warn("No such package: %s" % spec.name)
+ create = tty.get_yes_or_no("Create this package?", default=False)
+ if not create:
+ tty.msg("Exiting without creating.")
+ sys.exit(1)
+ else:
+ tty.msg("Running 'spack edit -f %s'" % spec.name)
+ edit_package(spec.name, spack.repo.first_repo(), None, True)
+ return
- if not spec.version.concrete:
- tty.die("spack diy spec must have a single, concrete version.")
+ if not spec.version.concrete:
+ tty.die("spack diy spec must have a single, concrete version.")
- spec.concretize()
- package = spack.db.get(spec)
+ spec.concretize()
+ package = spack.repo.get(spec)
- if package.installed:
- tty.error("Already installed in %s" % package.prefix)
- tty.msg("Uninstall or try adding a version suffix for this DIY build.")
- sys.exit(1)
+ if package.installed:
+ tty.error("Already installed in %s" % package.prefix)
+ tty.msg("Uninstall or try adding a version suffix for this DIY build.")
+ sys.exit(1)
- # Forces the build to run out of the current directory.
- package.stage = DIYStage(os.getcwd())
+ # Forces the build to run out of the current directory.
+ package.stage = DIYStage(os.getcwd())
- # TODO: make this an argument, not a global.
- spack.do_checksum = False
+ # TODO: make this an argument, not a global.
+ spack.do_checksum = False
- package.do_install(
- keep_prefix=args.keep_prefix,
- ignore_deps=args.ignore_deps,
- keep_stage=True) # don't remove source dir for DIY.
+ package.do_install(
+ keep_prefix=args.keep_prefix,
+ ignore_deps=args.ignore_deps,
+ keep_stage=True) # don't remove source dir for DIY.
diff --git a/lib/spack/spack/cmd/doc.py b/lib/spack/spack/cmd/doc.py
index 601ae26e5e..29cadec94f 100644
--- a/lib/spack/spack/cmd/doc.py
+++ b/lib/spack/spack/cmd/doc.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/lib/spack/spack/cmd/edit.py b/lib/spack/spack/cmd/edit.py
index b8764ba391..a20e40df9b 100644
--- a/lib/spack/spack/cmd/edit.py
+++ b/lib/spack/spack/cmd/edit.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -30,6 +30,8 @@ from llnl.util.filesystem import mkdirp, join_path
import spack
import spack.cmd
+from spack.spec import Spec
+from spack.repository import Repo
from spack.util.naming import mod_to_class
description = "Open package files in $EDITOR"
@@ -53,9 +55,16 @@ class ${class_name}(Package):
""")
-def edit_package(name, force=False):
- path = spack.db.filename_for_package_name(name)
+def edit_package(name, repo_path, namespace, force=False):
+ if repo_path:
+ repo = Repo(repo_path)
+ elif namespace:
+ repo = spack.repo.get_repo(namespace)
+ else:
+ repo = spack.repo
+ path = repo.filename_for_package_name(name)
+ spec = Spec(name)
if os.path.exists(path):
if not os.path.isfile(path):
tty.die("Something's wrong. '%s' is not a file!" % path)
@@ -63,13 +72,13 @@ def edit_package(name, force=False):
tty.die("Insufficient permissions on '%s'!" % path)
elif not force:
tty.die("No package '%s'. Use spack create, or supply -f/--force "
- "to edit a new file." % name)
+ "to edit a new file." % spec.name)
else:
mkdirp(os.path.dirname(path))
with open(path, "w") as pkg_file:
pkg_file.write(
package_template.substitute(
- name=name, class_name=mod_to_class(name)))
+ name=spec.name, class_name=mod_to_class(spec.name)))
spack.editor(path)
@@ -78,9 +87,26 @@ def setup_parser(subparser):
subparser.add_argument(
'-f', '--force', dest='force', action='store_true',
help="Open a new file in $EDITOR even if package doesn't exist.")
- subparser.add_argument(
- '-c', '--command', dest='edit_command', action='store_true',
- help="Edit the command with the supplied name instead of a package.")
+
+ excl_args = subparser.add_mutually_exclusive_group()
+
+ # Various filetypes you can edit directly from the cmd line.
+ excl_args.add_argument(
+ '-c', '--command', dest='path', action='store_const',
+ const=spack.cmd.command_path, help="Edit the command with the supplied name.")
+ excl_args.add_argument(
+ '-t', '--test', dest='path', action='store_const',
+ const=spack.test_path, help="Edit the test with the supplied name.")
+ excl_args.add_argument(
+ '-m', '--module', dest='path', action='store_const',
+ const=spack.module_path, help="Edit the main spack module with the supplied name.")
+
+ # Options for editing packages
+ excl_args.add_argument(
+ '-r', '--repo', default=None, help="Path to repo to edit package in.")
+ excl_args.add_argument(
+ '-N', '--namespace', default=None, help="Namespace of package to edit.")
+
subparser.add_argument(
'name', nargs='?', default=None, help="name of package to edit")
@@ -88,19 +114,17 @@ def setup_parser(subparser):
def edit(parser, args):
name = args.name
- if args.edit_command:
- if not name:
- path = spack.cmd.command_path
- else:
- path = join_path(spack.cmd.command_path, name + ".py")
- if not os.path.exists(path):
+ path = spack.packages_path
+ if args.path:
+ path = args.path
+ if name:
+ path = join_path(path, name + ".py")
+ if not args.force and not os.path.exists(path):
tty.die("No command named '%s'." % name)
spack.editor(path)
+ elif name:
+ edit_package(name, args.repo, args.namespace, args.force)
else:
# By default open the directory where packages or commands live.
- if not name:
- path = spack.packages_path
- spack.editor(path)
- else:
- edit_package(name, args.force)
+ spack.editor(path)
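
With this change, edit_package resolves the target repository in a fixed order: an explicit -r path wins, then a -N namespace, then the default repo path. A minimal sketch of that lookup, assuming a Spack checkout on sys.path (the namespace and package name are illustrative):

    import spack
    from spack.repository import Repo

    def resolve_repo(repo_path=None, namespace=None):
        if repo_path:                              # -r/--repo: explicit path wins
            return Repo(repo_path)
        if namespace:                              # -N/--namespace: configured repo
            return spack.repo.get_repo(namespace)
        return spack.repo                          # default: whole repo path

    path = resolve_repo(namespace='builtin').filename_for_package_name('libelf')
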
diff --git a/lib/spack/spack/cmd/env.py b/lib/spack/spack/cmd/env.py
index bde76b5daf..525e955a00 100644
--- a/lib/spack/spack/cmd/env.py
+++ b/lib/spack/spack/cmd/env.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -23,7 +23,7 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
-from external import argparse
+import argparse
import llnl.util.tty as tty
import spack.cmd
import spack.build_environment as build_env
diff --git a/lib/spack/spack/cmd/extensions.py b/lib/spack/spack/cmd/extensions.py
index fc8e6842c3..ccb0fe4e1f 100644
--- a/lib/spack/spack/cmd/extensions.py
+++ b/lib/spack/spack/cmd/extensions.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -23,7 +23,7 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
-from external import argparse
+import argparse
import llnl.util.tty as tty
from llnl.util.tty.colify import colify
@@ -37,7 +37,7 @@ description = "List extensions for package."
def setup_parser(subparser):
format_group = subparser.add_mutually_exclusive_group()
format_group.add_argument(
- '-l', '--long', action='store_const', dest='mode', const='long',
+ '-l', '--long', action='store_true', dest='long',
help='Show dependency hashes as well as versions.')
format_group.add_argument(
'-p', '--paths', action='store_const', dest='mode', const='paths',
@@ -54,7 +54,9 @@ def extensions(parser, args):
if not args.spec:
tty.die("extensions requires a package spec.")
+ #
# Checks
+ #
spec = spack.cmd.parse_specs(args.spec)
if len(spec) > 1:
tty.die("Can only list extensions for one package.")
@@ -70,8 +72,9 @@ def extensions(parser, args):
if not args.mode:
args.mode = 'short'
+ #
# List package names of extensions
- extensions = spack.db.extensions_for(spec)
+ extensions = spack.repo.extensions_for(spec)
if not extensions:
tty.msg("%s has no extensions." % spec.cshort_spec)
return
@@ -79,8 +82,10 @@ def extensions(parser, args):
tty.msg("%d extensions:" % len(extensions))
colify(ext.name for ext in extensions)
+ #
# List specs of installed extensions.
- installed = [s.spec for s in spack.db.installed_extensions_for(spec)]
+ #
+ installed = [s.spec for s in spack.installed_db.installed_extensions_for(spec)]
print
if not installed:
tty.msg("None installed.")
@@ -88,11 +93,13 @@ def extensions(parser, args):
tty.msg("%d installed:" % len(installed))
spack.cmd.find.display_specs(installed, mode=args.mode)
+ #
# List specs of activated extensions.
+ #
activated = spack.install_layout.extension_map(spec)
print
if not activated:
tty.msg("None activated.")
return
tty.msg("%d currently activated:" % len(activated))
- spack.cmd.find.display_specs(activated.values(), mode=args.mode)
+ spack.cmd.find.display_specs(activated.values(), mode=args.mode, long=args.long)
diff --git a/lib/spack/spack/cmd/fetch.py b/lib/spack/spack/cmd/fetch.py
index 0ccebd9486..adad545cae 100644
--- a/lib/spack/spack/cmd/fetch.py
+++ b/lib/spack/spack/cmd/fetch.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -22,7 +22,7 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-from external import argparse
+import argparse
import spack
import spack.cmd
@@ -34,9 +34,12 @@ def setup_parser(subparser):
'-n', '--no-checksum', action='store_true', dest='no_checksum',
help="Do not check packages against checksum")
subparser.add_argument(
+ '-m', '--missing', action='store_true', help="Also fetch all missing dependencies")
+ subparser.add_argument(
+ '-D', '--dependencies', action='store_true', help="Also fetch all dependencies")
+ subparser.add_argument(
'packages', nargs=argparse.REMAINDER, help="specs of packages to fetch")
-
def fetch(parser, args):
if not args.packages:
tty.die("fetch requires at least one package argument")
@@ -46,5 +49,13 @@ def fetch(parser, args):
specs = spack.cmd.parse_specs(args.packages, concretize=True)
for spec in specs:
- package = spack.db.get(spec)
+ if args.missing or args.dependencies:
+ for s in spec.traverse():
+ package = spack.repo.get(s)
+ if args.missing and package.installed:
+ continue
+ package.do_fetch()
+
+ package = spack.repo.get(spec)
package.do_fetch()
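
Here -D walks the whole concretized DAG, while -m additionally skips anything already installed. A sketch of the traversal the new code performs, assuming a Spack checkout on sys.path (the spec name is illustrative):

    import spack
    import spack.cmd

    spec = spack.cmd.parse_specs(['mpileaks'], concretize=True)[0]
    for s in spec.traverse():          # root plus every dependency
        package = spack.repo.get(s)
        if package.installed:          # with -m/--missing, skip installed specs
            continue
        package.do_fetch()
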
diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py
index 3c993990b1..714f1d514b 100644
--- a/lib/spack/spack/cmd/find.py
+++ b/lib/spack/spack/cmd/find.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -25,7 +25,7 @@
import sys
import collections
import itertools
-from external import argparse
+import argparse
from StringIO import StringIO
import llnl.util.tty as tty
@@ -41,6 +41,9 @@ description ="Find installed spack packages"
def setup_parser(subparser):
format_group = subparser.add_mutually_exclusive_group()
format_group.add_argument(
+ '-s', '--short', action='store_const', dest='mode', const='short',
+ help='Show only specs (default)')
+ format_group.add_argument(
'-p', '--paths', action='store_const', dest='mode', const='paths',
help='Show paths to package install directories')
format_group.add_argument(
@@ -48,13 +51,26 @@ def setup_parser(subparser):
help='Show full dependency DAG of installed packages')
subparser.add_argument(
- '-l', '--long', action='store_true', dest='long',
+ '-l', '--long', action='store_true',
help='Show dependency hashes as well as versions.')
subparser.add_argument(
- '-L', '--very-long', action='store_true', dest='very_long',
+ '-L', '--very-long', action='store_true',
help='Show dependency hashes as well as versions.')
subparser.add_argument(
+ '-u', '--unknown', action='store_true',
+ help='Show only specs Spack does not have a package for.')
+ subparser.add_argument(
+ '-m', '--missing', action='store_true',
+ help='Show missing dependencies as well as installed specs.')
+ subparser.add_argument(
+ '-M', '--only-missing', action='store_true',
+ help='Show only missing dependencies.')
+ subparser.add_argument(
+ '-N', '--namespace', action='store_true',
+ help='Show fully qualified package names.')
+
+ subparser.add_argument(
'query_specs', nargs=argparse.REMAINDER,
help='optional specs to filter results')
@@ -66,6 +82,7 @@ def gray_hash(spec, length):
def display_specs(specs, **kwargs):
mode = kwargs.get('mode', 'short')
hashes = kwargs.get('long', False)
+ namespace = kwargs.get('namespace', False)
hlen = 7
if kwargs.get('very_long', False):
@@ -87,7 +104,8 @@ def display_specs(specs, **kwargs):
specs = index[(architecture,compiler)]
specs.sort()
- abbreviated = [s.format('$_$@$+', color=True) for s in specs]
+ nfmt = '.' if namespace else '_'
+ abbreviated = [s.format('$%s$@$+' % nfmt, color=True) for s in specs]
if mode == 'paths':
# Print one spec per line along with prefix path
width = max(len(s) for s in abbreviated)
@@ -102,7 +120,7 @@ def display_specs(specs, **kwargs):
elif mode == 'deps':
for spec in specs:
print spec.tree(
- format='$_$@$+',
+ format='$%s$@$+' % nfmt,
color=True,
indent=4,
prefix=(lambda s: gray_hash(s, hlen)) if hashes else None)
@@ -112,7 +130,8 @@ def display_specs(specs, **kwargs):
string = ""
if hashes:
string += gray_hash(s, hlen) + ' '
- string += s.format('$-_$@$+', color=True)
+ string += s.format('$-%s$@$+' % nfmt, color=True)
+
return string
colify(fmt(s) for s in specs)
@@ -126,7 +145,7 @@ def find(parser, args):
# Filter out specs that don't exist.
query_specs = spack.cmd.parse_specs(args.query_specs)
query_specs, nonexisting = partition_list(
- query_specs, lambda s: spack.db.exists(s.name))
+ query_specs, lambda s: spack.repo.exists(s.name))
if nonexisting:
msg = "No such package%s: " % ('s' if len(nonexisting) > 1 else '')
@@ -136,11 +155,21 @@ def find(parser, args):
if not query_specs:
return
+ # Set up query arguments.
+ installed, known = True, any
+ if args.only_missing:
+ installed = False
+ elif args.missing:
+ installed = any
+ if args.unknown:
+ known = False
+ q_args = { 'installed' : installed, 'known' : known }
+
# Get all the specs the user asked for
if not query_specs:
- specs = set(spack.db.installed_package_specs())
+ specs = set(spack.installed_db.query(**q_args))
else:
- results = [set(spack.db.get_installed(qs)) for qs in query_specs]
+ results = [set(spack.installed_db.query(qs, **q_args)) for qs in query_specs]
specs = set.union(*results)
if not args.mode:
@@ -150,4 +179,5 @@ def find(parser, args):
tty.msg("%d installed packages." % len(specs))
display_specs(specs, mode=args.mode,
long=args.long,
- very_long=args.very_long)
+ very_long=args.very_long,
+ namespace=args.namespace)
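
The -N flag only changes the format token handed to Spec.format: '$_' prints the plain package name, while '$.' prints the name qualified by its repo namespace. A self-contained illustration of the toggle:

    def spec_format(namespace=False):
        # '$.' renders 'namespace.name'; '$_' renders just 'name'.
        nfmt = '.' if namespace else '_'
        return '$%s$@$+' % nfmt

    assert spec_format() == '$_$@$+'
    assert spec_format(namespace=True) == '$.$@$+'
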
diff --git a/lib/spack/spack/cmd/graph.py b/lib/spack/spack/cmd/graph.py
index cb93a1b543..586a852351 100644
--- a/lib/spack/spack/cmd/graph.py
+++ b/lib/spack/spack/cmd/graph.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -22,7 +22,7 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-from external import argparse
+import argparse
import spack
import spack.cmd
diff --git a/lib/spack/spack/cmd/help.py b/lib/spack/spack/cmd/help.py
index eae3aabd97..841a0d5bcb 100644
--- a/lib/spack/spack/cmd/help.py
+++ b/lib/spack/spack/cmd/help.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/lib/spack/spack/cmd/info.py b/lib/spack/spack/cmd/info.py
index c6209523f0..e7abe7f4a5 100644
--- a/lib/spack/spack/cmd/info.py
+++ b/lib/spack/spack/cmd/info.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -65,11 +65,21 @@ def print_text_info(pkg):
print "None"
else:
pad = padder(pkg.variants, 4)
+
+ maxv = max(len(v) for v in sorted(pkg.variants))
+ fmt = "%%-%ss%%-10s%%s" % (maxv + 4)
+
+ print " " + fmt % ('Name', 'Default', 'Description')
+ print
for name in sorted(pkg.variants):
v = pkg.variants[name]
- print " %s%s" % (
- pad(('+' if v.default else '-') + name + ':'),
- "\n".join(textwrap.wrap(v.description)))
+ default = 'on' if v.default else 'off'
+
+ lines = textwrap.wrap(v.description)
+ lines[1:] = [" " + (" " * maxv) + l for l in lines[1:]]
+ desc = "\n".join(lines)
+
+ print " " + fmt % (name, default, desc)
print
print "Dependencies:"
@@ -95,5 +105,5 @@ def print_text_info(pkg):
def info(parser, args):
- pkg = spack.db.get(args.name)
+ pkg = spack.repo.get(args.name)
print_text_info(pkg)
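
The new variant table sizes its first column from the longest variant name and hang-indents wrapped description lines under the Description column. A runnable sketch of the same layout, with made-up variants:

    import textwrap

    variants = {'mpi': ('on', 'Build with MPI support'),
                'debug': ('off', 'Build with debugging symbols and extra runtime checks')}

    maxv = max(len(v) for v in variants)
    fmt = "%%-%ds%%-10s%%s" % (maxv + 4)       # name, default, description columns

    print("    " + fmt % ('Name', 'Default', 'Description'))
    for name in sorted(variants):
        default, desc = variants[name]
        lines = textwrap.wrap(desc, width=40)
        # continuation lines line up under the Description column
        lines[1:] = ["    " + " " * (maxv + 14) + l for l in lines[1:]]
        print("    " + fmt % (name, default, "\n".join(lines)))
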
diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py
index acb688a092..1fef750c80 100644
--- a/lib/spack/spack/cmd/install.py
+++ b/lib/spack/spack/cmd/install.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -22,7 +22,7 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-from external import argparse
+import argparse
import llnl.util.tty as tty
@@ -70,11 +70,12 @@ def install(parser, args):
specs = spack.cmd.parse_specs(args.packages, concretize=True)
for spec in specs:
- package = spack.db.get(spec)
- package.do_install(
- keep_prefix=args.keep_prefix,
- keep_stage=args.keep_stage,
- ignore_deps=args.ignore_deps,
- make_jobs=args.jobs,
- verbose=args.verbose,
- fake=args.fake)
+ package = spack.repo.get(spec)
+ with spack.installed_db.write_transaction():
+ package.do_install(
+ keep_prefix=args.keep_prefix,
+ keep_stage=args.keep_stage,
+ ignore_deps=args.ignore_deps,
+ make_jobs=args.jobs,
+ verbose=args.verbose,
+ fake=args.fake)
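
Wrapping do_install in installed_db.write_transaction() holds the database write lock for the duration of the install, so the new record is committed atomically even when several spack processes run at once. A sketch, assuming a Spack checkout on sys.path (the spec is illustrative):

    import spack
    import spack.cmd

    spec = spack.cmd.parse_specs(['libelf'], concretize=True)[0]
    package = spack.repo.get(spec)
    # Lock the installed-package database for the whole install.
    with spack.installed_db.write_transaction():
        package.do_install()
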
diff --git a/lib/spack/spack/cmd/list.py b/lib/spack/spack/cmd/list.py
index 1f0978a18e..7c50ccb9cd 100644
--- a/lib/spack/spack/cmd/list.py
+++ b/lib/spack/spack/cmd/list.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -24,7 +24,7 @@
##############################################################################
import sys
import llnl.util.tty as tty
-from external import argparse
+import argparse
from llnl.util.tty.colify import colify
import spack
@@ -43,7 +43,7 @@ def setup_parser(subparser):
def list(parser, args):
# Start with all package names.
- pkgs = spack.db.all_package_names()
+ pkgs = spack.repo.all_package_names()
# filter if a filter arg was provided
if args.filter:
diff --git a/lib/spack/spack/cmd/load.py b/lib/spack/spack/cmd/load.py
index 06574d9725..30d86c3b01 100644
--- a/lib/spack/spack/cmd/load.py
+++ b/lib/spack/spack/cmd/load.py
@@ -6,7 +6,7 @@
# Written by David Beckingsale, david@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -22,7 +22,7 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-from external import argparse
+import argparse
import spack.modules
description ="Add package to environment using modules."
diff --git a/lib/spack/spack/cmd/location.py b/lib/spack/spack/cmd/location.py
index e8e9c3f277..307ee8982d 100644
--- a/lib/spack/spack/cmd/location.py
+++ b/lib/spack/spack/cmd/location.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -24,7 +24,7 @@
##############################################################################
import os
import sys
-from external import argparse
+import argparse
import llnl.util.tty as tty
from llnl.util.filesystem import join_path
@@ -32,7 +32,7 @@ from llnl.util.filesystem import join_path
import spack
import spack.cmd
-description="Print out locations of various diectories used by Spack"
+description="Print out locations of various directories used by Spack"
def setup_parser(subparser):
global directories
@@ -72,7 +72,7 @@ def location(parser, args):
print spack.prefix
elif args.packages:
- print spack.db.root
+ print spack.repo.root
elif args.stages:
print spack.stage_path
@@ -94,12 +94,12 @@ def location(parser, args):
if args.package_dir:
# This one just needs the spec name.
- print join_path(spack.db.root, spec.name)
+ print join_path(spack.repo.root, spec.name)
else:
# These versions need concretized specs.
spec.concretize()
- pkg = spack.db.get(spec)
+ pkg = spack.repo.get(spec)
if args.stage_dir:
print pkg.stage.path
diff --git a/lib/spack/spack/cmd/md5.py b/lib/spack/spack/cmd/md5.py
index dfa1be412b..ef1e4f3475 100644
--- a/lib/spack/spack/cmd/md5.py
+++ b/lib/spack/spack/cmd/md5.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -24,7 +24,7 @@
##############################################################################
import os
import hashlib
-from external import argparse
+import argparse
import llnl.util.tty as tty
from llnl.util.filesystem import *
diff --git a/lib/spack/spack/cmd/mirror.py b/lib/spack/spack/cmd/mirror.py
index baf64d30fc..885483a840 100644
--- a/lib/spack/spack/cmd/mirror.py
+++ b/lib/spack/spack/cmd/mirror.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -26,7 +26,7 @@ import os
import sys
from datetime import datetime
-from external import argparse
+import argparse
import llnl.util.tty as tty
from llnl.util.tty.colify import colify
@@ -36,6 +36,7 @@ import spack.config
import spack.mirror
from spack.spec import Spec
from spack.error import SpackError
+from spack.util.spack_yaml import syaml_dict
description = "Manage mirrors."
@@ -47,6 +48,7 @@ def setup_parser(subparser):
sp = subparser.add_subparsers(
metavar='SUBCOMMAND', dest='mirror_command')
+ # Create
create_parser = sp.add_parser('create', help=mirror_create.__doc__)
create_parser.add_argument('-d', '--directory', default=None,
help="Directory in which to create mirror.")
@@ -55,18 +57,34 @@ def setup_parser(subparser):
create_parser.add_argument(
'-f', '--file', help="File with specs of packages to put in mirror.")
create_parser.add_argument(
+ '-D', '--dependencies', action='store_true', help="Also fetch all dependencies")
+ create_parser.add_argument(
'-o', '--one-version-per-spec', action='store_const', const=1, default=0,
help="Only fetch one 'preferred' version per spec, not all known versions.")
+ scopes = spack.config.config_scopes
+
+ # Add
add_parser = sp.add_parser('add', help=mirror_add.__doc__)
add_parser.add_argument('name', help="Mnemonic name for mirror.")
add_parser.add_argument(
'url', help="URL of mirror directory created by 'spack mirror create'.")
+ add_parser.add_argument(
+ '--scope', choices=scopes, default=spack.cmd.default_modify_scope,
+ help="Configuration scope to modify.")
- remove_parser = sp.add_parser('remove', help=mirror_remove.__doc__)
+ # Remove
+ remove_parser = sp.add_parser('remove', aliases=['rm'], help=mirror_remove.__doc__)
remove_parser.add_argument('name')
+ remove_parser.add_argument(
+ '--scope', choices=scopes, default=spack.cmd.default_modify_scope,
+ help="Configuration scope to modify.")
+ # List
list_parser = sp.add_parser('list', help=mirror_list.__doc__)
+ list_parser.add_argument(
+ '--scope', choices=scopes, default=spack.cmd.default_list_scope,
+ help="Configuration scope to read from.")
def mirror_add(args):
@@ -75,31 +93,51 @@ def mirror_add(args):
if url.startswith('/'):
url = 'file://' + url
- newmirror = [ { args.name : url } ]
- spack.config.add_to_mirror_config(newmirror)
+ mirrors = spack.config.get_config('mirrors', scope=args.scope)
+ if not mirrors:
+ mirrors = syaml_dict()
+
+ for name, u in mirrors.items():
+ if name == args.name:
+ tty.die("Mirror with name %s already exists." % name)
+ if u == url:
+ tty.die("Mirror with url %s already exists." % url)
+
+ items = [(n,u) for n,u in mirrors.items()]
+ items.insert(0, (args.name, url))
+ mirrors = syaml_dict(items)
+ spack.config.update_config('mirrors', mirrors, scope=args.scope)
def mirror_remove(args):
"""Remove a mirror by name."""
name = args.name
- rmd_something = spack.config.remove_from_config('mirrors', name)
- if not rmd_something:
- tty.die("No such mirror: %s" % name)
+ mirrors = spack.config.get_config('mirrors', scope=args.scope)
+ if not mirrors:
+ mirrors = syaml_dict()
+
+ if name not in mirrors:
+ tty.die("No mirror with name %s" % name)
+
+ old_value = mirrors.pop(name)
+ spack.config.update_config('mirrors', mirrors, scope=args.scope)
+ tty.msg("Removed mirror %s with url %s." % (name, old_value))
def mirror_list(args):
"""Print out available mirrors to the console."""
- mirrors = spack.config.get_mirror_config()
+ mirrors = spack.config.get_config('mirrors', scope=args.scope)
if not mirrors:
tty.msg("No mirrors configured.")
return
- max_len = max(len(name) for name,path in mirrors)
+ max_len = max(len(n) for n in mirrors.keys())
fmt = "%%-%ds%%s" % (max_len + 4)
- for name, val in mirrors:
- print fmt % (name, val)
+ for name in mirrors:
+ print fmt % (name, mirrors[name])
def _read_specs_from_file(filename):
@@ -118,7 +156,7 @@ def mirror_create(args):
"""Create a directory to be used as a spack mirror, and fill it with
package archives."""
# try to parse specs from the command line first.
- specs = spack.cmd.parse_specs(args.specs)
+ specs = spack.cmd.parse_specs(args.specs, concretize=True)
# If there is a file, parse each line as a spec and add it to the list.
if args.file:
@@ -128,9 +166,17 @@ def mirror_create(args):
# If nothing is passed, use all packages.
if not specs:
- specs = [Spec(n) for n in spack.db.all_package_names()]
+ specs = [Spec(n) for n in spack.repo.all_package_names()]
specs.sort(key=lambda s: s.format("$_$@").lower())
+ if args.dependencies:
+ new_specs = set()
+ for spec in specs:
+ spec.concretize()
+ for s in spec.traverse():
+ new_specs.add(s)
+ specs = list(new_specs)
+
# Default name for directory is spack-mirror-<DATESTAMP>
directory = args.directory
if not directory:
@@ -165,6 +211,7 @@ def mirror(parser, args):
action = { 'create' : mirror_create,
'add' : mirror_add,
'remove' : mirror_remove,
+ 'rm' : mirror_remove,
'list' : mirror_list }
action[args.mirror_command](args)
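
Mirrors now live in an ordered YAML mapping, and mirror_add inserts at the front so the newest mirror takes precedence when fetching. The same insertion, with OrderedDict standing in for spack's syaml_dict:

    from collections import OrderedDict  # stand-in for syaml_dict

    mirrors = OrderedDict([('lab', 'file:///mirrors/lab')])
    items = list(mirrors.items())
    items.insert(0, ('local', 'file:///mirrors/local'))  # newest first
    mirrors = OrderedDict(items)
    assert list(mirrors) == ['local', 'lab']
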
diff --git a/lib/spack/spack/cmd/module.py b/lib/spack/spack/cmd/module.py
index 34f0855a50..a5a9570eb5 100644
--- a/lib/spack/spack/cmd/module.py
+++ b/lib/spack/spack/cmd/module.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -25,7 +25,7 @@
import sys
import os
import shutil
-from external import argparse
+import argparse
import llnl.util.tty as tty
from llnl.util.lang import partition_list
@@ -65,7 +65,7 @@ def module_find(mtype, spec_array):
tty.die("You can only pass one spec.")
spec = specs[0]
- specs = [s for s in spack.db.installed_package_specs() if s.satisfies(spec)]
+ specs = spack.installed_db.query(spec)
if len(specs) == 0:
tty.die("No installed packages match spec %s" % spec)
@@ -86,7 +86,7 @@ def module_find(mtype, spec_array):
def module_refresh():
"""Regenerate all module files for installed packages known to
spack (some packages may no longer exist)."""
- specs = [s for s in spack.db.installed_known_package_specs()]
+ specs = [s for s in spack.installed_db.query(installed=True, known=True)]
for name, cls in module_types.items():
tty.msg("Regenerating %s module files." % name)
diff --git a/lib/spack/spack/cmd/package-list.py b/lib/spack/spack/cmd/package-list.py
index f048482845..5e37d5c16b 100644
--- a/lib/spack/spack/cmd/package-list.py
+++ b/lib/spack/spack/cmd/package-list.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -34,7 +34,7 @@ description = "Print a list of all packages in reStructuredText."
def github_url(pkg):
"""Link to a package file on github."""
- return ("https://github.com/scalability-llnl/spack/blob/master/var/spack/packages/%s/package.py" %
+ return ("https://github.com/llnl/spack/blob/master/var/spack/packages/%s/package.py" %
pkg.name)
@@ -48,7 +48,7 @@ def rst_table(elts):
def print_rst_package_list():
"""Print out information on all packages in restructured text."""
- pkgs = sorted(spack.db.all_packages(), key=lambda s:s.name.lower())
+ pkgs = sorted(spack.repo.all_packages(), key=lambda s:s.name.lower())
print ".. _package-list:"
print
diff --git a/lib/spack/spack/cmd/patch.py b/lib/spack/spack/cmd/patch.py
index a6556c4828..44fc8696db 100644
--- a/lib/spack/spack/cmd/patch.py
+++ b/lib/spack/spack/cmd/patch.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -22,7 +22,7 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-from external import argparse
+import argparse
import spack.cmd
import spack
@@ -47,5 +47,5 @@ def patch(parser, args):
specs = spack.cmd.parse_specs(args.packages, concretize=True)
for spec in specs:
- package = spack.db.get(spec)
+ package = spack.repo.get(spec)
package.do_patch()
diff --git a/lib/spack/spack/cmd/pkg.py b/lib/spack/spack/cmd/pkg.py
index 055b7c2062..cf478d3763 100644
--- a/lib/spack/spack/cmd/pkg.py
+++ b/lib/spack/spack/cmd/pkg.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -24,7 +24,7 @@
##############################################################################
import os
-from external import argparse
+import argparse
import llnl.util.tty as tty
from llnl.util.tty.colify import colify
@@ -79,13 +79,13 @@ def list_packages(rev):
git = get_git()
relpath = spack.packages_path[len(spack.prefix + os.path.sep):] + os.path.sep
output = git('ls-tree', '--full-tree', '--name-only', rev, relpath,
- return_output=True)
+ output=str)
return sorted(line[len(relpath):] for line in output.split('\n') if line)
def pkg_add(args):
for pkg_name in args.packages:
- filename = spack.db.filename_for_package_name(pkg_name)
+ filename = spack.repo.filename_for_package_name(pkg_name)
if not os.path.isfile(filename):
tty.die("No such package: %s. Path does not exist:" % pkg_name, filename)
diff --git a/lib/spack/spack/cmd/providers.py b/lib/spack/spack/cmd/providers.py
index 2bcdc9fba2..49d6ac192a 100644
--- a/lib/spack/spack/cmd/providers.py
+++ b/lib/spack/spack/cmd/providers.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -23,7 +23,7 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
-from external import argparse
+import argparse
from llnl.util.tty.colify import colify
@@ -39,4 +39,4 @@ def setup_parser(subparser):
def providers(parser, args):
for spec in spack.cmd.parse_specs(args.vpkg_spec):
- colify(sorted(spack.db.providers_for(spec)), indent=4)
+ colify(sorted(spack.repo.providers_for(spec)), indent=4)
diff --git a/lib/spack/spack/cmd/purge.py b/lib/spack/spack/cmd/purge.py
index 9b96937149..d5d7513c46 100644
--- a/lib/spack/spack/cmd/purge.py
+++ b/lib/spack/spack/cmd/purge.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/lib/spack/spack/cmd/python.py b/lib/spack/spack/cmd/python.py
index 7bd2e45ce0..5325e8fd9a 100644
--- a/lib/spack/spack/cmd/python.py
+++ b/lib/spack/spack/cmd/python.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -25,20 +25,23 @@
import os
import sys
import code
-from external import argparse
+import argparse
import platform
import spack
def setup_parser(subparser):
subparser.add_argument(
+ '-c', dest='python_command', help='Command to execute.')
+ subparser.add_argument(
'python_args', nargs=argparse.REMAINDER, help="File to run plus arguments.")
description = "Launch an interpreter as spack would launch a command"
def python(parser, args):
# Fake a main python shell by setting __name__ to __main__.
- console = code.InteractiveConsole({'__name__' : '__main__'})
+ console = code.InteractiveConsole({'__name__' : '__main__',
+ 'spack' : spack})
if "PYTHONSTARTUP" in os.environ:
startup_file = os.environ["PYTHONSTARTUP"]
@@ -47,7 +50,10 @@ def python(parser, args):
console.runsource(startup.read(), startup_file, 'exec')
python_args = args.python_args
- if python_args:
+ python_command = args.python_command
+ if python_command:
+ console.runsource(python_command)
+ elif python_args:
sys.argv = python_args
with open(python_args[0]) as file:
console.runsource(file.read(), python_args[0], 'exec')
diff --git a/lib/spack/spack/cmd/reindex.py b/lib/spack/spack/cmd/reindex.py
new file mode 100644
index 0000000000..2b30ef8814
--- /dev/null
+++ b/lib/spack/spack/cmd/reindex.py
@@ -0,0 +1,31 @@
+##############################################################################
+# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import argparse
+import spack
+
+description = "Rebuild Spack's package database."
+
+def reindex(parser, args):
+ spack.installed_db.reindex(spack.install_layout)
diff --git a/lib/spack/spack/cmd/repo.py b/lib/spack/spack/cmd/repo.py
new file mode 100644
index 0000000000..34c755fb67
--- /dev/null
+++ b/lib/spack/spack/cmd/repo.py
@@ -0,0 +1,218 @@
+##############################################################################
+# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import os
+import re
+import shutil
+
+import argparse
+import llnl.util.tty as tty
+from llnl.util.filesystem import join_path, mkdirp
+
+import spack.spec
+import spack.config
+import spack.cmd
+from spack.util.environment import get_path
+from spack.repository import *
+
+description = "Manage package source repositories."
+
+def setup_parser(subparser):
+ sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='repo_command')
+ scopes = spack.config.config_scopes
+
+ # Create
+ create_parser = sp.add_parser('create', help=repo_create.__doc__)
+ create_parser.add_argument(
+ 'directory', help="Directory to create the repo in.")
+ create_parser.add_argument(
+ 'namespace', help="Namespace to identify packages in the repository. "
+ "Defaults to the directory name.", nargs='?')
+
+ # List
+ list_parser = sp.add_parser('list', help=repo_list.__doc__)
+ list_parser.add_argument(
+ '--scope', choices=scopes, default=spack.cmd.default_list_scope,
+ help="Configuration scope to read from.")
+
+ # Add
+ add_parser = sp.add_parser('add', help=repo_add.__doc__)
+ add_parser.add_argument('path', help="Path to a Spack package repository directory.")
+ add_parser.add_argument(
+ '--scope', choices=scopes, default=spack.cmd.default_modify_scope,
+ help="Configuration scope to modify.")
+
+ # Remove
+ remove_parser = sp.add_parser('remove', help=repo_remove.__doc__, aliases=['rm'])
+ remove_parser.add_argument(
+ 'path_or_namespace',
+ help="Path or namespace of a Spack package repository.")
+ remove_parser.add_argument(
+ '--scope', choices=scopes, default=spack.cmd.default_modify_scope,
+ help="Configuration scope to modify.")
+
+
+def repo_create(args):
+ """Create a new package repository."""
+ root = canonicalize_path(args.directory)
+ namespace = args.namespace
+
+ if not args.namespace:
+ namespace = os.path.basename(root)
+
+ if not re.match(r'\w[\.\w-]*$', namespace):
+ tty.die("'%s' is not a valid namespace." % namespace)
+
+ existed = False
+ if os.path.exists(root):
+ if os.path.isfile(root):
+ tty.die('File %s already exists and is not a directory' % root)
+ elif os.path.isdir(root):
+ if not os.access(root, os.R_OK | os.W_OK):
+ tty.die('Cannot create new repo in %s: cannot access directory.' % root)
+ if os.listdir(root):
+ tty.die('Cannot create new repo in %s: directory is not empty.' % root)
+ existed = True
+
+ full_path = os.path.realpath(root)
+ parent = os.path.dirname(full_path)
+ if not os.access(parent, os.R_OK | os.W_OK):
+ tty.die("Cannot create repository in %s: can't access parent!" % root)
+
+ try:
+ config_path = os.path.join(root, repo_config_name)
+ packages_path = os.path.join(root, packages_dir_name)
+
+ mkdirp(packages_path)
+ with open(config_path, 'w') as config:
+ config.write("repo:\n")
+ config.write(" namespace: '%s'\n" % namespace)
+
+ except (IOError, OSError) as e:
+ # Clean up anything we created before reporting the failure;
+ # tty.die() exits, so this must happen first.
+ if existed:
+ shutil.rmtree(config_path, ignore_errors=True)
+ shutil.rmtree(packages_path, ignore_errors=True)
+ else:
+ shutil.rmtree(root, ignore_errors=True)
+
+ tty.die('Failed to create new repository in %s.' % root,
+ "Caused by %s: %s" % (type(e), e))
+
+ tty.msg("Created repo with namespace '%s'." % namespace)
+ tty.msg("To register it with spack, run this command:",
+ 'spack repo add %s' % full_path)
+
+
+def repo_add(args):
+ """Add a package source to Spack's configuration."""
+ path = args.path
+
+ # real_path is absolute and handles substitution.
+ canon_path = canonicalize_path(path)
+
+ # check if the path exists
+ if not os.path.exists(canon_path):
+ tty.die("No such file or directory: '%s'." % path)
+
+ # Make sure the path is a directory.
+ if not os.path.isdir(canon_path):
+ tty.die("Not a Spack repository: '%s'." % path)
+
+ # Make sure it's actually a spack repository by constructing it.
+ repo = Repo(canon_path)
+
+ # If that succeeds, finally add it to the configuration.
+ repos = spack.config.get_config('repos', args.scope)
+ if not repos: repos = []
+
+ if repo.root in repos or path in repos:
+ tty.die("Repository is already registered with Spack: '%s'" % path)
+
+ repos.insert(0, canon_path)
+ spack.config.update_config('repos', repos, args.scope)
+ tty.msg("Created repo with namespace '%s'." % repo.namespace)
+
+
+def repo_remove(args):
+ """Remove a repository from Spack's configuration."""
+ repos = spack.config.get_config('repos', args.scope)
+ path_or_namespace = args.path_or_namespace
+
+ # If the argument is a path, remove that repository from config.
+ canon_path = canonicalize_path(path_or_namespace)
+ for repo_path in repos:
+ repo_canon_path = canonicalize_path(repo_path)
+ if canon_path == repo_canon_path:
+ repos.remove(repo_path)
+ spack.config.update_config('repos', repos, args.scope)
+ tty.msg("Removed repository '%s'." % repo_path)
+ return
+
+ # If it is a namespace, remove corresponding repo
+ for path in repos:
+ try:
+ repo = Repo(path)
+ if repo.namespace == path_or_namespace:
+ repos.remove(path)
+ spack.config.update_config('repos', repos, args.scope)
+ tty.msg("Removed repository '%s' with namespace %s."
+ % (repo.root, repo.namespace))
+ return
+ except RepoError:
+ continue
+
+ tty.die("No repository with path or namespace: '%s'"
+ % path_or_namespace)
+
+
+def repo_list(args):
+ """Show registered repositories and their namespaces."""
+ roots = spack.config.get_config('repos', args.scope)
+ repos = []
+ for r in roots:
+ try:
+ repos.append(Repo(r))
+ except RepoError:
+ continue
+
+ msg = "%d package repositor" % len(repos)
+ msg += "y." if len(repos) == 1 else "ies."
+ tty.msg(msg)
+
+ if not repos:
+ return
+
+ max_ns_len = max(len(r.namespace) for r in repos)
+ fmt = "%%-%ds%%s" % (max_ns_len + 4)
+ for repo in repos:
+ print fmt % (repo.namespace, repo.root)
+
+
+def repo(parser, args):
+ action = { 'create' : repo_create,
+ 'list' : repo_list,
+ 'add' : repo_add,
+ 'remove' : repo_remove,
+ 'rm' : repo_remove}
+ action[args.repo_command](args)
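
On disk, repo_create writes nothing more than a config file naming the namespace plus an empty packages directory. A sketch of the resulting layout, assuming the conventional file names repo.yaml and packages/ behind the repo_config_name and packages_dir_name constants:

    import os

    def create_repo_skeleton(root, namespace):
        # <root>/repo.yaml   -- declares the repo's namespace
        # <root>/packages/   -- one subdirectory per package
        os.makedirs(os.path.join(root, 'packages'))
        with open(os.path.join(root, 'repo.yaml'), 'w') as f:
            f.write("repo:\n  namespace: '%s'\n" % namespace)

    create_repo_skeleton('/tmp/demo-repo', 'demo')  # illustrative path
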
diff --git a/lib/spack/spack/cmd/restage.py b/lib/spack/spack/cmd/restage.py
index e735a12c32..540c2ef2a5 100644
--- a/lib/spack/spack/cmd/restage.py
+++ b/lib/spack/spack/cmd/restage.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -22,7 +22,7 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-from external import argparse
+import argparse
import llnl.util.tty as tty
@@ -42,5 +42,5 @@ def restage(parser, args):
specs = spack.cmd.parse_specs(args.packages, concretize=True)
for spec in specs:
- package = spack.db.get(spec)
+ package = spack.repo.get(spec)
package.do_restage()
diff --git a/lib/spack/spack/cmd/spec.py b/lib/spack/spack/cmd/spec.py
index 407519313c..43a106ea37 100644
--- a/lib/spack/spack/cmd/spec.py
+++ b/lib/spack/spack/cmd/spec.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -22,7 +22,7 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-from external import argparse
+import argparse
import spack.cmd
import llnl.util.tty as tty
diff --git a/lib/spack/spack/cmd/stage.py b/lib/spack/spack/cmd/stage.py
index f3dc97be17..5786780efb 100644
--- a/lib/spack/spack/cmd/stage.py
+++ b/lib/spack/spack/cmd/stage.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -23,7 +23,7 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
-from external import argparse
+import argparse
import llnl.util.tty as tty
import spack
@@ -49,5 +49,5 @@ def stage(parser, args):
specs = spack.cmd.parse_specs(args.specs, concretize=True)
for spec in specs:
- package = spack.db.get(spec)
+ package = spack.repo.get(spec)
package.do_stage()
diff --git a/lib/spack/spack/cmd/test-install.py b/lib/spack/spack/cmd/test-install.py
new file mode 100644
index 0000000000..74017f59fb
--- /dev/null
+++ b/lib/spack/spack/cmd/test-install.py
@@ -0,0 +1,211 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import argparse
+import xml.etree.ElementTree as ET
+import itertools
+import re
+import os
+import codecs
+
+import llnl.util.tty as tty
+from llnl.util.filesystem import *
+
+import spack
+from spack.build_environment import InstallError
+from spack.fetch_strategy import FetchError
+import spack.cmd
+
+description = "Treat package installations as unit tests and output formatted test results"
+
+def setup_parser(subparser):
+ subparser.add_argument(
+ '-j', '--jobs', action='store', type=int,
+ help="Explicitly set number of make jobs. Default is #cpus.")
+
+ subparser.add_argument(
+ '-n', '--no-checksum', action='store_true', dest='no_checksum',
+ help="Do not check packages against checksum")
+
+ subparser.add_argument(
+ '-o', '--output', action='store', help="test output goes in this file")
+
+ subparser.add_argument(
+ 'package', nargs=argparse.REMAINDER, help="spec of package to install")
+
+
+class JunitResultFormat(object):
+ def __init__(self):
+ self.root = ET.Element('testsuite')
+ self.tests = []
+
+ def add_test(self, buildId, testResult, buildInfo=None):
+ self.tests.append((buildId, testResult, buildInfo))
+
+ def write_to(self, stream):
+ self.root.set('tests', '{0}'.format(len(self.tests)))
+ for buildId, testResult, buildInfo in self.tests:
+ testcase = ET.SubElement(self.root, 'testcase')
+ testcase.set('classname', buildId.name)
+ testcase.set('name', buildId.stringId())
+ if testResult == TestResult.FAILED:
+ failure = ET.SubElement(testcase, 'failure')
+ failure.set('type', "Build Error")
+ failure.text = buildInfo
+ elif testResult == TestResult.SKIPPED:
+ skipped = ET.SubElement(testcase, 'skipped')
+ skipped.set('type', "Skipped Build")
+ skipped.text = buildInfo
+ ET.ElementTree(self.root).write(stream)
+
+
+class TestResult(object):
+ PASSED = 0
+ FAILED = 1
+ SKIPPED = 2
+
+
+class BuildId(object):
+ def __init__(self, spec):
+ self.name = spec.name
+ self.version = spec.version
+ self.hashId = spec.dag_hash()
+
+ def stringId(self):
+ return "-".join(str(x) for x in (self.name, self.version, self.hashId))
+
+ def __hash__(self):
+ return hash((self.name, self.version, self.hashId))
+
+ def __eq__(self, other):
+ if not isinstance(other, BuildId):
+ return False
+
+ return ((self.name, self.version, self.hashId) ==
+ (other.name, other.version, other.hashId))
+
+
+def fetch_log(path):
+ if not os.path.exists(path):
+ return list()
+ with codecs.open(path, 'rb', 'utf-8') as F:
+ return list(line.strip() for line in F.readlines())
+
+
+def failed_dependencies(spec):
+ return set(childSpec for childSpec in spec.dependencies.itervalues() if not
+ spack.repo.get(childSpec).installed)
+
+
+def create_test_output(topSpec, newInstalls, output, getLogFunc=fetch_log):
+ # Post-order traversal is not strictly required but it makes sense to output
+ # tests for dependencies first.
+ for spec in topSpec.traverse(order='post'):
+ if spec not in newInstalls:
+ continue
+
+ failedDeps = failed_dependencies(spec)
+ package = spack.repo.get(spec)
+ if failedDeps:
+ result = TestResult.SKIPPED
+ dep = iter(failedDeps).next()
+ depBID = BuildId(dep)
+ errOutput = "Skipped due to failed dependency: {0}".format(
+ depBID.stringId())
+ elif (not package.installed) and (not package.stage.source_path):
+ result = TestResult.FAILED
+ errOutput = "Failure to fetch package resources."
+ elif not package.installed:
+ result = TestResult.FAILED
+ lines = getLogFunc(package.build_log_path)
+ errMessages = list(line for line in lines if
+ re.search('error:', line, re.IGNORECASE))
+ errOutput = errMessages if errMessages else lines[-10:]
+ errOutput = '\n'.join(itertools.chain(
+ [spec.to_yaml(), "Errors:"], errOutput,
+ ["Build Log:", package.build_log_path]))
+ else:
+ result = TestResult.PASSED
+ errOutput = None
+
+ bId = BuildId(spec)
+ output.add_test(bId, result, errOutput)
+
+
+def test_install(parser, args):
+ if not args.package:
+ tty.die("install requires a package argument")
+
+ if args.jobs is not None:
+ if args.jobs <= 0:
+ tty.die("The -j option must be a positive integer!")
+
+ if args.no_checksum:
+ spack.do_checksum = False # TODO: remove this global.
+
+ specs = spack.cmd.parse_specs(args.package, concretize=True)
+ if len(specs) > 1:
+ tty.die("Only 1 top-level package can be specified")
+ topSpec = iter(specs).next()
+
+ newInstalls = set()
+ for spec in topSpec.traverse():
+ package = spack.repo.get(spec)
+ if not package.installed:
+ newInstalls.add(spec)
+
+ if not args.output:
+ bId = BuildId(topSpec)
+ outputDir = join_path(os.getcwd(), "test-output")
+ if not os.path.exists(outputDir):
+ os.mkdir(outputDir)
+ outputFpath = join_path(outputDir, "test-{0}.xml".format(bId.stringId()))
+ else:
+ outputFpath = args.output
+
+ for spec in topSpec.traverse(order='post'):
+ # Calling do_install for the top-level package would be sufficient but
+ # this attempts to keep going if any package fails (other packages which
+ # are not dependents may succeed)
+ package = spack.repo.get(spec)
+ if (not failed_dependencies(spec)) and (not package.installed):
+ try:
+ package.do_install(
+ keep_prefix=False,
+ keep_stage=True,
+ ignore_deps=False,
+ make_jobs=args.jobs,
+ verbose=True,
+ fake=False)
+ except InstallError:
+ pass
+ except FetchError:
+ pass
+
+ jrf = JunitResultFormat()
+ create_test_output(topSpec, newInstalls, jrf)
+
+ with open(outputFpath, 'wb') as F:
+ jrf.write_to(F)
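
With the classes above in scope, producing a JUnit report is three calls: record each build, then serialize the testsuite element. A usage sketch with a stand-in spec object:

    import sys

    class FakeSpec(object):               # stand-in for a concrete Spec
        name, version = 'libelf', '0.8.13'
        def dag_hash(self):
            return 'abc1234'

    jrf = JunitResultFormat()
    jrf.add_test(BuildId(FakeSpec()), TestResult.PASSED)
    jrf.write_to(sys.stdout)              # <testsuite tests="1"><testcase ... /></testsuite>
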
diff --git a/lib/spack/spack/cmd/test.py b/lib/spack/spack/cmd/test.py
index b1418ac2f1..ddc6cb4fce 100644
--- a/lib/spack/spack/cmd/test.py
+++ b/lib/spack/spack/cmd/test.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -22,8 +22,10 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+import os
from pprint import pprint
+from llnl.util.filesystem import join_path, mkdirp
from llnl.util.tty.colify import colify
from llnl.util.lang import list_modules
@@ -38,6 +40,12 @@ def setup_parser(subparser):
subparser.add_argument(
'-l', '--list', action='store_true', dest='list', help="Show available tests")
subparser.add_argument(
+ '--createXmlOutput', action='store_true', dest='createXmlOutput',
+ help="Create JUnit XML from test results")
+ subparser.add_argument(
+ '--xmlOutputDir', dest='xmlOutputDir',
+ help="Nose creates XML files in this directory")
+ subparser.add_argument(
'-v', '--verbose', action='store_true', dest='verbose',
help="verbose output")
@@ -48,4 +56,14 @@ def test(parser, args):
colify(spack.test.list_tests(), indent=2)
else:
- spack.test.run(args.names, args.verbose)
+ if not args.createXmlOutput:
+ outputDir = None
+ else:
+ if not args.xmlOutputDir:
+ outputDir = join_path(os.getcwd(), "test-output")
+ else:
+ outputDir = os.path.abspath(args.xmlOutputDir)
+
+ if outputDir and not os.path.exists(outputDir):
+ mkdirp(outputDir)
+ spack.test.run(args.names, outputDir, args.verbose)
diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py
index aa62510fed..d01aa2136b 100644
--- a/lib/spack/spack/cmd/uninstall.py
+++ b/lib/spack/spack/cmd/uninstall.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -23,14 +23,14 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
-from external import argparse
+import argparse
import llnl.util.tty as tty
from llnl.util.tty.colify import colify
import spack
import spack.cmd
-import spack.packages
+import spack.repository
from spack.cmd.find import display_specs
from spack.package import PackageStillNeededError
@@ -42,9 +42,9 @@ def setup_parser(subparser):
help="Remove regardless of whether other packages depend on this one.")
subparser.add_argument(
'-a', '--all', action='store_true', dest='all',
- help="USE CAREFULLY. Remove ALL installed packages that match each supplied spec. " +
- "i.e., if you say uninstall libelf, ALL versions of libelf are uninstalled. " +
- "This is both useful and dangerous, like rm -r.")
+ help="USE CAREFULLY. Remove ALL installed packages that match each " +
+ "supplied spec. i.e., if you say uninstall libelf, ALL versions of " +
+ "libelf are uninstalled. This is both useful and dangerous, like rm -r.")
subparser.add_argument(
'packages', nargs=argparse.REMAINDER, help="specs of packages to uninstall")
@@ -53,51 +53,52 @@ def uninstall(parser, args):
if not args.packages:
tty.die("uninstall requires at least one package argument.")
- specs = spack.cmd.parse_specs(args.packages)
+ with spack.installed_db.write_transaction():
+ specs = spack.cmd.parse_specs(args.packages)
- # For each spec provided, make sure it refers to only one package.
- # Fail and ask user to be unambiguous if it doesn't
- pkgs = []
- for spec in specs:
- matching_specs = spack.db.get_installed(spec)
- if not args.all and len(matching_specs) > 1:
- tty.error("%s matches multiple packages:" % spec)
- print
- display_specs(matching_specs, long=True)
- print
- print "You can either:"
- print " a) Use a more specific spec, or"
- print " b) use spack uninstall -a to uninstall ALL matching specs."
- sys.exit(1)
+ # For each spec provided, make sure it refers to only one package.
+ # Fail and ask user to be unambiguous if it doesn't
+ pkgs = []
+ for spec in specs:
+ matching_specs = spack.installed_db.query(spec)
+ if not args.all and len(matching_specs) > 1:
+ tty.error("%s matches multiple packages:" % spec)
+ print
+ display_specs(matching_specs, long=True)
+ print
+ print "You can either:"
+ print " a) Use a more specific spec, or"
+ print " b) use spack uninstall -a to uninstall ALL matching specs."
+ sys.exit(1)
- if len(matching_specs) == 0:
- if args.force: continue
- tty.die("%s does not match any installed packages." % spec)
+ if len(matching_specs) == 0:
+ if args.force: continue
+ tty.die("%s does not match any installed packages." % spec)
- for s in matching_specs:
- try:
- # should work if package is known to spack
- pkgs.append(s.package)
-
- except spack.packages.UnknownPackageError, e:
- # The package.py file has gone away -- but still want to uninstall.
- spack.Package(s).do_uninstall(force=True)
+ for s in matching_specs:
+ try:
+ # should work if package is known to spack
+ pkgs.append(s.package)
+ except spack.repository.UnknownPackageError, e:
+ # The package.py file has gone away -- but still
+ # want to uninstall.
+ spack.Package(s).do_uninstall(force=True)
- # Sort packages to be uninstalled by the number of installed dependents
- # This ensures we do things in the right order
- def num_installed_deps(pkg):
- return len(pkg.installed_dependents)
- pkgs.sort(key=num_installed_deps)
+ # Sort packages to be uninstalled by the number of installed dependents
+ # This ensures we do things in the right order
+ def num_installed_deps(pkg):
+ return len(pkg.installed_dependents)
+ pkgs.sort(key=num_installed_deps)
- # Uninstall packages in order now.
- for pkg in pkgs:
- try:
- pkg.do_uninstall(force=args.force)
- except PackageStillNeededError, e:
- tty.error("Will not uninstall %s" % e.spec.format("$_$@$%@$#", color=True))
- print
- print "The following packages depend on it:"
- display_specs(e.dependents, long=True)
- print
- print "You can use spack uninstall -f to force this action."
- sys.exit(1)
+ # Uninstall packages in order now.
+ for pkg in pkgs:
+ try:
+ pkg.do_uninstall(force=args.force)
+ except PackageStillNeededError, e:
+ tty.error("Will not uninstall %s" % e.spec.format("$_$@$%@$#", color=True))
+ print
+ print "The following packages depend on it:"
+ display_specs(e.dependents, long=True)
+ print
+ print "You can use spack uninstall -f to force this action."
+ sys.exit(1)
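The dependent-count sort above is what makes batch uninstalls safe: specs with no remaining dependents are removed first, so nothing is uninstalled while another installed package still needs it. A toy illustration with hypothetical counts:

    # Hypothetical (package, installed-dependent-count) pairs.
    pkgs = [("callpath", 1), ("mpich", 2), ("mpileaks", 0)]

    # Ascending sort removes leaves before the packages they depend on.
    for name, ndeps in sorted(pkgs, key=lambda p: p[1]):
        print("uninstalling %s (%d dependents)" % (name, ndeps))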
diff --git a/lib/spack/spack/cmd/unload.py b/lib/spack/spack/cmd/unload.py
index 6442c48cb1..cfb640ee6f 100644
--- a/lib/spack/spack/cmd/unload.py
+++ b/lib/spack/spack/cmd/unload.py
@@ -6,7 +6,7 @@
# Written by David Beckingsale, david@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -22,7 +22,7 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-from external import argparse
+import argparse
import spack.modules
description ="Remove package from environment using module."
diff --git a/lib/spack/spack/cmd/unuse.py b/lib/spack/spack/cmd/unuse.py
index 2a7229a3a0..06176a976b 100644
--- a/lib/spack/spack/cmd/unuse.py
+++ b/lib/spack/spack/cmd/unuse.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -22,7 +22,7 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-from external import argparse
+import argparse
import spack.modules
description ="Remove package from environment using dotkit."
diff --git a/lib/spack/spack/cmd/url-parse.py b/lib/spack/spack/cmd/url-parse.py
new file mode 100644
index 0000000000..077c793d2e
--- /dev/null
+++ b/lib/spack/spack/cmd/url-parse.py
@@ -0,0 +1,75 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import sys
+
+import llnl.util.tty as tty
+
+import spack
+import spack.url
+from spack.util.web import find_versions_of_archive
+
+description = "Show parsing of a URL, optionally spider web for other versions."
+
+def setup_parser(subparser):
+ subparser.add_argument('url', help="url of a package archive")
+ subparser.add_argument(
+ '-s', '--spider', action='store_true', help="Spider the source page for versions.")
+
+
+def print_name_and_version(url):
+ name, ns, nl, ntup, ver, vs, vl, vtup = spack.url.substitution_offsets(url)
+ underlines = [" "] * max(ns+nl, vs+vl)
+ for i in range(ns, ns+nl):
+ underlines[i] = '-'
+ for i in range(vs, vs+vl):
+ underlines[i] = '~'
+
+ print " %s" % url
+ print " %s" % ''.join(underlines)
+
+
+def url_parse(parser, args):
+ url = args.url
+
+ ver, vs, vl = spack.url.parse_version_offset(url)
+ name, ns, nl = spack.url.parse_name_offset(url, ver)
+
+ tty.msg("Parsing URL:")
+ try:
+ print_name_and_version(url)
+ except spack.url.UrlParseError as e:
+ tty.error(str(e))
+
+ print
+ tty.msg("Substituting version 9.9.9b:")
+ newurl = spack.url.substitute_version(url, '9.9.9b')
+ print_name_and_version(newurl)
+
+ if args.spider:
+ print
+ tty.msg("Spidering for versions:")
+ versions = find_versions_of_archive(url)
+ for v in sorted(versions):
+ print "%-20s%s" % (v, versions[v])
diff --git a/lib/spack/spack/cmd/urls.py b/lib/spack/spack/cmd/urls.py
index 417ce3ab68..ebab65f7d1 100644
--- a/lib/spack/spack/cmd/urls.py
+++ b/lib/spack/spack/cmd/urls.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -41,7 +41,7 @@ def setup_parser(subparser):
def urls(parser, args):
urls = set()
- for pkg in spack.db.all_packages():
+ for pkg in spack.repo.all_packages():
url = getattr(pkg.__class__, 'url', None)
if url:
urls.add(url)
diff --git a/lib/spack/spack/cmd/use.py b/lib/spack/spack/cmd/use.py
index e34c194739..c09695cfd3 100644
--- a/lib/spack/spack/cmd/use.py
+++ b/lib/spack/spack/cmd/use.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -22,7 +22,7 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-from external import argparse
+import argparse
import spack.modules
description ="Add package to environment using dotkit."
diff --git a/lib/spack/spack/cmd/versions.py b/lib/spack/spack/cmd/versions.py
index ed16728261..bba75dae96 100644
--- a/lib/spack/spack/cmd/versions.py
+++ b/lib/spack/spack/cmd/versions.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -34,7 +34,7 @@ def setup_parser(subparser):
def versions(parser, args):
- pkg = spack.db.get(args.package)
+ pkg = spack.repo.get(args.package)
safe_versions = pkg.versions
fetched_versions = pkg.fetch_remote_versions()
diff --git a/lib/spack/spack/compiler.py b/lib/spack/spack/compiler.py
index 1e800a8979..12c02e0ea2 100644
--- a/lib/spack/spack/compiler.py
+++ b/lib/spack/spack/compiler.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -51,7 +51,7 @@ _version_cache = {}
def get_compiler_version(compiler_path, version_arg, regex='(.*)'):
if not compiler_path in _version_cache:
compiler = Executable(compiler_path)
- output = compiler(version_arg, return_output=True, error=None)
+ output = compiler(version_arg, output=str, error=str)
match = re.search(regex, output)
_version_cache[compiler_path] = match.group(1) if match else 'unknown'
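The new calling convention asks the Executable wrapper for stdout and stderr as strings, since some compilers print their version banner on stderr. The same capture-and-match pattern written against plain subprocess, as a sketch (the default regex here is illustrative, not Spack's):

    import re
    import subprocess

    def get_version(compiler_path, version_arg, regex=r"(\d+(\.\d+)+)"):
        # Merge stderr into stdout; several compilers report there.
        proc = subprocess.Popen([compiler_path, version_arg],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
        output = proc.communicate()[0].decode("utf-8", "replace")
        match = re.search(regex, output)
        return match.group(1) if match else "unknown"

    # e.g. get_version("gcc", "-dumpversion")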
diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py
index b7b021a1ac..6159ef576c 100644
--- a/lib/spack/spack/compilers/__init__.py
+++ b/lib/spack/spack/compilers/__init__.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -27,6 +27,7 @@ system and configuring Spack to use multiple compilers.
"""
import imp
import os
+import platform
from llnl.util.lang import memoized, list_modules
from llnl.util.filesystem import join_path
@@ -35,6 +36,7 @@ import spack
import spack.error
import spack.spec
import spack.config
+import spack.architecture
from spack.util.multiproc import parmap
from spack.compiler import Compiler
@@ -45,50 +47,128 @@ from spack.util.environment import get_path
_imported_compilers_module = 'spack.compilers'
_required_instance_vars = ['cc', 'cxx', 'f77', 'fc']
-_default_order = ['gcc', 'intel', 'pgi', 'clang', 'xlc']
+# TODO: customize order in config file
+if platform.system() == 'Darwin':
+ _default_order = ['clang', 'gcc', 'intel']
+else:
+ _default_order = ['gcc', 'intel', 'pgi', 'clang', 'xlc', 'nag']
+
def _auto_compiler_spec(function):
- def converter(cspec_like):
+ def converter(cspec_like, *args, **kwargs):
if not isinstance(cspec_like, spack.spec.CompilerSpec):
cspec_like = spack.spec.CompilerSpec(cspec_like)
- return function(cspec_like)
+ return function(cspec_like, *args, **kwargs)
return converter
-def _get_config():
- """Get a Spack config, but make sure it has compiler configuration
- first."""
+def _to_dict(compiler):
+ """Return a dict version of compiler suitable to insert in YAML."""
+ return {
+ str(compiler.spec) : dict(
+ (attr, getattr(compiler, attr, None))
+ for attr in _required_instance_vars)
+ }
+
+
+def get_compiler_config(arch=None, scope=None):
+ """Return the compiler configuration for the specified architecture.
+ """
# If any configuration file has compilers, just stick with the
# ones already configured.
- config = spack.config.get_compilers_config()
- existing = [spack.spec.CompilerSpec(s)
- for s in config]
- if existing:
- return config
+ config = spack.config.get_config('compilers', scope=scope)
+
+ my_arch = spack.architecture.sys_type()
+ if arch is None:
+ arch = my_arch
+
+ if arch in config:
+ return config[arch]
+
+ # Only for the current arch in *highest* scope: automatically try to
+ # find compilers if none are configured yet.
+ if arch == my_arch and scope == 'user':
+ config[arch] = {}
+ compilers = find_compilers(*get_path('PATH'))
+ for compiler in compilers:
+ config[arch].update(_to_dict(compiler))
+ spack.config.update_config('compilers', config, scope=scope)
+ return config[arch]
+
+ return {}
+
+
+def add_compilers_to_config(compilers, arch=None, scope=None):
+ """Add compilers to the config for the specified architecture.
+
+ Arguments:
+ - compilers: a list of Compiler objects.
+ - arch: arch to add compilers for.
+ - scope: configuration scope to modify.
+ """
+ if arch is None:
+ arch = spack.architecture.sys_type()
+
+ compiler_config = get_compiler_config(arch, scope)
+ for compiler in compilers:
+ compiler_config[str(compiler.spec)] = dict(
+ (c, getattr(compiler, c, "None"))
+ for c in _required_instance_vars)
+
+ update = { arch : compiler_config }
+ spack.config.update_config('compilers', update, scope)
+
+
+@_auto_compiler_spec
+def remove_compiler_from_config(compiler_spec, arch=None, scope=None):
+ """Remove compilers from the config, by spec.
+
+ Arguments:
+    - compiler_spec: the CompilerSpec to remove.
+    - arch: arch to remove compilers for.
+ - scope: configuration scope to modify.
+ """
+ if arch is None:
+ arch = spack.architecture.sys_type()
- compilers = find_compilers(*get_path('PATH'))
- add_compilers_to_config('user', *compilers)
+ compiler_config = get_compiler_config(arch, scope)
+ del compiler_config[str(compiler_spec)]
+ update = { arch : compiler_config }
- # After writing compilers to the user config, return a full config
- # from all files.
- return spack.config.get_compilers_config()
+ spack.config.update_config('compilers', update, scope)
+
+
+def all_compilers_config(arch=None, scope=None):
+ """Return a set of specs for all the compiler versions currently
+ available to build with. These are instances of CompilerSpec.
+ """
+ # Get compilers for this architecture.
+ arch_config = get_compiler_config(arch, scope)
+
+ # Merge 'all' compilers with arch-specific ones.
+ # Arch-specific compilers have higher precedence.
+ merged_config = get_compiler_config('all', scope=scope)
+ merged_config = spack.config._merge_yaml(merged_config, arch_config)
+
+ return merged_config
+
+
+def all_compilers(arch=None, scope=None):
+ # Return compiler specs from the merged config.
+ return [spack.spec.CompilerSpec(s)
+ for s in all_compilers_config(arch, scope)]
-_cached_default_compiler = None
def default_compiler():
- global _cached_default_compiler
- if _cached_default_compiler:
- return _cached_default_compiler
versions = []
- for name in _default_order: # TODO: customize order.
+ for name in _default_order:
versions = find(name)
- if versions: break
-
- if not versions:
+ if versions:
+ break
+ else:
raise NoCompilersError()
- _cached_default_compiler = sorted(versions)[-1]
- return _cached_default_compiler
+ return sorted(versions)[-1]
def find_compilers(*path):
@@ -123,20 +203,6 @@ def find_compilers(*path):
return clist
-def add_compilers_to_config(scope, *compilers):
- compiler_config_tree = {}
- for compiler in compilers:
- compiler_entry = {}
- for c in _required_instance_vars:
- val = getattr(compiler, c)
- if not val:
- val = "None"
- compiler_entry[c] = val
- compiler_config_tree[str(compiler.spec)] = compiler_entry
- spack.config.add_to_compiler_config(compiler_config_tree, scope)
-
-
-
def supported_compilers():
"""Return a set of names of compilers supported by Spack.
@@ -152,27 +218,19 @@ def supported(compiler_spec):
return compiler_spec.name in supported_compilers()
-def all_compilers():
- """Return a set of specs for all the compiler versions currently
- available to build with. These are instances of CompilerSpec.
- """
- configuration = _get_config()
- return [spack.spec.CompilerSpec(s) for s in configuration]
-
-
@_auto_compiler_spec
-def find(compiler_spec):
+def find(compiler_spec, arch=None, scope=None):
"""Return specs of available compilers that match the supplied
     compiler spec. Return an empty list if nothing is found."""
- return [c for c in all_compilers() if c.satisfies(compiler_spec)]
+ return [c for c in all_compilers(arch, scope) if c.satisfies(compiler_spec)]
@_auto_compiler_spec
-def compilers_for_spec(compiler_spec):
+def compilers_for_spec(compiler_spec, arch=None, scope=None):
"""This gets all compilers that satisfy the supplied CompilerSpec.
Returns an empty list if none are found.
"""
- config = _get_config()
+ config = all_compilers_config(arch, scope)
def get_compiler(cspec):
items = config[str(cspec)]
@@ -191,7 +249,7 @@ def compilers_for_spec(compiler_spec):
return cls(cspec, *compiler_paths)
- matches = find(compiler_spec)
+ matches = find(compiler_spec, arch, scope)
return [get_compiler(cspec) for cspec in matches]
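After this rework each compiler lives under its architecture as a dict of the four required paths, and all_compilers_config layers arch-specific entries over the 'all' pseudo-architecture. A hand-built sketch of the shape and the precedence, with hypothetical paths (dict.update is a shallow stand-in for the recursive _merge_yaml call above):

    config = {
        "all": {
            "gcc@4.4.7": {"cc": "/usr/bin/gcc", "cxx": "/usr/bin/g++",
                          "f77": "/usr/bin/gfortran", "fc": "/usr/bin/gfortran"},
        },
        "chaos_5_x86_64_ib": {
            "gcc@4.4.7": {"cc": "/opt/gcc/bin/gcc", "cxx": "/opt/gcc/bin/g++",
                          "f77": None, "fc": None},
        },
    }

    merged = dict(config["all"])
    merged.update(config["chaos_5_x86_64_ib"])   # arch-specific wins
    assert merged["gcc@4.4.7"]["cc"] == "/opt/gcc/bin/gcc"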
diff --git a/lib/spack/spack/compilers/clang.py b/lib/spack/spack/compilers/clang.py
index 790901c86e..340051019c 100644
--- a/lib/spack/spack/compilers/clang.py
+++ b/lib/spack/spack/compilers/clang.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -37,6 +37,12 @@ class Clang(Compiler):
# Subclasses use possible names of Fortran 90 compiler
fc_names = []
+ # Named wrapper links within spack.build_env_path
+ link_paths = { 'cc' : 'clang/clang',
+ 'cxx' : 'clang/clang++',
+ # Use default wrappers for fortran, in case provided in compilers.yaml
+ 'f77' : 'f77',
+ 'fc' : 'f90' }
@classmethod
def default_version(self, comp):
diff --git a/lib/spack/spack/compilers/gcc.py b/lib/spack/spack/compilers/gcc.py
index f0d27d590e..495b638a3a 100644
--- a/lib/spack/spack/compilers/gcc.py
+++ b/lib/spack/spack/compilers/gcc.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -42,6 +42,12 @@ class Gcc(Compiler):
# MacPorts builds gcc versions with prefixes and -mp-X.Y suffixes.
suffixes = [r'-mp-\d\.\d']
+ # Named wrapper links within spack.build_env_path
+ link_paths = {'cc' : 'gcc/gcc',
+ 'cxx' : 'gcc/g++',
+ 'f77' : 'gcc/gfortran',
+ 'fc' : 'gcc/gfortran' }
+
@property
def cxx11_flag(self):
if self.version < ver('4.3'):
diff --git a/lib/spack/spack/compilers/intel.py b/lib/spack/spack/compilers/intel.py
index 2a72c4eaea..69e9764790 100644
--- a/lib/spack/spack/compilers/intel.py
+++ b/lib/spack/spack/compilers/intel.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -37,6 +37,12 @@ class Intel(Compiler):
# Subclasses use possible names of Fortran 90 compiler
fc_names = ['ifort']
+ # Named wrapper links within spack.build_env_path
+ link_paths = { 'cc' : 'intel/icc',
+ 'cxx' : 'intel/icpc',
+ 'f77' : 'intel/ifort',
+ 'fc' : 'intel/ifort' }
+
@property
def cxx11_flag(self):
if self.version < ver('11.1'):
diff --git a/lib/spack/spack/compilers/nag.py b/lib/spack/spack/compilers/nag.py
new file mode 100644
index 0000000000..f1cc6be0d5
--- /dev/null
+++ b/lib/spack/spack/compilers/nag.py
@@ -0,0 +1,33 @@
+from spack.compiler import *
+
+class Nag(Compiler):
+ # Subclasses use possible names of C compiler
+ cc_names = []
+
+ # Subclasses use possible names of C++ compiler
+ cxx_names = []
+
+ # Subclasses use possible names of Fortran 77 compiler
+ f77_names = ['nagfor']
+
+ # Subclasses use possible names of Fortran 90 compiler
+ fc_names = ['nagfor']
+
+ # Named wrapper links within spack.build_env_path
+ link_paths = { # Use default wrappers for C and C++, in case provided in compilers.yaml
+ 'cc' : 'cc',
+ 'cxx' : 'cxx',
+ 'f77' : 'nag/nagfor',
+ 'fc' : 'nag/nagfor' }
+
+ @classmethod
+ def default_version(self, comp):
+ """The '-V' option works for nag compilers.
+ Output looks like this::
+
+ NAG Fortran Compiler Release 6.0(Hibiya) Build 1037
+ Product NPL6A60NA for x86-64 Linux
+ Copyright 1990-2015 The Numerical Algorithms Group Ltd., Oxford, U.K.
+ """
+ return get_compiler_version(
+ comp, '-V', r'NAG Fortran Compiler Release ([0-9.]+)')
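The sample banner in the docstring makes the capture group easy to check by hand:

    import re

    banner = ("NAG Fortran Compiler Release 6.0(Hibiya) Build 1037\n"
              "Product NPL6A60NA for x86-64 Linux\n")

    match = re.search(r"NAG Fortran Compiler Release ([0-9.]+)", banner)
    print(match.group(1))   # -> 6.0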
diff --git a/lib/spack/spack/compilers/pgi.py b/lib/spack/spack/compilers/pgi.py
index d97f24c12e..9ac74cfbdb 100644
--- a/lib/spack/spack/compilers/pgi.py
+++ b/lib/spack/spack/compilers/pgi.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -37,6 +37,12 @@ class Pgi(Compiler):
# Subclasses use possible names of Fortran 90 compiler
fc_names = ['pgf95', 'pgf90']
+ # Named wrapper links within spack.build_env_path
+ link_paths = { 'cc' : 'pgi/pgcc',
+ 'cxx' : 'pgi/case-insensitive/pgCC',
+ 'f77' : 'pgi/pgf77',
+ 'fc' : 'pgi/pgf90' }
+
@classmethod
def default_version(cls, comp):
"""The '-V' option works for all the PGI compilers.
diff --git a/lib/spack/spack/compilers/xl.py b/lib/spack/spack/compilers/xl.py
index 562186b865..c1d55109a3 100644
--- a/lib/spack/spack/compilers/xl.py
+++ b/lib/spack/spack/compilers/xl.py
@@ -7,7 +7,7 @@
# Written by François Bissey, francois.bissey@canterbury.ac.nz, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -38,6 +38,12 @@ class Xl(Compiler):
# Subclasses use possible names of Fortran 90 compiler
fc_names = ['xlf90','xlf90_r','xlf95','xlf95_r','xlf2003','xlf2003_r','xlf2008','xlf2008_r']
+ # Named wrapper links within spack.build_env_path
+ link_paths = { 'cc' : 'xl/xlc',
+ 'cxx' : 'xl/xlc++',
+ 'f77' : 'xl/xlf',
+ 'fc' : 'xl/xlf90' }
+
@property
def cxx11_flag(self):
if self.version < ver('13.1'):
diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py
index c27a023136..8da7011b53 100644
--- a/lib/spack/spack/concretize.py
+++ b/lib/spack/spack/concretize.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -44,7 +44,6 @@ from spec import DependencyMap
from itertools import chain
from spack.config import *
-
class DefaultConcretizer(object):
"""This class doesn't have any state, it just provides some methods for
concretization. You can subclass it to override just some of the
@@ -66,16 +65,15 @@ class DefaultConcretizer(object):
if condition(spec):
return spec
return None
-
+
def _valid_virtuals_and_externals(self, spec):
"""Returns a list of spec/external-path pairs for both virtuals and externals
that can concretize this spec."""
-
# Get a list of candidate packages that could satisfy this spec
packages = []
if spec.virtual:
- providers = spack.db.providers_for(spec)
+ providers = spack.repo.providers_for(spec)
if not providers:
raise UnsatisfiableProviderSpecError(providers[0], spec)
spec_w_preferred_providers = self._find_other_spec(spec, \
@@ -85,8 +83,6 @@ class DefaultConcretizer(object):
provider_cmp = partial(spack.pkgsort.provider_compare, spec_w_preferred_providers.name, spec.name)
packages = sorted(providers, cmp=provider_cmp)
else:
- if spec.external:
- return False
packages = [spec]
# For each candidate package, if it has externals add those to the candidates
@@ -98,13 +94,23 @@ class DefaultConcretizer(object):
buildable = not is_spec_nobuild(pkg)
if buildable:
result.append((pkg, None))
- if externals:
- sorted_externals = sorted(externals, cmp=lambda a,b: a[0].__cmp__(b[0]))
- for external in sorted_externals:
- if external[0].satisfies(spec):
- result.append(external)
+ for ext in externals:
+ if ext[0].satisfies(spec):
+ result.append(ext)
if not result:
raise NoBuildError(spec)
+
+ def cmp_externals(a, b):
+ result = a[0].__cmp__(b[0])
+ if result != 0: return result
+ if not a[1] and b[1]:
+ return 1
+ if not b[1] and a[1]:
+ return -1
+ return a[1].__cmp__(b[1])
+
+    result = sorted(result, cmp=cmp_externals)
return result
@@ -115,7 +121,7 @@ class DefaultConcretizer(object):
if not candidates:
return False
- #Find the another spec in the dag that has a compiler. We'll use that
+ #Find the nearest spec in the dag that has a compiler. We'll use that
# spec to test compiler compatibility.
other_spec = self._find_other_spec(spec, lambda(x): x.compiler)
if not other_spec:
@@ -131,25 +137,36 @@ class DefaultConcretizer(object):
if not candidate:
             #No ABI matches. Pick the top choice based on the original preferences.
candidate = candidates[0]
- external = candidate[1]
candidate_spec = candidate[0]
+ external = candidate[1]
+ changed = False
+
+ #If we're external then trim the dependencies
+ if external:
+ if (spec.dependencies):
+ changed = True
+ spec.dependencies = DependencyMap()
+ candidate_spec.dependencies = DependencyMap()
+
+ def fequal(candidate_field, spec_field):
+ return (not candidate_field) or (candidate_field == spec_field)
+ if fequal(candidate_spec.name, spec.name) and \
+ fequal(candidate_spec.versions, spec.versions) and \
+ fequal(candidate_spec.compiler, spec.compiler) and \
+ fequal(candidate_spec.architecture, spec.architecture) and \
+ fequal(candidate_spec.dependencies, spec.dependencies) and \
+ fequal(candidate_spec.variants, spec.variants) and \
+ fequal(external, spec.external):
+ return changed
#Refine this spec to the candidate.
- changed = False
if spec.virtual:
spec._replace_with(candidate_spec)
changed = True
if spec._dup(candidate_spec, deps=False, cleardeps=False):
changed = True
- if not spec.external and external:
- spec.external = external
- changed = True
+ spec.external = external
- #If we're external then trim the dependencies
- if external and spec.dependencies:
- changed = True
- spec.dependencies = DependencyMap()
-
return changed
@@ -286,21 +303,6 @@ class DefaultConcretizer(object):
return True # things changed.
- def choose_provider(self, package_spec, spec, providers):
- """This is invoked for virtual specs. Given a spec with a virtual name,
- say "mpi", and a list of specs of possible providers of that spec,
- select a provider and return it.
- """
- assert(spec.virtual)
- assert(providers)
-
- provider_cmp = partial(spack.pkgsort.provider_compare, package_spec.name, spec.name)
- sorted_providers = sorted(providers, cmp=provider_cmp)
- first_key = sorted_providers[0]
-
- return first_key
-
-
class UnavailableCompilerVersionError(spack.error.SpackError):
"""Raised when there is no available compiler that satisfies a
compiler spec."""
diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py
index 712a2b78fc..71900282f5 100644
--- a/lib/spack/spack/config.py
+++ b/lib/spack/spack/config.py
@@ -1,12 +1,12 @@
##############################################################################
-# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -45,11 +45,11 @@ several configuration files, such as compilers.yaml or mirrors.yaml.
Configuration file format
===============================
-Configuration files are formatted using YAML syntax.
-This format is implemented by Python's
-yaml class, and it's easy to read and versatile.
+Configuration files are formatted using YAML syntax. This format is
+implemented by libyaml (included with Spack as an external module),
+and it's easy to read and versatile.
-The config files are structured as trees, like this ``compiler`` section::
+Config files are structured as trees, like this ``compiler`` section::
compilers:
chaos_5_x86_64_ib:
@@ -67,336 +67,539 @@ In this example, entries like ''compilers'' and ''xlc@12.1'' are used to
categorize entries beneath them in the tree. At the root of the tree,
entries like ''cc'' and ''cxx'' are specified as name/value pairs.
-Spack returns these trees as nested dicts. The dict for the above example
-would looks like:
-
- { 'compilers' :
- { 'chaos_5_x86_64_ib' :
- { 'gcc@4.4.7' :
- { 'cc' : '/usr/bin/gcc',
- 'cxx' : '/usr/bin/g++'
- 'f77' : '/usr/bin/gfortran'
- 'fc' : '/usr/bin/gfortran' }
- }
- { 'bgqos_0' :
- { 'cc' : '/usr/local/bin/mpixlc' }
- }
- }
-
-Some routines, like get_mirrors_config and get_compilers_config may strip
-off the top-levels of the tree and return subtrees.
+``config.get_config()`` returns these trees as nested dicts, but it
+strips the first level off. So, ``config.get_config('compilers')``
+would return something like this for the above example:
+
+ { 'chaos_5_x86_64_ib' :
+ { 'gcc@4.4.7' :
+ { 'cc' : '/usr/bin/gcc',
+ 'cxx' : '/usr/bin/g++'
+ 'f77' : '/usr/bin/gfortran'
+ 'fc' : '/usr/bin/gfortran' }
+ }
+ { 'bgqos_0' :
+ { 'cc' : '/usr/local/bin/mpixlc' } }
+
+Likewise, the ``mirrors.yaml`` file's first line must be ``mirrors:``,
+but ``get_config()`` strips that off too.
+
+Precedence
+===============================
+
+``config.py`` routines attempt to recursively merge configuration
+across scopes. So if there are ``compilers.py`` files in both the
+site scope and the user scope, ``get_config('compilers')`` will return
+merged dictionaries of *all* the compilers available. If a user
+compiler conflicts with a site compiler, Spack will overwrite the site
+configuration with the user configuration. If both the user and site
+``mirrors.yaml`` files contain lists of mirrors, then ``get_config()``
+will return a concatenated list of mirrors, with the user config items
+first.
+
+Sometimes, it is useful to *completely* override a site setting with a
+user one. To accomplish this, you can use *two* colons at the end of
+a key in a configuration file. For example, this:
+
+ compilers::
+ chaos_5_x86_64_ib:
+ gcc@4.4.7:
+ cc: /usr/bin/gcc
+ cxx: /usr/bin/g++
+ f77: /usr/bin/gfortran
+ fc: /usr/bin/gfortran
+ bgqos_0:
+ xlc@12.1:
+ cc: /usr/local/bin/mpixlc
+ ...
+
+will make Spack take compilers *only* from the user configuration, and
+the site configuration will be ignored.
+
"""
import os
-import exceptions
+import re
import sys
import copy
-import inspect
-import glob
-import imp
-import spack.spec
-import spack.error
-from llnl.util.lang import memoized
-
-from external import yaml
-from external.yaml.error import MarkedYAMLError
+import jsonschema
+from jsonschema import Draft4Validator, validators
+import yaml
+from yaml.error import MarkedYAMLError
+from ordereddict_backport import OrderedDict
+
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp
import copy
-_config_sections = {}
-class _ConfigCategory:
- name = None
- filename = None
- merge = True
- def __init__(self, n, f, m):
- self.name = n
- self.filename = f
- self.merge = m
- self.files_read_from = []
- self.result_dict = {}
- _config_sections[n] = self
-
-_ConfigCategory('compilers', 'compilers.yaml', True)
-_ConfigCategory('mirrors', 'mirrors.yaml', True)
-_ConfigCategory('preferred', 'preferred.yaml', True)
-_ConfigCategory('view', 'views.yaml', True)
-_ConfigCategory('preferred', 'preferred.yaml', True)
-_ConfigCategory('packages', 'packages.yaml', True)
-
-"""Names of scopes and their corresponding configuration files."""
-config_scopes = [('site', os.path.join(spack.etc_path, 'spack')),
- ('user', os.path.expanduser('~/.spack'))]
-
-_compiler_by_arch = {}
-_read_config_file_result = {}
-def _read_config_file(filename):
- """Read a given YAML configuration file"""
- global _read_config_file_result
- if filename in _read_config_file_result:
- return _read_config_file_result[filename]
+import spack
+from spack.error import SpackError
+
+# Hacked yaml for configuration files preserves line numbers.
+import spack.util.spack_yaml as syaml
+
+
+"""Dict from section names -> schema for that section."""
+section_schemas = {
+ 'compilers': {
+ '$schema': 'http://json-schema.org/schema#',
+ 'title': 'Spack compiler configuration file schema',
+ 'type': 'object',
+ 'additionalProperties': False,
+ 'patternProperties': {
+ 'compilers:?': { # optional colon for overriding site config.
+ 'type': 'object',
+ 'default': {},
+ 'additionalProperties': False,
+ 'patternProperties': {
+ r'\w[\w-]*': { # architecture
+ 'type': 'object',
+ 'additionalProperties': False,
+ 'patternProperties': {
+ r'\w[\w-]*@\w[\w-]*': { # compiler spec
+ 'type': 'object',
+ 'additionalProperties': False,
+ 'required': ['cc', 'cxx', 'f77', 'fc'],
+ 'properties': {
+ 'cc': { 'anyOf': [ {'type' : 'string' },
+ {'type' : 'null' }]},
+ 'cxx': { 'anyOf': [ {'type' : 'string' },
+ {'type' : 'null' }]},
+ 'f77': { 'anyOf': [ {'type' : 'string' },
+ {'type' : 'null' }]},
+ 'fc': { 'anyOf': [ {'type' : 'string' },
+ {'type' : 'null' }]},
+ },},},},},},},},
+
+ 'mirrors': {
+ '$schema': 'http://json-schema.org/schema#',
+ 'title': 'Spack mirror configuration file schema',
+ 'type': 'object',
+ 'additionalProperties': False,
+ 'patternProperties': {
+ r'mirrors:?': {
+ 'type': 'object',
+ 'default': {},
+ 'additionalProperties': False,
+ 'patternProperties': {
+ r'\w[\w-]*': {
+ 'type': 'string'},},},},},
+
+ 'repos': {
+ '$schema': 'http://json-schema.org/schema#',
+ 'title': 'Spack repository configuration file schema',
+ 'type': 'object',
+ 'additionalProperties': False,
+ 'patternProperties': {
+ r'repos:?': {
+ 'type': 'array',
+ 'default': [],
+ 'items': {
+ 'type': 'string'},},},},
+ 'packages': {
+ '$schema': 'http://json-schema.org/schema#',
+ 'title': 'Spack package configuration file schema',
+ 'type': 'object',
+ 'additionalProperties': False,
+ 'patternProperties': {
+ r'packages:?': {
+ 'type': 'object',
+ 'default': {},
+ 'additionalProperties': False,
+ 'patternProperties': {
+ r'\w[\w-]*': { # package name
+ 'type': 'object',
+ 'default': {},
+ 'additionalProperties': False,
+ 'properties': {
+ 'version': {
+ 'type' : 'array',
+ 'default' : [],
+ 'items' : { 'type' : 'string' } }, #version strings
+ 'compiler': {
+ 'type' : 'array',
+ 'default' : [],
+ 'items' : { 'type' : 'string' } }, #compiler specs
+ 'nobuild': {
+ 'type': 'boolean',
+ 'default': False,
+ },
+ 'paths': {
+ 'type' : 'object',
+ 'default' : {},
+ }
+ },},},},},}
+}
+
+"""OrderedDict of config scopes keyed by name.
+ Later scopes will override earlier scopes.
+"""
+config_scopes = OrderedDict()
+
+
+def validate_section_name(section):
+ """Raise a ValueError if the section is not a valid section."""
+ if section not in section_schemas:
+ raise ValueError("Invalid config section: '%s'. Options are %s."
+ % (section, section_schemas))
+
+
+def extend_with_default(validator_class):
+ """Add support for the 'default' attribute for properties and patternProperties.
+
+ jsonschema does not handle this out of the box -- it only
+ validates. This allows us to set default values for configs
+    where certain fields are `None` because they're deleted or
+ commented out.
+
+ """
+ validate_properties = validator_class.VALIDATORS["properties"]
+ validate_pattern_properties = validator_class.VALIDATORS["patternProperties"]
+
+ def set_defaults(validator, properties, instance, schema):
+ for property, subschema in properties.iteritems():
+ if "default" in subschema:
+ instance.setdefault(property, subschema["default"])
+ for err in validate_properties(validator, properties, instance, schema):
+ yield err
+
+ def set_pp_defaults(validator, properties, instance, schema):
+ for property, subschema in properties.iteritems():
+ if "default" in subschema:
+ if isinstance(instance, dict):
+ for key, val in instance.iteritems():
+ if re.match(property, key) and val is None:
+ instance[key] = subschema["default"]
+
+ for err in validate_pattern_properties(validator, properties, instance, schema):
+ yield err
+
+ return validators.extend(validator_class, {
+ "properties" : set_defaults,
+ "patternProperties" : set_pp_defaults
+ })
+
+DefaultSettingValidator = extend_with_default(Draft4Validator)
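extend_with_default wraps a jsonschema validator class so that validation also injects each property's schema default. A standalone demonstration against a toy schema (hypothetical, not one of Spack's; requires the jsonschema package):

    from jsonschema import Draft4Validator, validators

    def with_defaults(validator_class):
        validate_properties = validator_class.VALIDATORS["properties"]

        def set_defaults(validator, properties, instance, schema):
            for prop, subschema in properties.items():
                if "default" in subschema:
                    instance.setdefault(prop, subschema["default"])
            for err in validate_properties(validator, properties,
                                           instance, schema):
                yield err

        return validators.extend(validator_class,
                                 {"properties": set_defaults})

    schema = {"type": "object",
              "properties": {"mirrors": {"type": "object", "default": {}}}}
    doc = {}
    with_defaults(Draft4Validator)(schema).validate(doc)
    print(doc)   # -> {'mirrors': {}}: the default was filled in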
+
+def validate_section(data, schema):
+ """Validate data read in from a Spack YAML file.
+
+ This leverages the line information (start_mark, end_mark) stored
+ on Spack YAML structures.
+
+ """
try:
+ DefaultSettingValidator(schema).validate(data)
+ except jsonschema.ValidationError as e:
+ raise ConfigFormatError(e, data)
+
+
+class ConfigScope(object):
+ """This class represents a configuration scope.
+
+ A scope is one directory containing named configuration files.
+ Each file is a config "section" (e.g., mirrors, compilers, etc).
+ """
+
+ def __init__(self, name, path):
+ self.name = name # scope name.
+ self.path = path # path to directory containing configs.
+ self.sections = {} # sections read from config files.
+
+ # Register in a dict of all ConfigScopes
+ # TODO: make this cleaner. Mocking up for testing is brittle.
+ global config_scopes
+ config_scopes[name] = self
+
+ def get_section_filename(self, section):
+ validate_section_name(section)
+ return os.path.join(self.path, "%s.yaml" % section)
+
+
+ def get_section(self, section):
+ if not section in self.sections:
+ path = self.get_section_filename(section)
+ schema = section_schemas[section]
+ data = _read_config_file(path, schema)
+ self.sections[section] = data
+ return self.sections[section]
+
+
+ def write_section(self, section):
+ filename = self.get_section_filename(section)
+ data = self.get_section(section)
+ try:
+ mkdirp(self.path)
+ with open(filename, 'w') as f:
+ validate_section(data, section_schemas[section])
+ syaml.dump(data, stream=f, default_flow_style=False)
+ except jsonschema.ValidationError as e:
+ raise ConfigSanityError(e, data)
+ except (yaml.YAMLError, IOError) as e:
+ raise ConfigFileError("Error writing to config file: '%s'" % str(e))
+
+
+ def clear(self):
+ """Empty cached config information."""
+ self.sections = {}
+
+
+ConfigScope('site', os.path.join(spack.etc_path, 'spack')),
+ConfigScope('user', os.path.expanduser('~/.spack'))
+
+
+def highest_precedence_scope():
+ """Get the scope with highest precedence (prefs will override others)."""
+ return config_scopes.values()[-1]
+
+
+def validate_scope(scope):
+ """Ensure that scope is valid, and return a valid scope if it is None.
+
+ This should be used by routines in ``config.py`` to validate
+ scope name arguments, and to determine a default scope where no
+ scope is specified.
+
+ """
+ if scope is None:
+ # default to the scope with highest precedence.
+ return highest_precedence_scope()
+
+ elif scope in config_scopes:
+ return config_scopes[scope]
+
+ else:
+ raise ValueError("Invalid config scope: '%s'. Must be one of %s."
+ % (scope, config_scopes.keys()))
+
+
+def _read_config_file(filename, schema):
+ """Read a YAML configuration file."""
+    # Ignore nonexistent files.
+ if not os.path.exists(filename):
+ return None
+
+ elif not os.path.isfile(filename):
+ raise ConfigFileError(
+ "Invlaid configuration. %s exists but is not a file." % filename)
+
+ elif not os.access(filename, os.R_OK):
+ raise ConfigFileError("Config file is not readable: %s." % filename)
+
+ try:
+ tty.debug("Reading config file %s" % filename)
with open(filename) as f:
- ydict = yaml.load(f)
+ data = syaml.load(f)
+
+ if data:
+ validate_section(data, schema)
+ return data
+
except MarkedYAMLError, e:
- tty.die("Error parsing yaml%s: %s" % (str(e.context_mark), e.problem))
- except exceptions.IOError, e:
- _read_config_file_result[filename] = None
- return None
- _read_config_file_result[filename] = ydict
- return ydict
+ raise ConfigFileError(
+ "Error parsing yaml%s: %s" % (str(e.context_mark), e.problem))
+
+ except IOError, e:
+ raise ConfigFileError(
+ "Error reading configuration file %s: %s" % (filename, str(e)))
def clear_config_caches():
"""Clears the caches for configuration files, which will cause them
to be re-read upon the next request"""
- for key,s in _config_sections.iteritems():
- s.files_read_from = []
- s.result_dict = {}
- spack.config._read_config_file_result = {}
- spack.config._compiler_by_arch = {}
- spack.compilers._cached_default_compiler = None
-
-
-def _merge_dicts(d1, d2):
- """Recursively merges two configuration trees, with entries
- in d2 taking precedence over d1"""
- if not d1:
- return copy.copy(d2)
- if not d2:
- return d1
-
- if (type(d1) is list) and (type(d2) is list):
- d1.extend(d2)
- return d1
-
- if (type(d1) is dict) and (type(d2) is dict):
- for key2, val2 in d2.iteritems():
- if not key2 in d1:
- d1[key2] = val2
- elif type(d1[key2]) is dict and type(val2) is dict:
- d1[key2] = _merge_dicts(d1[key2], val2)
- elif (type(d1) is list) and (type(d2) is list):
- d1.extend(d2)
+ for scope in config_scopes.values():
+ scope.clear()
+
+
+def _merge_yaml(dest, source):
+ """Merges source into dest; entries in source take precedence over dest.
+
+ This routine may modify dest and should be assigned to dest, in
+ case dest was None to begin with, e.g.:
+
+ dest = _merge_yaml(dest, source)
+
+ Config file authors can optionally end any attribute in a dict
+ with `::` instead of `:`, and the key will override that of the
+ parent instead of merging.
+
+ """
+ def they_are(t):
+ return isinstance(dest, t) and isinstance(source, t)
+
+ # If both are None, handle specially and return None.
+ if source is None and dest is None:
+ return None
+
+ # If source is None, overwrite with source.
+ elif source is None:
+ return None
+
+ # Source list is prepended (for precedence)
+ if they_are(list):
+ seen = set(source)
+ dest[:] = source + [x for x in dest if x not in seen]
+ return dest
+
+ # Source dict is merged into dest.
+ elif they_are(dict):
+ for sk, sv in source.iteritems():
+ if not sk in dest:
+ dest[sk] = copy.copy(sv)
else:
- d1[key2] = val2
- return d1
+ dest[sk] = _merge_yaml(dest[sk], source[sk])
+ return dest
+
+ # In any other case, overwrite with a copy of the source value.
+ else:
+ return copy.copy(source)
+
- return d2
+def get_config(section, scope=None):
+ """Get configuration settings for a section.
+ Strips off the top-level section name from the YAML dict.
+ """
+ validate_section_name(section)
+ merged_section = syaml.syaml_dict()
+
+ if scope is None:
+ scopes = config_scopes.values()
+ else:
+ scopes = [validate_scope(scope)]
-def get_config(category_name):
- """Get the confguration tree for the names category. Strips off the
- top-level category entry from the dict"""
- global config_scopes
- category = _config_sections[category_name]
- if category.result_dict:
- return category.result_dict
+ for scope in scopes:
+ # read potentially cached data from the scope.
+ data = scope.get_section(section)
- category.result_dict = {}
- for scope, scope_path in config_scopes:
- path = os.path.join(scope_path, category.filename)
- result = _read_config_file(path)
- if not result:
+ # Skip empty configs
+ if not data or not isinstance(data, dict):
continue
- if not category_name in result:
+
+ # Allow complete override of site config with '<section>::'
+ override_key = section + ':'
+ if not (section in data or override_key in data):
+ tty.warn("Skipping bad configuration file: '%s'" % scope.path)
continue
- category.files_read_from.insert(0, path)
- result = result[category_name]
- if category.merge:
- category.result_dict = _merge_dicts(category.result_dict, result)
+
+ if override_key in data:
+ merged_section = data[override_key]
else:
- category.result_dict = result
- return category.result_dict
-
-
-def get_compilers_config(arch=None):
- """Get the compiler configuration from config files for the given
- architecture. Strips off the architecture component of the
- configuration"""
- global _compiler_by_arch
- if not arch:
- arch = spack.architecture.sys_type()
- if arch in _compiler_by_arch:
- return _compiler_by_arch[arch]
-
- cc_config = get_config('compilers')
- if arch in cc_config and 'all' in cc_config:
- arch_compiler = dict(cc_config[arch])
- _compiler_by_arch[arch] = _merge_dict(arch_compiler, cc_config['all'])
- elif arch in cc_config:
- _compiler_by_arch[arch] = cc_config[arch]
- elif 'all' in cc_config:
- _compiler_by_arch[arch] = cc_config['all']
- else:
- _compiler_by_arch[arch] = {}
- return _compiler_by_arch[arch]
+ merged_section = _merge_yaml(merged_section, data[section])
+ return merged_section
-def get_mirror_config():
- """Get the mirror configuration from config files as a list of name/location tuples"""
- return [x.items()[0] for x in get_config('mirrors')]
+def get_config_filename(scope, section):
+ """For some scope and section, get the name of the configuration file"""
+ scope = validate_scope(scope)
+ return scope.get_section_filename(section)
-def get_preferred_config():
- """Get the preferred configuration from config files"""
- return get_config('preferred')
+def update_config(section, update_data, scope=None):
+ """Update the configuration file for a particular scope.
-@memoized
-def get_packages_config():
- """Get the externals configuration from config files"""
- package_config = get_config('packages')
- if not package_config:
- return {}
- indexed_packages = {}
- for p in package_config:
- package_name = spack.spec.Spec(p.keys()[0]).name
- if package_name not in indexed_packages:
- indexed_packages[package_name] = []
- indexed_packages[package_name].append({ spack.spec.Spec(key) : val for key, val in p.iteritems() })
- return indexed_packages
+ Overwrites contents of a section in a scope with update_data,
+ then writes out the config file.
+ update_data should have the top-level section name stripped off
+ (it will be re-added). Data itself can be a list, dict, or any
+ other yaml-ish structure.
+
+ """
+ # read in the config to ensure we've got current data
+ get_config(section)
+
+ validate_section_name(section) # validate section name
+ scope = validate_scope(scope) # get ConfigScope object from string.
+
+    # replace the in-memory section data, then write it out.
+ scope.sections[section] = { section : update_data }
+ scope.write_section(section)
-def is_spec_nobuild(spec):
- """Return true if the spec pkgspec is configured as nobuild"""
- allpkgs = get_packages_config()
- name = spec.name
- if not name in allpkgs:
- return False
- for itm in allpkgs[name]:
- for pkg,conf in itm.iteritems():
- if pkg.satisfies(spec):
- if conf.get('nobuild', False):
- return True
- return False
+
+def print_section(section):
+ """Print a configuration to stdout."""
+ try:
+ data = syaml.syaml_dict()
+ data[section] = get_config(section)
+ syaml.dump(data, stream=sys.stdout, default_flow_style=False)
+ except (yaml.YAMLError, IOError) as e:
+ raise ConfigError("Error reading configuration: %s" % section)
def spec_externals(spec):
"""Return a list of spec, directory pairs for each external location for spec"""
- allpkgs = get_packages_config()
+ allpkgs = get_config('packages')
name = spec.name
spec_locations = []
- if not name in allpkgs:
+ pkg_paths = allpkgs.get(name, {}).get('paths', None)
+ if not pkg_paths:
return []
- for itm in allpkgs[name]:
- for pkg,conf in itm.iteritems():
- if not pkg.satisfies(spec):
- continue
- path = conf.get('path', None)
- if not path:
- continue
- spec_locations.append( (pkg, path) )
- return spec_locations
-
-
-def get_config_scope_dirname(scope):
- """For a scope return the config directory"""
- global config_scopes
- for s,p in config_scopes:
- if s == scope:
- return p
- tty.die("Unknown scope %s. Valid options are %s" %
- (scope, ", ".join([s for s,p in config_scopes])))
-
-
-def get_config_scope_filename(scope, category_name):
- """For some scope and category, get the name of the configuration file"""
- if not category_name in _config_sections:
- tty.die("Unknown config category %s. Valid options are: %s" %
- (category_name, ", ".join([s for s in _config_sections])))
- return os.path.join(get_config_scope_dirname(scope), _config_sections[category_name].filename)
-
-
-def add_to_config(category_name, addition_dict, scope=None):
- """Merge a new dict into a configuration tree and write the new
- configuration to disk"""
- global _read_config_file_result
- get_config(category_name)
- category = _config_sections[category_name]
-
- #If scope is specified, use it. Otherwise use the last config scope that
- #we successfully parsed data from.
- file = None
- path = None
- if not scope and not category.files_read_from:
- scope = 'user'
- if scope:
- try:
- dir = get_config_scope_dirname(scope)
- if not os.path.exists(dir):
- mkdirp(dir)
- path = os.path.join(dir, category.filename)
- file = open(path, 'w')
- except exceptions.IOError, e:
- pass
- else:
- for p in category.files_read_from:
- try:
- file = open(p, 'w')
- except exceptions.IOError, e:
- pass
- if file:
- path = p
- break;
- if not file:
- tty.die('Unable to write to config file %s' % path)
-
- #Merge the new information into the existing file info, then write to disk
- new_dict = _read_config_file_result[path]
- if new_dict and category_name in new_dict:
- new_dict = new_dict[category_name]
- new_dict = _merge_dicts(new_dict, addition_dict)
- new_dict = { category_name : new_dict }
- _read_config_file_result[path] = new_dict
- yaml.dump(new_dict, stream=file, default_flow_style=False)
- file.close()
-
- #Merge the new information into the cached results
- category.result_dict = _merge_dicts(category.result_dict, addition_dict)
-
-
-def add_to_mirror_config(addition_dict, scope=None):
- """Add mirrors to the configuration files"""
- add_to_config('mirrors', addition_dict, scope)
-
-
-def add_to_compiler_config(addition_dict, scope=None, arch=None):
- """Add compilers to the configuration files"""
- if not arch:
- arch = spack.architecture.sys_type()
- add_to_config('compilers', { arch : addition_dict }, scope)
- clear_config_caches()
-
-
-def remove_from_config(category_name, key_to_rm, scope=None):
- """Remove a configuration key and write a new configuration to disk"""
- global config_scopes
- get_config(category_name)
- scopes_to_rm_from = [scope] if scope else [s for s,p in config_scopes]
- category = _config_sections[category_name]
-
- rmd_something = False
- for s in scopes_to_rm_from:
- path = get_config_scope_filename(scope, category_name)
- result = _read_config_file(path)
- if not result:
+ for pkg,path in pkg_paths.iteritems():
+ if not spec.satisfies(pkg):
continue
- if not key_to_rm in result[category_name]:
+ if not path:
continue
- with open(path, 'w') as f:
- result[category_name].pop(key_to_rm, None)
- yaml.dump(result, stream=f, default_flow_style=False)
- category.result_dict.pop(key_to_rm, None)
- rmd_something = True
- return rmd_something
-
-
-"""Print a configuration to stdout"""
-def print_category(category_name):
- if not category_name in _config_sections:
- tty.die("Unknown config category %s. Valid options are: %s" %
- (category_name, ", ".join([s for s in _config_sections])))
- yaml.dump(get_config(category_name), stream=sys.stdout, default_flow_style=False)
+ spec_locations.append( (spack.spec.Spec(pkg), path) )
+ return spec_locations
+
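spec_externals now reads a package's 'paths' dict straight out of the packages section; each key is a spec string that the concrete spec must satisfy. A sketch of the data shape it expects, with hypothetical entries (the real code also calls spec.satisfies on each key):

    allpkgs = {
        "mvapich2": {
            "paths": {
                "mvapich2@1.9": "/usr/local/tools/mvapich2-gcc-1.9",
            },
            "nobuild": True,
        },
    }

    pkg_paths = allpkgs.get("mvapich2", {}).get("paths", None)
    for pkg, path in pkg_paths.items():
        print("%s -> %s" % (pkg, path))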
+
+def is_spec_nobuild(spec):
+ """Return true if the spec pkgspec is configured as nobuild"""
+ allpkgs = get_config('packages')
+ name = spec.name
+ if not spec.name in allpkgs:
+ return False
+ if not 'nobuild' in allpkgs[spec.name]:
+ return False
+ return allpkgs[spec.name]['nobuild']
+
+
+class ConfigError(SpackError): pass
+class ConfigFileError(ConfigError): pass
+
+def get_path(path, data):
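+    """Follow the list of keys in path down into the nested data structure."""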
+ if path:
+ return get_path(path[1:], data[path[0]])
+ else:
+ return data
+
+class ConfigFormatError(ConfigError):
+ """Raised when a configuration format does not match its schema."""
+ def __init__(self, validation_error, data):
+ # Try to get line number from erroneous instance and its parent
+ instance_mark = getattr(validation_error.instance, '_start_mark', None)
+ parent_mark = getattr(validation_error.parent, '_start_mark', None)
+ path = getattr(validation_error, 'path', None)
+
+ # Try really hard to get the parent (which sometimes is not
+        # set). This digs it out of the validated structure if it's not
+ # on the validation_error.
+ if path and not parent_mark:
+ parent_path = list(path)[:-1]
+ parent = get_path(parent_path, data)
+ if path[-1] in parent:
+ if isinstance(parent, dict):
+ keylist = parent.keys()
+ elif isinstance(parent, list):
+ keylist = parent
+ idx = keylist.index(path[-1])
+ parent_mark = getattr(keylist[idx], '_start_mark', None)
+
+ if instance_mark:
+ location = '%s:%d' % (instance_mark.name, instance_mark.line + 1)
+ elif parent_mark:
+ location = '%s:%d' % (parent_mark.name, parent_mark.line + 1)
+ elif path:
+ location = 'At ' + ':'.join(path)
+ else:
+ location = '<unknown line>'
+
+ message = '%s: %s' % (location, validation_error.message)
+        super(ConfigFormatError, self).__init__(message)
+
+class ConfigSanityError(ConfigFormatError):
+ """Same as ConfigFormatError, raised when config is written by Spack."""
diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py
new file mode 100644
index 0000000000..9cbe7de44a
--- /dev/null
+++ b/lib/spack/spack/database.py
@@ -0,0 +1,632 @@
+##############################################################################
+# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""Spack's installation tracking database.
+
+The database serves two purposes:
+
+ 1. It implements a cache on top of a potentially very large Spack
+ directory hierarchy, speeding up many operations that would
+ otherwise require filesystem access.
+
+ 2. It will allow us to track external installations as well as lost
+ packages and their dependencies.
+
+Prior to the implementation of this store, a directory layout served
+as the authoritative database of packages in Spack. This module
+provides a cache and a sanity checking mechanism for what is in the
+filesystem.
+
+"""
+import os
+import time
+import socket
+
+import yaml
+from yaml.error import MarkedYAMLError, YAMLError
+
+import llnl.util.tty as tty
+from llnl.util.filesystem import *
+from llnl.util.lock import *
+
+import spack.spec
+from spack.version import Version
+from spack.spec import Spec
+from spack.error import SpackError
+from spack.repository import UnknownPackageError
+
+# DB goes in this directory underneath the root
+_db_dirname = '.spack-db'
+
+# DB version. This is stuck in the DB file to track changes in format.
+_db_version = Version('0.9')
+
+# Default timeout for spack database locks is 60 seconds.
+_db_lock_timeout = 60
+
+
+def _autospec(function):
+ """Decorator that automatically converts the argument of a single-arg
+ function to a Spec."""
+ def converter(self, spec_like, *args, **kwargs):
+ if not isinstance(spec_like, spack.spec.Spec):
+ spec_like = spack.spec.Spec(spec_like)
+ return function(self, spec_like, *args, **kwargs)
+ return converter
+
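A sketch of what _autospec buys callers (the class and method below are
hypothetical, just to show the conversion):

    class Example(object):
        @_autospec
        def lookup(self, spec):
            return spec.name

    # Example().lookup('mpileaks') and Example().lookup(Spec('mpileaks'))
    # both reach lookup() with a Spec instance.
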
+
+class InstallRecord(object):
+ """A record represents one installation in the DB.
+
+ The record keeps track of the spec for the installation, its
+ install path, AND whether or not it is installed. We need the
+ installed flag in case a user either:
+
+ a) blew away a directory, or
+ b) used spack uninstall -f to get rid of it
+
+ If, in either case, the package was removed but others still
+ depend on it, we still need to track its spec, so we don't
+ actually remove from the database until a spec has no installed
+ dependents left.
+
+ """
+ def __init__(self, spec, path, installed, ref_count=0):
+ self.spec = spec
+ self.path = str(path)
+ self.installed = bool(installed)
+ self.ref_count = ref_count
+
+ def to_dict(self):
+ return { 'spec' : self.spec.to_node_dict(),
+ 'path' : self.path,
+ 'installed' : self.installed,
+ 'ref_count' : self.ref_count }
+
+ @classmethod
+ def from_dict(cls, spec, dictionary):
+ d = dictionary
+ return InstallRecord(spec, d['path'], d['installed'], d['ref_count'])
+
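to_dict and from_dict are meant to round-trip; a hedged sketch (the spec
name and path are placeholders, and the spec must be concrete for
to_node_dict to be meaningful):

    spec = Spec('libelf')
    spec.concretize()
    rec = InstallRecord(spec, '/opt/spack/libelf', True)
    d = rec.to_dict()   # keys: 'spec', 'path', 'installed', 'ref_count'
    assert InstallRecord.from_dict(spec, d).path == rec.path
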
+
+class Database(object):
+ def __init__(self, root, db_dir=None):
+ """Create a Database for Spack installations under ``root``.
+
+ A Database is a cache of Spec data from ``$prefix/spec.yaml``
+ files in Spack installation directories.
+
+ By default, Database files (data and lock files) are stored
+ under ``root/.spack-db``, which is created if it does not
+ exist. This is the ``db_dir``.
+
+ The Database will attempt to read an ``index.yaml`` file in
+ ``db_dir``. If it does not find one, it will be created when
+ needed by scanning the entire Database root for ``spec.yaml``
+ files according to Spack's ``DirectoryLayout``.
+
+ Caller may optionally provide a custom ``db_dir`` parameter
+ where data will be stored. This is intended to be used for
+ testing the Database class.
+
+ """
+ self.root = root
+
+ if db_dir is None:
+ # If the db_dir is not provided, default to within the db root.
+ self._db_dir = join_path(self.root, _db_dirname)
+ else:
+ # Allow customizing the database directory location for testing.
+ self._db_dir = db_dir
+
+ # Set up layout of database files within the db dir
+ self._index_path = join_path(self._db_dir, 'index.yaml')
+ self._lock_path = join_path(self._db_dir, 'lock')
+
+ # Create needed directories and files
+ if not os.path.exists(self._db_dir):
+ mkdirp(self._db_dir)
+
+ if not os.path.exists(self._lock_path):
+ touch(self._lock_path)
+
+ # initialize rest of state.
+ self.lock = Lock(self._lock_path)
+ self._data = {}
+
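Constructing a Database is cheap: nothing is read until a transaction
runs. A sketch (the root path is a placeholder):

    db = Database('/opt/spack')
    # index file: /opt/spack/.spack-db/index.yaml
    # lock file:  /opt/spack/.spack-db/lock
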
+
+ def write_transaction(self, timeout=_db_lock_timeout):
+ """Get a write lock context manager for use in a `with` block."""
+ return WriteTransaction(self, self._read, self._write, timeout)
+
+
+ def read_transaction(self, timeout=_db_lock_timeout):
+ """Get a read lock context manager for use in a `with` block."""
+ return ReadTransaction(self, self._read, None, timeout)
+
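Reads and writes are meant to happen inside these context managers; the
acquire/release callbacks keep the in-memory _data and the on-disk index
in sync. A sketch (db, spec, and path are placeholders):

    with db.read_transaction():      # _read() runs on first acquisition
        results = db.query()

    with db.write_transaction():     # _read() on enter, _write() on release
        db._add(spec, path)
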
+
+ def _write_to_yaml(self, stream):
+ """Write out the databsae to a YAML file.
+
+ This function does not do any locking or transactions.
+ """
+ # map from per-spec hash code to installation record.
+ installs = dict((k, v.to_dict()) for k, v in self._data.items())
+
+ # database includes installation list and version.
+
+ # NOTE: this DB version does not handle multiple installs of
+ # the same spec well. If there are 2 identical specs with
+ # different paths, it can't differentiate.
+ # TODO: fix this before we support multiple install locations.
+ database = {
+ 'database' : {
+ 'installs' : installs,
+ 'version' : str(_db_version)
+ }
+ }
+
+ try:
+ return yaml.dump(database, stream=stream, default_flow_style=False)
+ except YAMLError as e:
+ raise SpackYAMLError("error writing YAML database:", str(e))
+
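The resulting index.yaml has roughly this shape (the hash and path are
shortened placeholders for illustration):

    database:
      installs:
        '7ba1...':
          spec: { ... }            # node dict from Spec.to_node_dict()
          path: /opt/spack/...
          installed: true
          ref_count: 1
      version: '0.9'
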
+
+ def _read_spec_from_yaml(self, hash_key, installs, parent_key=None):
+ """Recursively construct a spec from a hash in a YAML database.
+
+ Does not do any locking.
+ """
+ if hash_key not in installs:
+ parent = read_spec(installs[parent_key]['path'])
+
+ spec_dict = installs[hash_key]['spec']
+
+ # Build spec from dict first.
+ spec = Spec.from_node_dict(spec_dict)
+
+ # Add dependencies from other records in the install DB to
+ # form a full spec.
+ for dep_hash in spec_dict[spec.name]['dependencies'].values():
+ child = self._read_spec_from_yaml(dep_hash, installs, hash_key)
+ spec._add_dependency(child)
+
+ # Specs from the database need to be marked concrete because
+ # they represent actual installations.
+ spec._mark_concrete()
+ return spec
+
+
+ def _read_from_yaml(self, stream):
+ """
+ Fill database from YAML, do not maintain old data
+ Translate the spec portions from node-dict form to spec form
+
+ Does not do any locking.
+ """
+ try:
+ if isinstance(stream, basestring):
+ with open(stream, 'r') as f:
+ yfile = yaml.load(f)
+ else:
+ yfile = yaml.load(stream)
+
+ except MarkedYAMLError as e:
+ raise SpackYAMLError("error parsing YAML database:", str(e))
+
+ if yfile is None:
+ return
+
+ def check(cond, msg):
+ if not cond: raise CorruptDatabaseError(self._index_path, msg)
+
+ check('database' in yfile, "No 'database' attribute in YAML.")
+
+ # High-level file checks
+ db = yfile['database']
+ check('installs' in db, "No 'installs' in YAML DB.")
+ check('version' in db, "No 'version' in YAML DB.")
+
+ # TODO: better version checking semantics.
+ version = Version(db['version'])
+ if version != _db_version:
+ raise InvalidDatabaseVersionError(_db_version, version)
+
+ # Iterate through database and check each record.
+ installs = db['installs']
+ data = {}
+ for hash_key, rec in installs.items():
+ try:
+ # This constructs a spec DAG from the list of all installs
+ spec = self._read_spec_from_yaml(hash_key, installs)
+
+ # Validate the spec by ensuring the stored and actual
+ # hashes are the same.
+ spec_hash = spec.dag_hash()
+ if spec_hash != hash_key:
+ tty.warn("Hash mismatch in database: %s -> spec with hash %s"
+ % (hash_key, spec_hash))
+ continue # TODO: is skipping the right thing to do?
+
+ # Insert the brand new spec in the database. Each
+ # spec has its own copies of its dependency specs.
+ # TODO: would a more immutable spec implementation simplify this?
+ data[hash_key] = InstallRecord.from_dict(spec, rec)
+
+ except Exception as e:
+ tty.warn("Invalid database reecord:",
+ "file: %s" % self._index_path,
+ "hash: %s" % hash_key,
+ "cause: %s" % str(e))
+ raise
+
+ self._data = data
+
+
+ def reindex(self, directory_layout):
+ """Build database index from scratch based from a directory layout.
+
+ Locks the DB if it isn't locked already.
+
+ """
+ with self.write_transaction():
+ old_data = self._data
+ try:
+ self._data = {}
+
+ # Ask the directory layout to traverse the filesystem.
+ for spec in directory_layout.all_specs():
+ # Create a spec for each known package and add it.
+ path = directory_layout.path_for_spec(spec)
+ self._add(spec, path, directory_layout)
+
+ self._check_ref_counts()
+
+ except:
+ # If anything explodes, restore old data, skip write.
+ self._data = old_data
+ raise
+
+
+ def _check_ref_counts(self):
+ """Ensure consistency of reference counts in the DB.
+
+ Raise an AssertionError if something is amiss.
+
+ Does no locking.
+ """
+ counts = {}
+ for key, rec in self._data.items():
+ counts.setdefault(key, 0)
+ for dep in rec.spec.dependencies.values():
+ dep_key = dep.dag_hash()
+ counts.setdefault(dep_key, 0)
+ counts[dep_key] += 1
+
+ for rec in self._data.values():
+ key = rec.spec.dag_hash()
+ expected = counts[key]
+ found = rec.ref_count
+ if expected != found:
+ raise AssertionError(
+ "Invalid ref_count: %s: %d (expected %d), in DB %s."
+ % (key, found, expected, self._index_path))
+
+
+ def _write(self):
+ """Write the in-memory database index to its file path.
+
+ Does no locking.
+
+ """
+ temp_file = self._index_path + (
+ '.%s.%s.temp' % (socket.getfqdn(), os.getpid()))
+
+ # Write a temporary database file then move it into place
+ try:
+ with open(temp_file, 'w') as f:
+ self._write_to_yaml(f)
+ os.rename(temp_file, self._index_path)
+ except:
+ # Clean up temp file if something goes wrong.
+ if os.path.exists(temp_file):
+ os.remove(temp_file)
+ raise
+
+
+ def _read(self):
+ """Re-read Database from the data in the set location.
+
+ This does no locking.
+ """
+ if os.path.isfile(self._index_path):
+ # Read from YAML file if a database exists
+ self._read_from_yaml(self._index_path)
+
+ else:
+ # The file doesn't exist, try to traverse the directory.
+ # reindex() takes its own write lock, so no lock here.
+ self.reindex(spack.install_layout)
+
+
+ def _add(self, spec, path, directory_layout=None):
+ """Add an install record for spec at path to the database.
+
+ This assumes that the spec is not already installed. It
+ updates the ref counts on dependencies of the spec in the DB.
+
+ This operation is in-memory, and does not lock the DB.
+
+ """
+ key = spec.dag_hash()
+ if key in self._data:
+ rec = self._data[key]
+ rec.installed = True
+
+ # TODO: this overwrites a previous install path (when path !=
+ # self._data[key].path), and the old path still has a
+ # dependent in the DB. We could consider re-RPATH-ing the
+ # dependents. This case is probably infrequent and may not be
+ # worth fixing, but this is where we can discover it.
+ rec.path = path
+
+ else:
+ self._data[key] = InstallRecord(spec, path, True)
+ for dep in spec.dependencies.values():
+ self._increment_ref_count(dep, directory_layout)
+
+
+ def _increment_ref_count(self, spec, directory_layout=None):
+ """Recursively examine dependencies and update their DB entries."""
+ key = spec.dag_hash()
+ if key not in self._data:
+ installed = False
+ path = None
+ if directory_layout:
+ path = directory_layout.path_for_spec(spec)
+ installed = os.path.isdir(path)
+
+ self._data[key] = InstallRecord(spec.copy(), path, installed)
+
+ for dep in spec.dependencies.values():
+ self._increment_ref_count(dep)
+
+ self._data[key].ref_count += 1
+
+ @_autospec
+ def add(self, spec, path):
+ """Add spec at path to database, locking and reading DB to sync.
+
+ ``add()`` will lock and read from the DB on disk.
+
+ """
+ # TODO: ensure that spec is concrete?
+ # Entire add is transactional.
+ with self.write_transaction():
+ self._add(spec, path)
+
+
+ def _get_matching_spec_key(self, spec, **kwargs):
+ """Get the exact spec OR get a single spec that matches."""
+ key = spec.dag_hash()
+ if key not in self._data:
+ match = self.query_one(spec, **kwargs)
+ if match:
+ return match.dag_hash()
+ raise KeyError("No such spec in database! %s" % spec)
+ return key
+
+
+ @_autospec
+ def get_record(self, spec, **kwargs):
+ key = self._get_matching_spec_key(spec, **kwargs)
+ return self._data[key]
+
+
+ def _decrement_ref_count(self, spec):
+ key = spec.dag_hash()
+
+ if key not in self._data:
+ # TODO: print something here? DB is corrupt, but
+ # not much we can do.
+ return
+
+ rec = self._data[key]
+ rec.ref_count -= 1
+
+ if rec.ref_count == 0 and not rec.installed:
+ del self._data[key]
+ for dep in spec.dependencies.values():
+ self._decrement_ref_count(dep)
+
+
+ def _remove(self, spec):
+ """Non-locking version of remove(); does real work.
+ """
+ key = self._get_matching_spec_key(spec)
+ rec = self._data[key]
+
+ if rec.ref_count > 0:
+ rec.installed = False
+ return rec.spec
+
+ del self._data[key]
+ for dep in rec.spec.dependencies.values():
+ self._decrement_ref_count(dep)
+
+ # Returns the concrete spec so we know it in the case where a
+ # query spec was passed in.
+ return rec.spec
+
+
+ @_autospec
+ def remove(self, spec):
+ """Removes a spec from the database. To be called on uninstall.
+
+ Reads the database, then:
+
+ 1. Marks the spec as not installed.
+ 2. Removes the spec if it has no more dependents.
+ 3. If removed, recursively updates dependencies' ref counts
+ and removes them if they are no longer needed.
+
+ """
+ # Take a lock around the entire removal.
+ with self.write_transaction():
+ return self._remove(spec)
+
+
+ @_autospec
+ def installed_extensions_for(self, extendee_spec):
+ """
+ Return the specs of all packages that extend
+ the given spec
+ """
+ for s in self.query():
+ try:
+ if s.package.extends(extendee_spec):
+ yield s.package
+ except UnknownPackageError as e:
+ continue
+ # skips unknown packages
+ # TODO: conditional way to do this instead of catching exceptions
+
+
+ def query(self, query_spec=any, known=any, installed=True):
+ """Run a query on the database.
+
+ ``query_spec``
+ Queries iterate through specs in the database and return
+ those that satisfy the supplied ``query_spec``. If
+ query_spec is `any`, this will match all specs in the
+ database. If it is a spec, we'll evaluate
+ ``spec.satisfies(query_spec)``.
+
+ The query can be constrained by two additional attributes:
+
+ ``known``
+ Possible values: True, False, any
+
+ Specs that are "known" are those for which Spack can
+ locate a ``package.py`` file -- i.e., Spack "knows" how to
+ install them. Specs that are unknown may represent
+ packages that existed in a previous version of Spack, but
+ have since either changed their name or been removed.
+
+ ``installed``
+ Possible values: True, False, any
+
+ Specs for which a prefix exists are "installed". A spec
+ that is NOT installed will be in the database if some
+ other spec depends on it but its installation has gone
+ away since Spack installed it.
+
+ TODO: Specs are a lot like queries. Should there be a
+ wildcard spec object, and should specs have attributes
+ like installed and known that can be queried? Or are
+ these really special cases that only belong here?
+
+ """
+ with self.read_transaction():
+ results = []
+ for key, rec in self._data.items():
+ if installed is not any and rec.installed != installed:
+ continue
+ if known is not any and spack.repo.exists(rec.spec.name) != known:
+ continue
+ if query_spec is any or rec.spec.satisfies(query_spec):
+ results.append(rec.spec)
+
+ return sorted(results)
+
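A few illustrative queries (the spec names are placeholders):

    db.query()                # all installed specs
    db.query('mpileaks')      # installed specs satisfying 'mpileaks'
    db.query(installed=any)   # everything the DB tracks, installed or not
    db.query(known=False)     # records whose package.py is no longer found
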
+
+ def query_one(self, query_spec, known=any, installed=True):
+ """Query for exactly one spec that matches the query spec.
+
+ Raises an assertion error if more than one spec matches the
+ query. Returns None if no installed package matches.
+
+ """
+ concrete_specs = self.query(query_spec, known, installed)
+ assert len(concrete_specs) <= 1
+ return concrete_specs[0] if concrete_specs else None
+
+
+ def missing(self, spec):
+ with self.read_transaction():
+ key = spec.dag_hash()
+ return key in self._data and not self._data[key].installed
+
+
+class _Transaction(object):
+ """Simple nested transaction context manager that uses a file lock.
+
+ This class can trigger actions when the lock is acquired for the
+ first time and released for the last.
+
+ Timeout for lock is customizable.
+ """
+ def __init__(self, db, acquire_fn=None, release_fn=None,
+ timeout=_db_lock_timeout):
+ self._db = db
+ self._timeout = timeout
+ self._acquire_fn = acquire_fn
+ self._release_fn = release_fn
+
+ def __enter__(self):
+ if self._enter() and self._acquire_fn:
+ self._acquire_fn()
+
+ def __exit__(self, type, value, traceback):
+ if self._exit() and self._release_fn:
+ self._release_fn()
+
+
+class ReadTransaction(_Transaction):
+ def _enter(self):
+ return self._db.lock.acquire_read(self._timeout)
+
+ def _exit(self):
+ return self._db.lock.release_read()
+
+
+class WriteTransaction(_Transaction):
+ def _enter(self):
+ return self._db.lock.acquire_write(self._timeout)
+
+ def _exit(self):
+ return self._db.lock.release_write()
+
+
+class CorruptDatabaseError(SpackError):
+ def __init__(self, path, msg=''):
+ super(CorruptDatabaseError, self).__init__(
+ "Spack database is corrupt: %s. %s" %(path, msg))
+
+
+class InvalidDatabaseVersionError(SpackError):
+ def __init__(self, expected, found):
+ super(InvalidDatabaseVersionError, self).__init__(
+ "Expected database version %s but found version %s"
+ % (expected, found))
diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py
index 9297d6dac3..0b98211cb9 100644
--- a/lib/spack/spack/directives.py
+++ b/lib/spack/spack/directives.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -42,15 +42,19 @@ The available directives are:
* ``extends``
* ``patch``
* ``variant``
+ * ``resource``
"""
-__all__ = [ 'depends_on', 'extends', 'provides', 'patch', 'version',
- 'variant' ]
+__all__ = ['depends_on', 'extends', 'provides', 'patch', 'version',
+ 'variant', 'resource']
import re
import inspect
+import os.path
+import functools
from llnl.util.lang import *
+from llnl.util.filesystem import join_path
import spack
import spack.spec
@@ -60,7 +64,8 @@ from spack.version import Version
from spack.patch import Patch
from spack.variant import Variant
from spack.spec import Spec, parse_anonymous_spec
-
+from spack.resource import Resource
+from spack.fetch_strategy import from_kwargs
#
# This is a list of all directives, built up as they are defined in
@@ -79,8 +84,8 @@ class directive(object):
"""Decorator for Spack directives.
Spack directives allow you to modify a package while it is being
- defined, e.g. to add version or depenency information. Directives
- are one of the key pieces of Spack's package "langauge", which is
+ defined, e.g. to add version or dependency information. Directives
+ are one of the key pieces of Spack's package "language", which is
embedded in python.
Here's an example directive:
@@ -141,6 +146,7 @@ class directive(object):
def __call__(self, directive_function):
directives[directive_function.__name__] = self
+ @functools.wraps(directive_function)
def wrapped(*args, **kwargs):
pkg = DictWrapper(caller_locals())
self.ensure_dicts(pkg)
@@ -238,13 +244,10 @@ def patch(pkg, url_or_filename, level=1, when=None):
if when is None:
when = pkg.name
when_spec = parse_anonymous_spec(when, pkg.name)
-
- if when_spec not in pkg.patches:
- pkg.patches[when_spec] = [Patch(pkg.name, url_or_filename, level)]
- else:
- # if this spec is identical to some other, then append this
- # patch to the existing list.
- pkg.patches[when_spec].append(Patch(pkg.name, url_or_filename, level))
+ cur_patches = pkg.patches.setdefault(when_spec, [])
+ # if this spec is identical to some other, then append this
+ # patch to the existing list.
+ cur_patches.append(Patch(pkg, url_or_filename, level))
@directive('variants')
@@ -261,6 +264,43 @@ def variant(pkg, name, default=False, description=""):
pkg.variants[name] = Variant(default, description)
+@directive('resources')
+def resource(pkg, **kwargs):
+ """
+ Define an external resource to be fetched and staged when building the package. Based on the keywords present in the
+ dictionary the appropriate FetchStrategy will be used for the resource. Resources are fetched and staged in their
+ own folder inside spack stage area, and then linked into the stage area of the package that needs them.
+
+ List of recognized keywords:
+
+ * 'when' : (optional) represents the condition upon which the resource is needed
+ * 'destination' : (optional) path where to link the resource. This path must be relative to the main package stage
+ area.
+ * 'placement' : (optional) gives the possibility to fine tune how the resource is linked into the main package stage
+ area.
+ """
+ when = kwargs.get('when', pkg.name)
+ destination = kwargs.get('destination', "")
+ placement = kwargs.get('placement', None)
+ # Check if the path is relative
+ if os.path.isabs(destination):
+ message = "The destination keyword of a resource directive can't be an absolute path.\n"
+ message += "\tdestination : '{dest}'\n".format(dest=destination)
+ raise RuntimeError(message)
+ # Check if the path falls within the main package stage area
+ test_path = 'stage_folder_root/'
+ normalized_destination = os.path.normpath(join_path(test_path, destination)) # Normalized absolute path
+ if test_path not in normalized_destination:
+ message = "The destination folder of a resource must fall within the main package stage directory.\n"
+ message += "\tdestination : '{dest}'\n".format(dest=destination)
+ raise RuntimeError(message)
+ when_spec = parse_anonymous_spec(when, pkg.name)
+ resources = pkg.resources.setdefault(when_spec, [])
+ fetcher = from_kwargs(**kwargs)
+ name = kwargs.get('name')
+ resources.append(Resource(name, fetcher, destination, placement))
+
+
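A hypothetical package showing the directive in use (the URL, checksum,
and paths are made up for illustration):

    class Example(Package):
        ...
        resource(name='extras',
                 url='http://example.com/extras-1.0.tar.gz',
                 md5='0123456789abcdef0123456789abcdef',
                 destination='contrib',
                 when='+extras')
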
class DirectiveError(spack.error.SpackError):
"""This is raised when something is wrong with a package directive."""
def __init__(self, directive, message):
diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py
index 83e6eb566a..d5b61ee7f0 100644
--- a/lib/spack/spack/directory_layout.py
+++ b/lib/spack/spack/directory_layout.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -29,10 +29,9 @@ import hashlib
import shutil
import glob
import tempfile
-from external import yaml
+import yaml
import llnl.util.tty as tty
-from llnl.util.lang import memoized
from llnl.util.filesystem import join_path, mkdirp
from spack.spec import Spec
@@ -221,8 +220,7 @@ class YamlDirectoryLayout(DirectoryLayout):
spec = Spec.from_yaml(f)
# Specs read from actual installations are always concrete
- spec._normal = True
- spec._concrete = True
+ spec._mark_concrete()
return spec
@@ -266,7 +264,6 @@ class YamlDirectoryLayout(DirectoryLayout):
self.write_spec(spec, spec_file_path)
- @memoized
def all_specs(self):
if not os.path.isdir(self.root):
return []
@@ -277,7 +274,6 @@ class YamlDirectoryLayout(DirectoryLayout):
return [self.read_spec(s) for s in spec_files]
- @memoized
def specs_by_hash(self):
by_hash = {}
for spec in self.all_specs():
diff --git a/lib/spack/spack/error.py b/lib/spack/spack/error.py
index bfa7951a47..0c2e7eb53c 100644
--- a/lib/spack/spack/error.py
+++ b/lib/spack/spack/error.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -55,8 +55,8 @@ class SpackError(Exception):
def __str__(self):
msg = self.message
- if self.long_message:
- msg += "\n %s" % self.long_message
+ if self._long_message:
+ msg += "\n %s" % self._long_message
return msg
class UnsupportedPlatformError(SpackError):
diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py
index b810023c5a..337dd1e198 100644
--- a/lib/spack/spack/fetch_strategy.py
+++ b/lib/spack/spack/fetch_strategy.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -154,7 +154,7 @@ class URLFetchStrategy(FetchStrategy):
# Run curl but grab the mime type from the http headers
headers = spack.curl(
- *curl_args, return_output=True, fail_on_error=False)
+ *curl_args, output=str, fail_on_error=False)
if spack.curl.returncode != 0:
# clean up archive on failure.
@@ -190,7 +190,7 @@ class URLFetchStrategy(FetchStrategy):
if content_types and 'text/html' in content_types[-1]:
tty.warn("The contents of " + self.archive_file + " look like HTML.",
"The checksum will likely be bad. If it is, you can use",
- "'spack clean --dist' to remove the bad archive, then fix",
+ "'spack clean <package>' to remove the bad archive, then fix",
"your internet gateway issue and install again.")
if not self.archive_file:
@@ -375,7 +375,7 @@ class GitFetchStrategy(VCSFetchStrategy):
@property
def git_version(self):
- vstring = self.git('--version', return_output=True).lstrip('git version ')
+ vstring = self.git('--version', output=str).lstrip('git version ')
return Version(vstring)
@@ -518,7 +518,7 @@ class SvnFetchStrategy(VCSFetchStrategy):
def _remove_untracked_files(self):
"""Removes untracked files in an svn repository."""
- status = self.svn('status', '--no-ignore', return_output=True)
+ status = self.svn('status', '--no-ignore', output=str)
self.svn('status', '--no-ignore')
for line in status.split('\n'):
if not re.match('^[I?]', line):
@@ -634,6 +634,22 @@ def from_url(url):
return URLFetchStrategy(url)
+def from_kwargs(**kwargs):
+ """
+ Construct the appropriate FetchStrategy from the given keyword arguments.
+
+ :param kwargs: dictionary of keyword arguments
+ :return: fetcher or raise a FetchError exception
+ """
+ for fetcher in all_strategies:
+ if fetcher.matches(kwargs):
+ return fetcher(**kwargs)
+ # Raise an error in case we can't instantiate any known strategy
+ message = "Cannot instantiate any FetchStrategy"
+ long_message = message + " from the given arguments : {arguments}".format(arguments=kwargs)
+ raise FetchError(message, long_message)
+
+
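Dispatch is by the keyword names each strategy declares; two hedged
examples (URLs and checksums are placeholders):

    from_kwargs(url='http://example.com/f.tar.gz',
                md5='0123456789abcdef0123456789abcdef')   # URLFetchStrategy
    from_kwargs(git='http://example.com/repo.git',
                tag='v1.0')                               # GitFetchStrategy
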
def args_are_for(args, fetcher):
fetcher.matches(args)
@@ -671,7 +687,7 @@ def for_package_version(pkg, version):
class FetchError(spack.error.SpackError):
- def __init__(self, msg, long_msg):
+ def __init__(self, msg, long_msg=None):
super(FetchError, self).__init__(msg, long_msg)
@@ -689,7 +705,7 @@ class NoArchiveFileError(FetchError):
class NoDigestError(FetchError):
- def __init__(self, msg, long_msg):
+ def __init__(self, msg, long_msg=None):
super(NoDigestError, self).__init__(msg, long_msg)
diff --git a/lib/spack/spack/graph.py b/lib/spack/spack/graph.py
index 5fb6a9cd23..f3732dfbff 100644
--- a/lib/spack/spack/graph.py
+++ b/lib/spack/spack/graph.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -523,7 +523,7 @@ def graph_dot(*specs, **kwargs):
return '"%s"' % string
if not specs:
- specs = [p.name for p in spack.db.all_packages()]
+ specs = [p.name for p in spack.repo.all_packages()]
else:
roots = specs
specs = set()
diff --git a/lib/spack/spack/hooks/__init__.py b/lib/spack/spack/hooks/__init__.py
index 1c44e8abaa..2765f7be39 100644
--- a/lib/spack/spack/hooks/__init__.py
+++ b/lib/spack/spack/hooks/__init__.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/lib/spack/spack/hooks/dotkit.py b/lib/spack/spack/hooks/dotkit.py
index 4e748ff80a..9123637356 100644
--- a/lib/spack/spack/hooks/dotkit.py
+++ b/lib/spack/spack/hooks/dotkit.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/lib/spack/spack/hooks/extensions.py b/lib/spack/spack/hooks/extensions.py
index cf87a78c8c..627184cabd 100644
--- a/lib/spack/spack/hooks/extensions.py
+++ b/lib/spack/spack/hooks/extensions.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -27,9 +27,7 @@ import spack
def pre_uninstall(pkg):
- # Need to do this b/c uninstall does not automatically do it.
- # TODO: store full graph info in stored .spec file.
- pkg.spec.normalize()
+ assert(pkg.spec.concrete)
if pkg.is_extension:
if pkg.activated:
diff --git a/lib/spack/spack/hooks/tclmodule.py b/lib/spack/spack/hooks/tclmodule.py
index 0b9fd5a67c..8b315f27a2 100644
--- a/lib/spack/spack/hooks/tclmodule.py
+++ b/lib/spack/spack/hooks/tclmodule.py
@@ -6,7 +6,7 @@
# Written by David Beckingsale, david@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py
index 306c8085aa..1d9b0e7ef2 100644
--- a/lib/spack/spack/mirror.py
+++ b/lib/spack/spack/mirror.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -26,7 +26,7 @@
This file contains code for creating spack mirror directories. A
mirror is an organized hierarchy containing specially named archive
files. This enabled spack to know where to find files in a mirror if
-the main server for a particualr package is down. Or, if the computer
+the main server for a particular package is down. Or, if the computer
where spack is run is not connected to the internet, it allows spack
to download packages directly from a mirror (e.g., on an intranet).
"""
@@ -42,7 +42,7 @@ import spack.fetch_strategy as fs
from spack.spec import Spec
from spack.stage import Stage
from spack.version import *
-from spack.util.compression import extension
+from spack.util.compression import extension, allowed_archive
def mirror_archive_filename(spec):
@@ -87,11 +87,26 @@ def get_matching_versions(specs, **kwargs):
if v.satisfies(spec.versions):
s = Spec(pkg.name)
s.versions = VersionList([v])
+ s.variants = spec.variants.copy()
matching.append(s)
return matching
+def suggest_archive_basename(resource):
+ """
+ Return a tentative basename for an archive. Raise an exception if the name is among the allowed archive types.
+
+ :param fetcher:
+ :return:
+ """
+ basename = os.path.basename(resource.fetcher.url)
+ if not allowed_archive(basename):
+ raise RuntimeError("%s is not an allowed archive type" % basename)
+ return basename
+
+
+
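For example (hypothetical resource), a fetcher URL of
http://example.com/extras-1.0.tar.gz yields the basename
'extras-1.0.tar.gz', while a URL with no recognized archive extension
raises RuntimeError:

    # resource.fetcher.url == 'http://example.com/extras-1.0.tar.gz'
    suggest_archive_basename(resource)   # -> 'extras-1.0.tar.gz'
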
def create(path, specs, **kwargs):
"""Create a directory to be used as a spack mirror, and fill it with
package archives.
@@ -108,7 +123,7 @@ def create(path, specs, **kwargs):
Return Value:
Returns a tuple of lists: (present, mirrored, error)
- * present: Package specs that were already prsent.
+ * present: Package specs that were already present.
* mirrored: Package specs that were successfully mirrored.
* error: Package specs that failed to mirror due to some error.
@@ -140,38 +155,60 @@ def create(path, specs, **kwargs):
error = []
# Iterate through packages and download all the safe tarballs for each of them
+ everything_already_exists = True
for spec in version_specs:
pkg = spec.package
stage = None
try:
# create a subdirectory for the current package@version
- archive_path = os.path.abspath(join_path(path, mirror_archive_path(spec)))
+ archive_path = os.path.abspath(join_path(mirror_root, mirror_archive_path(spec)))
subdir = os.path.dirname(archive_path)
mkdirp(subdir)
if os.path.exists(archive_path):
tty.msg("Already added %s" % spec.format("$_$@"))
+ else:
+ everything_already_exists = False
+ # Set up a stage and a fetcher for the download
+ unique_fetch_name = spec.format("$_$@")
+ fetcher = fs.for_package_version(pkg, pkg.version)
+ stage = Stage(fetcher, name=unique_fetch_name)
+ fetcher.set_stage(stage)
+
+ # Do the fetch and checksum if necessary
+ fetcher.fetch()
+ if not kwargs.get('no_checksum', False):
+ fetcher.check()
+ tty.msg("Checksum passed for %s@%s" % (pkg.name, pkg.version))
+
+ # Fetchers have to know how to archive their files. Use
+ # that to move/copy/create an archive in the mirror.
+ fetcher.archive(archive_path)
+ tty.msg("Added %s." % spec.format("$_$@"))
+
+ # Fetch resources if they are associated with the spec
+ resources = pkg._get_resources()
+ for resource in resources:
+ resource_archive_path = join_path(subdir, suggest_archive_basename(resource))
+ if os.path.exists(resource_archive_path):
+ tty.msg("Already added resource %s (%s@%s)." % (resource.name, pkg.name, pkg.version))
+ continue
+ everything_already_exists = False
+ resource_stage_folder = pkg._resource_stage(resource)
+ resource_stage = Stage(resource.fetcher, name=resource_stage_folder)
+ resource.fetcher.set_stage(resource_stage)
+ resource.fetcher.fetch()
+ if not kwargs.get('no_checksum', False):
+ resource.fetcher.check()
+ tty.msg("Checksum passed for the resource %s (%s@%s)" % (resource.name, pkg.name, pkg.version))
+ resource.fetcher.archive(resource_archive_path)
+ tty.msg("Added resource %s (%s@%s)." % (resource.name, pkg.name, pkg.version))
+
+ if everything_already_exists:
present.append(spec)
- continue
-
- # Set up a stage and a fetcher for the download
- unique_fetch_name = spec.format("$_$@")
- fetcher = fs.for_package_version(pkg, pkg.version)
- stage = Stage(fetcher, name=unique_fetch_name)
- fetcher.set_stage(stage)
-
- # Do the fetch and checksum if necessary
- fetcher.fetch()
- if not kwargs.get('no_checksum', False):
- fetcher.check()
- tty.msg("Checksum passed for %s@%s" % (pkg.name, pkg.version))
-
- # Fetchers have to know how to archive their files. Use
- # that to move/copy/create an archive in the mirror.
- fetcher.archive(archive_path)
- tty.msg("Added %s." % spec.format("$_$@"))
- mirrored.append(spec)
+ else:
+ mirrored.append(spec)
except Exception, e:
if spack.debug:
diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py
index 56a61adefb..7036626e29 100644
--- a/lib/spack/spack/modules.py
+++ b/lib/spack/spack/modules.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -29,11 +29,11 @@ The various types of modules are installed by post-install hooks and
removed after an uninstall by post-uninstall hooks. This class
consolidates the logic for creating an abstract description of the
information that module systems need. Currently that includes a
-number directories to be appended to paths in the user's environment:
+number of directories to be appended to paths in the user's environment:
* /bin directories to be appended to PATH
* /lib* directories for LD_LIBRARY_PATH
- * /man* and /share/man* directories for LD_LIBRARY_PATH
+ * /man* and /share/man* directories for MANPATH
* the package prefix for CMAKE_PREFIX_PATH
This module also includes logic for coming up with unique names for
diff --git a/lib/spack/spack/multimethod.py b/lib/spack/spack/multimethod.py
index 892619c6ac..df9b9b2ab1 100644
--- a/lib/spack/spack/multimethod.py
+++ b/lib/spack/spack/multimethod.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index 1e2f0378c8..8dae85cf11 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -34,7 +34,9 @@ rundown on spack and how it differs from homebrew, look at the
README.
"""
import os
+import errno
import re
+import shutil
import time
import itertools
import subprocess
@@ -372,7 +374,7 @@ class Package(object):
self._total_time = 0.0
if self.is_extension:
- spack.db.get(self.extendee_spec)._check_extendable()
+ spack.repo.get(self.extendee_spec)._check_extendable()
@property
@@ -487,9 +489,15 @@ class Package(object):
if name == dep.name:
return dep
- # Otherwise return the spec from the extends() directive
- spec, kwargs = self.extendees[name]
- return spec
+ # if the spec is concrete already, then it extends something
+ # that is an *optional* dependency, and the dep isn't there.
+ if self.spec._concrete:
+ return None
+ else:
+ # If it's not concrete, then return the spec from the
+ # extends() directive since that is all we know so far.
+ spec, kwargs = self.extendees[name]
+ return spec
@property
@@ -497,18 +505,28 @@ class Package(object):
"""Spec of the extendee of this package, or None if it is not an extension."""
if not self.extendees:
return None
+
+ # TODO: allow multiple extendees.
name = next(iter(self.extendees))
return self.extendees[name][1]
@property
def is_extension(self):
- return len(self.extendees) > 0
+ # if it is concrete, it's only an extension if it actually
+ # depends on the extendee.
+ if self.spec._concrete:
+ return self.extendee_spec is not None
+ else:
+ # If not, then it's an extension if it *could* be an extension
+ return bool(self.extendees)
def extends(self, spec):
- return (spec.name in self.extendees and
- spec.satisfies(self.extendees[spec.name][0]))
+ if spec.name not in self.extendees:
+ return False
+ s = self.extendee_spec
+ return s and s.satisfies(spec)
@property
@@ -548,7 +566,7 @@ class Package(object):
yield spec
continue
- for pkg in spack.db.get(name).preorder_traversal(visited, **kwargs):
+ for pkg in spack.repo.get(name).preorder_traversal(visited, **kwargs):
yield pkg
@@ -570,9 +588,12 @@ class Package(object):
@property
def installed_dependents(self):
"""Return a list of the specs of all installed packages that depend
- on this one."""
+ on this one.
+
+ TODO: move this method to database.py?
+ """
dependents = []
- for spec in spack.db.installed_package_specs():
+ for spec in spack.installed_db.query():
if self.name == spec.name:
continue
for dep in spec.traverse():
@@ -636,26 +657,76 @@ class Package(object):
"Will not fetch %s." % self.spec.format('$_$@'), checksum_msg)
self.stage.fetch()
+
+ ##########
+ # Fetch resources
+ resources = self._get_resources()
+ for resource in resources:
+ resource_stage_folder = self._resource_stage(resource)
+ # FIXME : works only for URLFetchStrategy
+ resource_mirror = join_path(self.name, os.path.basename(resource.fetcher.url))
+ resource_stage = Stage(resource.fetcher, name=resource_stage_folder, mirror_path=resource_mirror)
+ resource.fetcher.set_stage(resource_stage)
+ # Delegate to stage object to trigger mirror logic
+ resource_stage.fetch()
+ resource_stage.check()
+ ##########
+
self._fetch_time = time.time() - start_time
if spack.do_checksum and self.version in self.versions:
self.stage.check()
-
def do_stage(self):
"""Unpacks the fetched tarball, then changes into the expanded tarball
directory."""
if not self.spec.concrete:
raise ValueError("Can only stage concrete packages.")
- self.do_fetch()
+ def _expand_archive(stage, name=self.name):
+ archive_dir = stage.source_path
+ if not archive_dir:
+ stage.expand_archive()
+ tty.msg("Created stage in %s." % stage.path)
+ else:
+ tty.msg("Already staged %s in %s." % (name, stage.path))
- archive_dir = self.stage.source_path
- if not archive_dir:
- self.stage.expand_archive()
- tty.msg("Created stage in %s." % self.stage.path)
- else:
- tty.msg("Already staged %s in %s." % (self.name, self.stage.path))
+
+ self.do_fetch()
+ _expand_archive(self.stage)
+
+ ##########
+ # Stage resources in appropriate path
+ resources = self._get_resources()
+ # TODO: this is to allow nested resources; a better
+ # solution would be good
+ for resource in sorted(resources, key=lambda res: len(res.destination)):
+ stage = resource.fetcher.stage
+ _expand_archive(stage, resource.name)
+ # Turn placement into a dict with relative paths
+ placement = os.path.basename(stage.source_path) if resource.placement is None else resource.placement
+ if not isinstance(placement, dict):
+ placement = {'': placement}
+ # Make the paths in the dictionary absolute and link
+ for key, value in placement.iteritems():
+ target_path = join_path(self.stage.source_path, resource.destination)
+ link_path = join_path(target_path, value)
+ source_path = join_path(stage.source_path, key)
+
+ try:
+ os.makedirs(target_path)
+ except OSError as err:
+ if err.errno == errno.EEXIST and os.path.isdir(target_path):
+ pass
+ else: raise
+
+ # NOTE: a reasonable fix for the TODO above might be to have
+ # these expand in place, but expand_archive does not offer
+ # this
+
+ if not os.path.exists(link_path):
+ shutil.move(source_path, link_path)
+ ##########
self.stage.chdir_to_source()
@@ -678,9 +749,10 @@ class Package(object):
# Construct paths to special files in the archive dir used to
# keep track of whether patches were successfully applied.
- archive_dir = self.stage.source_path
- good_file = join_path(archive_dir, '.spack_patched')
- bad_file = join_path(archive_dir, '.spack_patch_failed')
+ archive_dir = self.stage.source_path
+ good_file = join_path(archive_dir, '.spack_patched')
+ no_patches_file = join_path(archive_dir, '.spack_no_patches')
+ bad_file = join_path(archive_dir, '.spack_patch_failed')
# If we encounter an archive that failed to patch, restage it
# so that we can apply all the patches again.
@@ -694,29 +766,52 @@ class Package(object):
if os.path.isfile(good_file):
tty.msg("Already patched %s" % self.name)
return
+ elif os.path.isfile(no_patches_file):
+ tty.msg("No patches needed for %s." % self.name)
+ return
# Apply all the patches for specs that match this one
+ patched = False
for spec, patch_list in self.patches.items():
if self.spec.satisfies(spec):
for patch in patch_list:
- tty.msg('Applying patch %s' % patch.path_or_url)
try:
patch.apply(self.stage)
+ tty.msg('Applied patch %s' % patch.path_or_url)
+ patched = True
except:
# Touch bad file if anything goes wrong.
+ tty.msg('Patch %s failed.' % patch.path_or_url)
touch(bad_file)
raise
- # patch succeeded. Get rid of failed file & touch good file so we
- # don't try to patch again again next time.
+ if has_patch_fun:
+ try:
+ self.patch()
+ tty.msg("Ran patch() for %s." % self.name)
+ patched = True
+ except:
+ tty.msg("patch() function failed for %s." % self.name)
+ touch(bad_file)
+ raise
+
+ # Get rid of any old failed file -- patches have either succeeded
+ # or are not needed. This is mostly defensive -- it's needed
+ # if the restage() method doesn't clean *everything* (e.g., for a repo)
if os.path.isfile(bad_file):
os.remove(bad_file)
- touch(good_file)
- if has_patch_fun:
- self.patch()
+ # touch good or no patches file so that we skip next time.
+ if patched:
+ touch(good_file)
+ else:
+ touch(no_patches_file)
- tty.msg("Patched %s" % self.name)
+
+ @property
+ def namespace(self):
+ namespace, dot, module = self.__module__.rpartition('.')
+ return namespace
def do_fake_install(self):
@@ -727,6 +822,19 @@ class Package(object):
mkdirp(self.prefix.man1)
+ def _get_resources(self):
+ resources = []
+ # Select the resources that are needed for this build
+ for when_spec, resource_list in self.resources.items():
+ if when_spec in self.spec:
+ resources.extend(resource_list)
+ return resources
+
+ def _resource_stage(self, resource):
+ pieces = ['resource', resource.name, self.spec.dag_hash()]
+ resource_stage_folder = '-'.join(pieces)
+ return resource_stage_folder
+
def _build_logger(self, log_path):
"""Create a context manager to log build output."""
@@ -753,6 +861,7 @@ class Package(object):
raise ValueError("Can only install concrete packages.")
if self.spec.external:
+ tty.msg("%s is externally installed in %s." % (self.name, self.spec.external))
return
if os.path.exists(self.prefix):
@@ -787,7 +896,8 @@ class Package(object):
tty.warn("Keeping install prefix in place despite error.",
"Spack will think this package is installed." +
"Manually remove this directory to fix:",
- self.prefix)
+ self.prefix, wrap=True)
+
def real_work():
try:
@@ -848,6 +958,10 @@ class Package(object):
# Do the build.
spack.build_environment.fork(self, real_work)
+ # note: PARENT of the build process adds the new package to
+ # the database, so that we don't need to re-read from file.
+ spack.installed_db.add(self.spec, self.prefix)
+
# Once everything else is done, run post install hooks
spack.hooks.post_install(self)
@@ -867,6 +981,14 @@ class Package(object):
@property
+ def build_log_path(self):
+ if self.installed:
+ return spack.install_layout.build_log_path(self.spec)
+ else:
+ return join_path(self.stage.source_path, 'spack-build.out')
+
+
+ @property
def module(self):
"""Use this to add variables to the class's module's scope.
This lets us use custom syntax in the install method.
@@ -920,6 +1042,7 @@ class Package(object):
# Uninstalling in Spack only requires removing the prefix.
self.remove_prefix()
+ spack.installed_db.remove(self.spec)
tty.msg("Successfully uninstalled %s." % self.spec.short_spec)
# Once everything else is done, run post install hooks
@@ -1085,7 +1208,7 @@ class Package(object):
raise VersionFetchError(self.__class__)
try:
- return find_versions_of_archive(
+ return spack.util.web.find_versions_of_archive(
*self.all_urls, list_url=self.list_url, list_depth=self.list_depth)
except spack.error.NoNetworkConnectionError, e:
tty.die("Package.fetch_versions couldn't connect to:",
@@ -1109,49 +1232,6 @@ class Package(object):
return " ".join("-Wl,-rpath=%s" % p for p in self.rpath)
-def find_versions_of_archive(*archive_urls, **kwargs):
- list_url = kwargs.get('list_url', None)
- list_depth = kwargs.get('list_depth', 1)
-
- # Generate a list of list_urls based on archive urls and any
- # explicitly listed list_url in the package
- list_urls = set()
- if list_url:
- list_urls.add(list_url)
- for aurl in archive_urls:
- list_urls.add(spack.url.find_list_url(aurl))
-
- # Grab some web pages to scrape.
- page_map = {}
- for lurl in list_urls:
- pages = spack.util.web.get_pages(lurl, depth=list_depth)
- page_map.update(pages)
-
- # Scrape them for archive URLs
- regexes = []
- for aurl in archive_urls:
- # This creates a regex from the URL with a capture group for
- # the version part of the URL. The capture group is converted
- # to a generic wildcard, so we can use this to extract things
- # on a page that look like archive URLs.
- url_regex = spack.url.wildcard_version(aurl)
-
- # We'll be a bit more liberal and just look for the archive
- # part, not the full path.
- regexes.append(os.path.basename(url_regex))
-
- # Build a version list from all the matches we find
- versions = {}
- for page_url, content in page_map.iteritems():
- # extract versions from matches.
- for regex in regexes:
- versions.update(
- (Version(m.group(1)), urljoin(page_url, m.group(0)))
- for m in re.finditer(regex, content))
-
- return versions
-
-
def validate_package_url(url_string):
"""Determine whether spack can handle a particular URL or not."""
url = urlparse(url_string)
diff --git a/lib/spack/spack/packages.py b/lib/spack/spack/packages.py
deleted file mode 100644
index adfbc26c1d..0000000000
--- a/lib/spack/spack/packages.py
+++ /dev/null
@@ -1,251 +0,0 @@
-##############################################################################
-# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
-# Produced at the Lawrence Livermore National Laboratory.
-#
-# This file is part of Spack.
-# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
-# LLNL-CODE-647188
-#
-# For details, see https://scalability-llnl.github.io/spack
-# Please also see the LICENSE file for our notice and the LGPL.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License (as published by
-# the Free Software Foundation) version 2.1 dated February 1999.
-#
-# This program is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
-# conditions of the GNU General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-##############################################################################
-import os
-import sys
-import inspect
-import glob
-import imp
-
-import llnl.util.tty as tty
-from llnl.util.filesystem import join_path
-from llnl.util.lang import *
-
-import spack.error
-import spack.spec
-from spack.virtual import ProviderIndex
-from spack.util.naming import mod_to_class, validate_module_name
-
-# Name of module under which packages are imported
-_imported_packages_module = 'spack.packages'
-
-# Name of the package file inside a package directory
-_package_file_name = 'package.py'
-
-
-def _autospec(function):
- """Decorator that automatically converts the argument of a single-arg
- function to a Spec."""
- def converter(self, spec_like, **kwargs):
- if not isinstance(spec_like, spack.spec.Spec):
- spec_like = spack.spec.Spec(spec_like)
- return function(self, spec_like, **kwargs)
- return converter
-
-
-class PackageDB(object):
- def __init__(self, root):
- """Construct a new package database from a root directory."""
- self.root = root
- self.instances = {}
- self.provider_index = None
-
-
- @_autospec
- def get(self, spec, **kwargs):
- if spec.virtual:
- raise UnknownPackageError(spec.name)
-
- if kwargs.get('new', False):
- if spec in self.instances:
- del self.instances[spec]
-
- if not spec in self.instances:
- package_class = self.get_class_for_package_name(spec.name)
- try:
- copy = spec.copy()
- self.instances[copy] = package_class(copy)
- except Exception, e:
- if spack.debug:
- sys.excepthook(*sys.exc_info())
- raise FailedConstructorError(spec.name, e)
-
- return self.instances[spec]
-
-
- @_autospec
- def delete(self, spec):
- """Force a package to be recreated."""
- del self.instances[spec]
-
-
- def purge(self):
- """Clear entire package instance cache."""
- self.instances.clear()
-
-
- @_autospec
- def get_installed(self, spec):
- """Get all the installed specs that satisfy the provided spec constraint."""
- return [s for s in self.installed_package_specs() if s.satisfies(spec)]
-
-
- @_autospec
- def providers_for(self, vpkg_spec):
- if self.provider_index is None:
- self.provider_index = ProviderIndex(self.all_package_names())
-
- providers = self.provider_index.providers_for(vpkg_spec)
- if not providers:
- raise UnknownPackageError(vpkg_spec.name)
- return providers
-
-
- @_autospec
- def extensions_for(self, extendee_spec):
- return [p for p in self.all_packages() if p.extends(extendee_spec)]
-
-
- @_autospec
- def installed_extensions_for(self, extendee_spec):
- for s in self.installed_package_specs():
- try:
- if s.package.extends(extendee_spec):
- yield s.package
- except UnknownPackageError, e:
- # Skip packages we know nothing about
- continue
- # TODO: add some conditional way to do this instead of
- # catching exceptions.
-
-
- def dirname_for_package_name(self, pkg_name):
- """Get the directory name for a particular package. This is the
- directory that contains its package.py file."""
- return join_path(self.root, pkg_name)
-
-
- def filename_for_package_name(self, pkg_name):
- """Get the filename for the module we should load for a particular
- package. Packages for a pacakge DB live in
- ``$root/<package_name>/package.py``
-
- This will return a proper package.py path even if the
- package doesn't exist yet, so callers will need to ensure
- the package exists before importing.
- """
- validate_module_name(pkg_name)
- pkg_dir = self.dirname_for_package_name(pkg_name)
- return join_path(pkg_dir, _package_file_name)
-
-
- def installed_package_specs(self):
- """Read installed package names straight from the install directory
- layout.
- """
- # Get specs from the directory layout but ensure that they're
- # all normalized properly.
- installed = []
- for spec in spack.install_layout.all_specs():
- spec.normalize()
- installed.append(spec)
- return installed
-
-
- def installed_known_package_specs(self):
- """Read installed package names straight from the install
- directory layout, but return only specs for which the
- package is known to this version of spack.
- """
- for spec in spack.install_layout.all_specs():
- if self.exists(spec.name):
- yield spec
-
-
- @memoized
- def all_package_names(self):
- """Generator function for all packages. This looks for
- ``<pkg_name>/package.py`` files within the root direcotry"""
- all_package_names = []
- for pkg_name in os.listdir(self.root):
- pkg_dir = join_path(self.root, pkg_name)
- pkg_file = join_path(pkg_dir, _package_file_name)
- if os.path.isfile(pkg_file):
- all_package_names.append(pkg_name)
- all_package_names.sort()
- return all_package_names
-
-
- def all_packages(self):
- for name in self.all_package_names():
- yield self.get(name)
-
-
- @memoized
- def exists(self, pkg_name):
- """Whether a package with the supplied name exists ."""
- return os.path.exists(self.filename_for_package_name(pkg_name))
-
-
- @memoized
- def get_class_for_package_name(self, pkg_name):
- """Get an instance of the class for a particular package.
-
- This method uses Python's ``imp`` package to load python
- source from a Spack package's ``package.py`` file. A
- normal python import would only load each package once, but
- because we do this dynamically, the method needs to be
- memoized to ensure there is only ONE package class
- instance, per package, per database.
- """
- file_path = self.filename_for_package_name(pkg_name)
-
- if os.path.exists(file_path):
- if not os.path.isfile(file_path):
- tty.die("Something's wrong. '%s' is not a file!" % file_path)
- if not os.access(file_path, os.R_OK):
- tty.die("Cannot read '%s'!" % file_path)
- else:
- raise UnknownPackageError(pkg_name)
-
- class_name = mod_to_class(pkg_name)
- try:
- module_name = _imported_packages_module + '.' + pkg_name
- module = imp.load_source(module_name, file_path)
-
- except ImportError, e:
- tty.die("Error while importing %s from %s:\n%s" % (
- pkg_name, file_path, e.message))
-
- cls = getattr(module, class_name)
- if not inspect.isclass(cls):
- tty.die("%s.%s is not a class" % (pkg_name, class_name))
-
- return cls
-
-
-class UnknownPackageError(spack.error.SpackError):
- """Raised when we encounter a package spack doesn't have."""
- def __init__(self, name):
- super(UnknownPackageError, self).__init__("Package '%s' not found." % name)
- self.name = name
-
-
-class FailedConstructorError(spack.error.SpackError):
- """Raised when a package's class constructor fails."""
- def __init__(self, name, reason):
- super(FailedConstructorError, self).__init__(
- "Class constructor failed for package '%s'." % name,
- str(reason))
- self.name = name
diff --git a/lib/spack/spack/parse.py b/lib/spack/spack/parse.py
index bc12ec258c..e9467aa685 100644
--- a/lib/spack/spack/parse.py
+++ b/lib/spack/spack/parse.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/lib/spack/spack/patch.py b/lib/spack/spack/patch.py
index b1b6e07738..b82a047753 100644
--- a/lib/spack/spack/patch.py
+++ b/lib/spack/spack/patch.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -41,8 +41,8 @@ class Patch(object):
"""This class describes a patch to be applied to some expanded
source code."""
- def __init__(self, pkg_name, path_or_url, level):
- self.pkg_name = pkg_name
+ def __init__(self, pkg, path_or_url, level):
+ self.pkg_name = pkg.name
self.path_or_url = path_or_url
self.path = None
self.url = None
@@ -54,7 +54,7 @@ class Patch(object):
if '://' in path_or_url:
self.url = path_or_url
else:
- pkg_dir = spack.db.dirname_for_package_name(pkg_name)
+ pkg_dir = spack.repo.dirname_for_package_name(self.pkg_name)
self.path = join_path(pkg_dir, path_or_url)
if not os.path.isfile(self.path):
-                raise NoSuchPatchFileError(pkg_name, self.path)
+                raise NoSuchPatchFileError(self.pkg_name, self.path)
diff --git a/lib/spack/spack/preferred_packages.py b/lib/spack/spack/preferred_packages.py
index bc2a4ac234..2b0ba791b6 100644
--- a/lib/spack/spack/preferred_packages.py
+++ b/lib/spack/spack/preferred_packages.py
@@ -30,7 +30,7 @@ class PreferredPackages(object):
_default_order = {'compiler' : [ 'gcc', 'intel', 'clang', 'pgi', 'xlc' ] }, #Arbitrary, but consistent
def __init__(self):
- self.preferred = spack.config.get_preferred_config()
+ self.preferred = spack.config.get_config('packages')
self._spec_for_pkgname_cache = {}
#Given a package name, sort component (e.g, version, compiler, ...), and
@@ -40,22 +40,12 @@ class PreferredPackages(object):
if test_all:
pkglist.append('all')
for pkg in pkglist:
- if not pkg in self.preferred:
- continue
- orders = self.preferred[pkg]
- if not type(orders) is dict:
- continue
- if not component in orders:
- continue
- order = orders[component]
+ order = self.preferred.get(pkg, {}).get(component, {})
if type(order) is dict:
- if not second_key in order:
- continue;
- order = order[second_key]
- if not type(order) is str:
- tty.die('Expected version list in preferred config, but got %s' % str(order))
- order_list = order.split(',')
- return [s.strip() for s in order_list]
+ order = order.get(second_key, {})
+ if not order:
+ continue
+ return [s.strip() for s in order]
return []
@@ -63,6 +53,10 @@ class PreferredPackages(object):
# component, return less-than-0, 0, or greater-than-0 if
# a is respectively less-than, equal to, or greater than b.
def _component_compare(self, pkgname, component, a, b, reverse_natural_compare, second_key):
+ if a is None:
+ return -1
+ if b is None:
+ return 1
orderlist = self._order_for_package(pkgname, component, second_key)
a_in_list = str(a) in orderlist
b_in_list = str(b) in orderlist
@@ -95,6 +89,10 @@ class PreferredPackages(object):
# a and b are considered to match entries in the sorting list if they
# satisfy the list component.
def _spec_compare(self, pkgname, component, a, b, reverse_natural_compare, second_key):
+ if not a or not a.concrete:
+ return -1
+ if not b or not b.concrete:
+ return 1
specs = self._spec_for_pkgname(pkgname, component, second_key)
a_index = None
b_index = None
diff --git a/lib/spack/spack/repository.py b/lib/spack/spack/repository.py
new file mode 100644
index 0000000000..31596cee7a
--- /dev/null
+++ b/lib/spack/spack/repository.py
@@ -0,0 +1,747 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import os
+import exceptions
+import sys
+import inspect
+import imp
+import re
+import traceback
+from bisect import bisect_left
+from external import yaml
+
+import llnl.util.tty as tty
+from llnl.util.filesystem import join_path
+
+import spack.error
+import spack.config
+import spack.spec
+from spack.virtual import ProviderIndex
+from spack.util.naming import *
+
+#
+# Super-namespace for all packages.
+# Package modules are imported as spack.pkg.<namespace>.<pkg-name>.
+#
+repo_namespace = 'spack.pkg'
+
+#
+# These names describe how repos should be laid out in the filesystem.
+#
+repo_config_name = 'repo.yaml' # Top-level filename for repo config.
+packages_dir_name = 'packages' # Top-level repo directory containing pkgs.
+package_file_name = 'package.py' # Filename for packages in a repository.
+
+# Guaranteed unused default value for some functions.
+NOT_PROVIDED = object()
+
+
+def _autospec(function):
+ """Decorator that automatically converts the argument of a single-arg
+ function to a Spec."""
+ def converter(self, spec_like, *args, **kwargs):
+ if not isinstance(spec_like, spack.spec.Spec):
+ spec_like = spack.spec.Spec(spec_like)
+ return function(self, spec_like, *args, **kwargs)
+ return converter
+
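+# e.g. (illustrative): with @_autospec, repo.providers_for('mpi@2') behaves
+# exactly like repo.providers_for(spack.spec.Spec('mpi@2')).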
+
+def _make_namespace_module(ns):
+ module = imp.new_module(ns)
+ module.__file__ = "(spack namespace)"
+ module.__path__ = []
+ module.__package__ = ns
+ return module
+
+
+def substitute_spack_prefix(path):
+ """Replaces instances of $spack with Spack's prefix."""
+ return re.sub(r'^\$spack', spack.prefix, path)
+
+
+def canonicalize_path(path):
+ """Substitute $spack, expand user home, take abspath."""
+ path = substitute_spack_prefix(path)
+ path = os.path.expanduser(path)
+ path = os.path.abspath(path)
+ return path
+
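+# Illustrative behavior, assuming spack.prefix == '/opt/spack' and
+# $HOME == '/home/user' (values hypothetical):
+#
+#     canonicalize_path('$spack/var/repos')  # -> '/opt/spack/var/repos'
+#     canonicalize_path('~/my-repo')         # -> '/home/user/my-repo'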
+
+class RepoPath(object):
+ """A RepoPath is a list of repos that function as one.
+
+ It functions exactly like a Repo, but it operates on the
+ combined results of the Repos in its list instead of on a
+ single package repository.
+ """
+ def __init__(self, *repo_dirs, **kwargs):
+ # super-namespace for all packages in the RepoPath
+ self.super_namespace = kwargs.get('namespace', repo_namespace)
+
+ self.repos = []
+ self.by_namespace = NamespaceTrie()
+ self.by_path = {}
+
+ self._all_package_names = []
+ self._provider_index = None
+
+ # If repo_dirs is empty, just use the configuration
+ if not repo_dirs:
+ repo_dirs = spack.config.get_config('repos')
+ if not repo_dirs:
+ raise NoRepoConfiguredError(
+ "Spack configuration contains no package repositories.")
+
+ # Add each repo to this path.
+ for root in repo_dirs:
+ try:
+ repo = Repo(root, self.super_namespace)
+ self.put_last(repo)
+ except RepoError as e:
+ tty.warn("Failed to initialize repository at '%s'." % root,
+ e.message,
+ "To remove the bad repository, run this command:",
+ " spack repo rm %s" % root)
+
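+    # Illustrative construction (paths hypothetical); repos are searched in
+    # order, so the first repo listed wins for duplicate package names:
+    #
+    #     repos = RepoPath('$spack/var/spack/repos/builtin', '/path/to/site-repo')
+    #     repos.first_repo()   # highest-precedence Repo
+    #     'mpich' in repos     # True if any repo defines mpich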
+
+ def swap(self, other):
+ """Convenience function to make swapping repostiories easier.
+
+ This is currently used by mock tests.
+ TODO: Maybe there is a cleaner way.
+
+ """
+ attrs = ['repos',
+ 'by_namespace',
+ 'by_path',
+ '_all_package_names',
+ '_provider_index']
+ for attr in attrs:
+ tmp = getattr(self, attr)
+ setattr(self, attr, getattr(other, attr))
+ setattr(other, attr, tmp)
+
+
+ def _add(self, repo):
+ """Add a repository to the namespace and path indexes.
+
+        Checks for duplicates -- two repos can't have the same root
+        directory, and they can't provide the same namespace.
+
+ """
+ if repo.root in self.by_path:
+ raise DuplicateRepoError("Duplicate repository: '%s'" % repo.root)
+
+ if repo.namespace in self.by_namespace:
+ raise DuplicateRepoError(
+ "Package repos '%s' and '%s' both provide namespace %s."
+ % (repo.root, self.by_namespace[repo.namespace].root, repo.namespace))
+
+ # Add repo to the pkg indexes
+ self.by_namespace[repo.full_namespace] = repo
+ self.by_path[repo.root] = repo
+
+ # add names to the cached name list
+ new_pkgs = set(repo.all_package_names())
+ new_pkgs.update(set(self._all_package_names))
+        self._all_package_names = sorted(new_pkgs, key=lambda n: n.lower())
+
+
+ def put_first(self, repo):
+ """Add repo first in the search path."""
+ self._add(repo)
+ self.repos.insert(0, repo)
+
+
+ def put_last(self, repo):
+ """Add repo last in the search path."""
+ self._add(repo)
+ self.repos.append(repo)
+
+
+ def remove(self, repo):
+ """Remove a repo from the search path."""
+ if repo in self.repos:
+ self.repos.remove(repo)
+
+
+ def get_repo(self, namespace, default=NOT_PROVIDED):
+ """Get a repository by namespace.
+ Arguments
+ namespace
+ Look up this namespace in the RepoPath, and return
+ it if found.
+
+ Optional Arguments
+ default
+ If default is provided, return it when the namespace
+ isn't found. If not, raise an UnknownNamespaceError.
+ """
+ fullspace = '%s.%s' % (self.super_namespace, namespace)
+ if fullspace not in self.by_namespace:
+            if default is NOT_PROVIDED:
+ raise UnknownNamespaceError(namespace)
+ return default
+ return self.by_namespace[fullspace]
+
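+    # e.g. (illustrative): get_repo('builtin') returns the Repo whose
+    # repo.yaml declares namespace 'builtin'; with no default argument an
+    # unknown namespace raises UnknownNamespaceError.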
+
+ def first_repo(self):
+ """Get the first repo in precedence order."""
+ return self.repos[0] if self.repos else None
+
+
+ def all_package_names(self):
+ """Return all unique package names in all repositories."""
+ return self._all_package_names
+
+
+ def all_packages(self):
+ for name in self.all_package_names():
+ yield self.get(name)
+
+
+ @_autospec
+ def providers_for(self, vpkg_spec):
+ if self._provider_index is None:
+ self._provider_index = ProviderIndex(self.all_package_names())
+
+ providers = self._provider_index.providers_for(vpkg_spec)
+ if not providers:
+ raise UnknownPackageError(vpkg_spec.name)
+ return providers
+
+
+ def find_module(self, fullname, path=None):
+ """Implements precedence for overlaid namespaces.
+
+ Loop checks each namespace in self.repos for packages, and
+ also handles loading empty containing namespaces.
+
+ """
+ # namespaces are added to repo, and package modules are leaves.
+ namespace, dot, module_name = fullname.rpartition('.')
+
+ # If it's a module in some repo, or if it is the repo's
+ # namespace, let the repo handle it.
+ for repo in self.repos:
+ if namespace == repo.full_namespace:
+ if repo.real_name(module_name):
+ return repo
+ elif fullname == repo.full_namespace:
+ return repo
+
+ # No repo provides the namespace, but it is a valid prefix of
+ # something in the RepoPath.
+ if self.by_namespace.is_prefix(fullname):
+ return self
+
+ return None
+
+
+ def load_module(self, fullname):
+ """Handles loading container namespaces when necessary.
+
+ See ``Repo`` for how actual package modules are loaded.
+ """
+ if fullname in sys.modules:
+ return sys.modules[fullname]
+
+ # partition fullname into prefix and module name.
+ namespace, dot, module_name = fullname.rpartition('.')
+
+ if not self.by_namespace.is_prefix(fullname):
+ raise ImportError("No such Spack repo: %s" % fullname)
+
+ module = _make_namespace_module(namespace)
+ module.__loader__ = self
+ sys.modules[fullname] = module
+ return module
+
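+    # find_module/load_module implement Python's PEP 302 importer protocol,
+    # so once a RepoPath is registered as an import hook, packages load like
+    # ordinary modules, e.g.:
+    #
+    #     import spack.pkg.builtin.mpich as mpich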
+
+ @_autospec
+ def repo_for_pkg(self, spec):
+ """Given a spec, get the repository for its package."""
+ # If the spec already has a namespace, then return the
+ # corresponding repo if we know about it.
+ if spec.namespace:
+ fullspace = '%s.%s' % (self.super_namespace, spec.namespace)
+ if fullspace not in self.by_namespace:
+ raise UnknownNamespaceError(spec.namespace)
+ return self.by_namespace[fullspace]
+
+ # If there's no namespace, search in the RepoPath.
+ for repo in self.repos:
+ if spec.name in repo:
+ return repo
+ else:
+ raise UnknownPackageError(spec.name)
+
+
+ @_autospec
+ def get(self, spec, new=False):
+ """Find a repo that contains the supplied spec's package.
+
+ Raises UnknownPackageError if not found.
+ """
+ return self.repo_for_pkg(spec).get(spec)
+
+
+ def dirname_for_package_name(self, pkg_name):
+ return self.repo_for_pkg(pkg_name).dirname_for_package_name(pkg_name)
+
+
+ def filename_for_package_name(self, pkg_name):
+ return self.repo_for_pkg(pkg_name).filename_for_package_name(pkg_name)
+
+
+ def exists(self, pkg_name):
+ return any(repo.exists(pkg_name) for repo in self.repos)
+
+
+ def __contains__(self, pkg_name):
+ return self.exists(pkg_name)
+
+
+class Repo(object):
+ """Class representing a package repository in the filesystem.
+
+ Each package repository must have a top-level configuration file
+ called `repo.yaml`.
+
+    Currently, `repo.yaml` must define:
+
+ `namespace`:
+ A Python namespace where the repository's packages should live.
+
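+    A minimal example (the namespace value is illustrative):
+
+        repo:
+          namespace: builtin
+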
+ """
+ def __init__(self, root, namespace=repo_namespace):
+ """Instantiate a package repository from a filesystem path.
+
+ Arguments:
+ root The root directory of the repository.
+
+ namespace A super-namespace that will contain the repo-defined
+                   namespace (this is generally just `spack.pkg`). The
+ super-namespace is Spack's way of separating repositories
+ from other python namespaces.
+
+ """
+ # Root directory, containing _repo.yaml and package dirs
+        # Allow roots to be spack-relative by starting with '$spack'
+ self.root = canonicalize_path(root)
+
+ # super-namespace for all packages in the Repo
+ self.super_namespace = namespace
+
+ # check and raise BadRepoError on fail.
+ def check(condition, msg):
+ if not condition: raise BadRepoError(msg)
+
+ # Validate repository layout.
+ self.config_file = join_path(self.root, repo_config_name)
+ check(os.path.isfile(self.config_file),
+ "No %s found in '%s'" % (repo_config_name, root))
+ self.packages_path = join_path(self.root, packages_dir_name)
+ check(os.path.isdir(self.packages_path),
+ "No directory '%s' found in '%s'" % (repo_config_name, root))
+
+ # Read configuration and validate namespace
+ config = self._read_config()
+ check('namespace' in config, '%s must define a namespace.'
+ % join_path(root, repo_config_name))
+
+ self.namespace = config['namespace']
+ check(re.match(r'[a-zA-Z][a-zA-Z0-9_.]+', self.namespace),
+ ("Invalid namespace '%s' in repo '%s'. " % (self.namespace, self.root)) +
+ "Namespaces must be valid python identifiers separated by '.'")
+
+ # Set up 'full_namespace' to include the super-namespace
+ if self.super_namespace:
+ self.full_namespace = "%s.%s" % (self.super_namespace, self.namespace)
+ else:
+ self.full_namespace = self.namespace
+
+ # Keep name components around for checking prefixes.
+ self._names = self.full_namespace.split('.')
+
+ # These are internal cache variables.
+ self._modules = {}
+ self._classes = {}
+ self._instances = {}
+ self._provider_index = None
+ self._all_package_names = None
+
+ # make sure the namespace for packages in this repo exists.
+ self._create_namespace()
+
+
+ def _create_namespace(self):
+ """Create this repo's namespace module and insert it into sys.modules.
+
+ Ensures that modules loaded via the repo have a home, and that
+ we don't get runtime warnings from Python's module system.
+
+ """
+ parent = None
+ for l in range(1, len(self._names)+1):
+ ns = '.'.join(self._names[:l])
+            if ns not in sys.modules:
+ module = _make_namespace_module(ns)
+ module.__loader__ = self
+ sys.modules[ns] = module
+
+            # Ensure the namespace is an attribute of its parent,
+ # if it has not been set by something else already.
+ #
+ # This ensures that we can do things like:
+ # import spack.pkg.builtin.mpich as mpich
+ if parent:
+ modname = self._names[l-1]
+ if not hasattr(parent, modname):
+ setattr(parent, modname, module)
+ else:
+ # no need to set up a module, but keep track of the parent.
+ module = sys.modules[ns]
+ parent = module
+
+
+ def real_name(self, import_name):
+ """Allow users to import Spack packages using Python identifiers.
+
+ A python identifier might map to many different Spack package
+ names due to hyphen/underscore ambiguity.
+
+ Easy example:
+ num3proxy -> 3proxy
+
+ Ambiguous:
+ foo_bar -> foo_bar, foo-bar
+
+ More ambiguous:
+ foo_bar_baz -> foo_bar_baz, foo-bar-baz, foo_bar-baz, foo-bar_baz
+ """
+ if import_name in self:
+ return import_name
+
+ options = possible_spack_module_names(import_name)
+ options.remove(import_name)
+ for name in options:
+ if name in self:
+ return name
+ return None
+
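+    # e.g. (illustrative): real_name('foo_bar') returns whichever of
+    # 'foo_bar' or 'foo-bar' actually exists in this repo, or None if
+    # neither does.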
+
+ def is_prefix(self, fullname):
+ """True if fullname is a prefix of this Repo's namespace."""
+ parts = fullname.split('.')
+ return self._names[:len(parts)] == parts
+
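+    # e.g. for full_namespace 'spack.pkg.builtin': is_prefix('spack') and
+    # is_prefix('spack.pkg') are True; is_prefix('spack.pkg.other') is False.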
+
+ def find_module(self, fullname, path=None):
+ """Python find_module import hook.
+
+ Returns this Repo if it can load the module; None if not.
+ """
+ if self.is_prefix(fullname):
+ return self
+
+ namespace, dot, module_name = fullname.rpartition('.')
+ if namespace == self.full_namespace:
+ if self.real_name(module_name):
+ return self
+
+ return None
+
+
+ def load_module(self, fullname):
+ """Python importer load hook.
+
+ Tries to load the module; raises an ImportError if it can't.
+ """
+ if fullname in sys.modules:
+ return sys.modules[fullname]
+
+ namespace, dot, module_name = fullname.rpartition('.')
+
+ if self.is_prefix(fullname):
+ module = _make_namespace_module(fullname)
+
+ elif namespace == self.full_namespace:
+ real_name = self.real_name(module_name)
+ if not real_name:
+ raise ImportError("No module %s in %s" % (module_name, self))
+ module = self._get_pkg_module(real_name)
+
+ else:
+ raise ImportError("No module %s in %s" % (fullname, self))
+
+ module.__loader__ = self
+ sys.modules[fullname] = module
+ return module
+
+
+ def _read_config(self):
+ """Check for a YAML config file in this db's root directory."""
+ try:
+ with open(self.config_file) as reponame_file:
+ yaml_data = yaml.load(reponame_file)
+
+ if (not yaml_data or 'repo' not in yaml_data or
+ not isinstance(yaml_data['repo'], dict)):
+ tty.die("Invalid %s in repository %s"
+ % (repo_config_name, self.root))
+
+ return yaml_data['repo']
+
+ except exceptions.IOError, e:
+ tty.die("Error reading %s when opening %s"
+ % (self.config_file, self.root))
+
+
+ @_autospec
+ def get(self, spec, new=False):
+ if spec.virtual:
+ raise UnknownPackageError(spec.name)
+
+ if spec.namespace and spec.namespace != self.namespace:
+ raise UnknownPackageError("Repository %s does not contain package %s."
+ % (self.namespace, spec.fullname))
+
+ key = hash(spec)
+ if new or key not in self._instances:
+ package_class = self._get_pkg_class(spec.name)
+ try:
+ copy = spec.copy() # defensive copy. Package owns its spec.
+ self._instances[key] = package_class(copy)
+ except Exception, e:
+ if spack.debug:
+ sys.excepthook(*sys.exc_info())
+ raise FailedConstructorError(spec.fullname, *sys.exc_info())
+
+ return self._instances[key]
+
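+    # Note: instances are cached by spec hash, so repeated get() calls with
+    # equal specs return the same Package object unless new=True is passed.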
+
+ def purge(self):
+ """Clear entire package instance cache."""
+ self._instances.clear()
+
+
+ @_autospec
+ def providers_for(self, vpkg_spec):
+ if self._provider_index is None:
+ self._provider_index = ProviderIndex(self.all_package_names())
+
+ providers = self._provider_index.providers_for(vpkg_spec)
+ if not providers:
+ raise UnknownPackageError(vpkg_spec.name)
+ return providers
+
+
+ @_autospec
+ def extensions_for(self, extendee_spec):
+ return [p for p in self.all_packages() if p.extends(extendee_spec)]
+
+
+ def _check_namespace(self, spec):
+ """Check that the spec's namespace is the same as this repository's."""
+ if spec.namespace and spec.namespace != self.namespace:
+ raise UnknownNamespaceError(spec.namespace)
+
+
+ @_autospec
+ def dirname_for_package_name(self, spec):
+ """Get the directory name for a particular package. This is the
+ directory that contains its package.py file."""
+ self._check_namespace(spec)
+ return join_path(self.packages_path, spec.name)
+
+
+ @_autospec
+ def filename_for_package_name(self, spec):
+ """Get the filename for the module we should load for a particular
+ package. Packages for a Repo live in
+ ``$root/<package_name>/package.py``
+
+ This will return a proper package.py path even if the
+ package doesn't exist yet, so callers will need to ensure
+ the package exists before importing.
+ """
+ self._check_namespace(spec)
+ pkg_dir = self.dirname_for_package_name(spec.name)
+ return join_path(pkg_dir, package_file_name)
+
+
+ def all_package_names(self):
+ """Returns a sorted list of all package names in the Repo."""
+ if self._all_package_names is None:
+ self._all_package_names = []
+
+ for pkg_name in os.listdir(self.packages_path):
+ # Skip non-directories in the package root.
+ pkg_dir = join_path(self.packages_path, pkg_name)
+ if not os.path.isdir(pkg_dir):
+ continue
+
+ # Skip directories without a package.py in them.
+ pkg_file = join_path(self.packages_path, pkg_name, package_file_name)
+ if not os.path.isfile(pkg_file):
+ continue
+
+ # Warn about invalid names that look like packages.
+ if not valid_module_name(pkg_name):
+ tty.warn("Skipping package at %s. '%s' is not a valid Spack module name."
+ % (pkg_dir, pkg_name))
+ continue
+
+ # All checks passed. Add it to the list.
+ self._all_package_names.append(pkg_name)
+ self._all_package_names.sort()
+
+ return self._all_package_names
+
+
+ def all_packages(self):
+ for name in self.all_package_names():
+ yield self.get(name)
+
+
+ def exists(self, pkg_name):
+ """Whether a package with the supplied name exists."""
+ # This does a binary search in the sorted list.
+ idx = bisect_left(self.all_package_names(), pkg_name)
+ return (idx < len(self._all_package_names) and
+ self._all_package_names[idx] == pkg_name)
+
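+    # e.g. (illustrative): with all_package_names() == ['hdf5', 'mpich', 'zlib'],
+    # bisect_left(..., 'mpich') == 1 and names[1] == 'mpich', so exists('mpich')
+    # is True, while exists('mpi') also lands on index 1 and is False.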
+
+ def _get_pkg_module(self, pkg_name):
+ """Create a module for a particular package.
+
+ This caches the module within this Repo *instance*. It does
+ *not* add it to ``sys.modules``. So, you can construct
+ multiple Repos for testing and ensure that the module will be
+ loaded once per repo.
+
+ """
+ if pkg_name not in self._modules:
+ file_path = self.filename_for_package_name(pkg_name)
+
+ if not os.path.exists(file_path):
+ raise UnknownPackageError(pkg_name, self)
+
+ if not os.path.isfile(file_path):
+ tty.die("Something's wrong. '%s' is not a file!" % file_path)
+
+ if not os.access(file_path, os.R_OK):
+ tty.die("Cannot read '%s'!" % file_path)
+
+ # e.g., spack.pkg.builtin.mpich
+ fullname = "%s.%s" % (self.full_namespace, pkg_name)
+
+ module = imp.load_source(fullname, file_path)
+ module.__package__ = self.full_namespace
+ module.__loader__ = self
+ self._modules[pkg_name] = module
+
+ return self._modules[pkg_name]
+
+
+ def _get_pkg_class(self, pkg_name):
+ """Get the class for the package out of its module.
+
+ First loads (or fetches from cache) a module for the
+ package. Then extracts the package class from the module
+ according to Spack's naming convention.
+ """
+ class_name = mod_to_class(pkg_name)
+ module = self._get_pkg_module(pkg_name)
+
+ cls = getattr(module, class_name)
+ if not inspect.isclass(cls):
+ tty.die("%s.%s is not a class" % (pkg_name, class_name))
+
+ return cls
+
+
+ def __str__(self):
+ return "[Repo '%s' at '%s']" % (self.namespace, self.root)
+
+
+ def __repr__(self):
+ return self.__str__()
+
+
+ def __contains__(self, pkg_name):
+ return self.exists(pkg_name)
+
+
+class RepoError(spack.error.SpackError):
+ """Superclass for repository-related errors."""
+
+
+class NoRepoConfiguredError(RepoError):
+ """Raised when there are no repositories configured."""
+
+
+class BadRepoError(RepoError):
+ """Raised when repo layout is invalid."""
+
+
+class DuplicateRepoError(RepoError):
+ """Raised when duplicate repos are added to a RepoPath."""
+
+
+class PackageLoadError(spack.error.SpackError):
+ """Superclass for errors related to loading packages."""
+
+
+class UnknownPackageError(PackageLoadError):
+ """Raised when we encounter a package spack doesn't have."""
+ def __init__(self, name, repo=None):
+ msg = None
+ if repo:
+ msg = "Package %s not found in repository %s." % (name, repo)
+ else:
+ msg = "Package %s not found." % name
+ super(UnknownPackageError, self).__init__(msg)
+ self.name = name
+
+
+class UnknownNamespaceError(PackageLoadError):
+ """Raised when we encounter an unknown namespace"""
+ def __init__(self, namespace):
+ super(UnknownNamespaceError, self).__init__(
+ "Unknown namespace: %s" % namespace)
+
+
+class FailedConstructorError(PackageLoadError):
+ """Raised when a package's class constructor fails."""
+ def __init__(self, name, exc_type, exc_obj, exc_tb):
+ super(FailedConstructorError, self).__init__(
+ "Class constructor failed for package '%s'." % name,
+ '\nCaused by:\n' +
+ ('%s: %s\n' % (exc_type.__name__, exc_obj)) +
+ ''.join(traceback.format_tb(exc_tb)))
+ self.name = name
diff --git a/lib/spack/spack/resource.py b/lib/spack/spack/resource.py
new file mode 100644
index 0000000000..2bf92947fd
--- /dev/null
+++ b/lib/spack/spack/resource.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""Describes an optional resource needed for a build.
+
+Typically a set of sources that can be built in-tree within another
+package to enable optional features.
+
+"""
+
+
+class Resource(object):
+ """
+    Represents an optional resource. Aggregates a name, a fetcher, a
+    destination, and a placement.
+ """
+ def __init__(self, name, fetcher, destination, placement):
+ self.name = name
+ self.fetcher = fetcher
+ self.destination = destination
+ self.placement = placement
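+
+
+# Illustrative construction (all names hypothetical): `fetcher` would be a
+# fetch strategy object (cf. spack.fetch_strategy) built from the resource's
+# URL or repository:
+#
+#     res = Resource('libfoo-extras', fetcher,
+#                    destination='extras', placement='libfoo')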
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index 49b67cd361..6f55065f01 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -96,8 +96,8 @@ import hashlib
import base64
from StringIO import StringIO
from operator import attrgetter
-from external import yaml
-from external.yaml.error import MarkedYAMLError
+import yaml
+from yaml.error import MarkedYAMLError
import llnl.util.tty as tty
from llnl.util.lang import *
@@ -412,6 +412,7 @@ class Spec(object):
self.dependencies = other.dependencies
self.variants = other.variants
self.variants.spec = self
+ self.namespace = other.namespace
# Specs are by default not assumed to be normal, but in some
# cases we've read them from a file want to assume normal.
@@ -465,6 +466,13 @@ class Spec(object):
self.dependencies[spec.name] = spec
spec.dependents[self.name] = self
+ #
+ # Public interface
+ #
+ @property
+ def fullname(self):
+ return '%s.%s' % (self.namespace, self.name) if self.namespace else self.name
+
@property
def root(self):
@@ -487,7 +495,7 @@ class Spec(object):
@property
def package(self):
- return spack.db.get(self)
+ return spack.repo.get(self)
@property
@@ -505,7 +513,7 @@ class Spec(object):
@staticmethod
def is_virtual(name):
"""Test if a name is virtual without requiring a Spec."""
- return not spack.db.exists(name)
+ return not spack.repo.exists(name)
@property
@@ -518,11 +526,13 @@ class Spec(object):
return True
self._concrete = bool(not self.virtual
+ and self.namespace is not None
and self.versions.concrete
and self.variants.concrete
and self.architecture
and self.compiler and self.compiler.concrete
and self.dependencies.concrete)
+
return self._concrete
@@ -641,7 +651,9 @@ class Spec(object):
def dag_hash(self, length=None):
- """Return a hash of the entire spec DAG, including connectivity."""
+ """
+ Return a hash of the entire spec DAG, including connectivity.
+ """
yaml_text = yaml.dump(
self.to_node_dict(), default_flow_style=True, width=sys.maxint)
sha = hashlib.sha1(yaml_text)
@@ -656,6 +668,12 @@ class Spec(object):
'dependencies' : dict((d, self.dependencies[d].dag_hash())
for d in sorted(self.dependencies))
}
+
+ # Older concrete specs do not have a namespace. Omit for
+ # consistent hashing.
+ if not self.concrete or self.namespace:
+ d['namespace'] = self.namespace
+
if self.compiler:
d.update(self.compiler.to_dict())
else:
@@ -680,6 +698,7 @@ class Spec(object):
node = node[name]
spec = Spec(name)
+ spec.namespace = node.get('namespace', None)
spec.versions = VersionList.from_dict(node)
spec.architecture = node['arch']
@@ -711,7 +730,7 @@ class Spec(object):
try:
yfile = yaml.load(stream)
except MarkedYAMLError, e:
- raise SpackYAMLError("error parsing YMAL spec:", str(e))
+ raise SpackYAMLError("error parsing YAML spec:", str(e))
for node in yfile['spec']:
name = next(iter(node))
@@ -815,6 +834,7 @@ class Spec(object):
with requirements of its pacakges. See flatten() and normalize() for
more details on this.
"""
+
if self._concrete:
return
@@ -827,7 +847,32 @@ class Spec(object):
self._concretize_helper())
changed = any(changes)
force=True
- self._concrete = True
+
+ for s in self.traverse():
+ # After concretizing, assign namespaces to anything left.
+ # Note that this doesn't count as a "change". The repository
+ # configuration is constant throughout a spack run, and
+ # normalize and concretize evaluate Packages using Repo.get(),
+ # which respects precedence. So, a namespace assignment isn't
+ # changing how a package name would have been interpreted and
+ # we can do it as late as possible to allow as much
+ # compatibility across repositories as possible.
+ if s.namespace is None:
+ s.namespace = spack.repo.repo_for_pkg(s.name).namespace
+
+ # Mark everything in the spec as concrete, as well.
+ self._mark_concrete()
+
+
+ def _mark_concrete(self):
+ """Mark this spec and its dependencies as concrete.
+
+ Only for internal use -- client code should use "concretize"
+ unless there is a need to force a spec to be concrete.
+ """
+ for s in self.traverse():
+ s._normal = True
+ s._concrete = True
def concretized(self):
@@ -902,7 +947,7 @@ class Spec(object):
the dependency. If no conditions are True (and we don't
depend on it), return None.
"""
- pkg = spack.db.get(self.name)
+ pkg = spack.repo.get(self.fullname)
conditions = pkg.dependencies[name]
# evaluate when specs to figure out constraints on the dependency.
@@ -1030,7 +1075,7 @@ class Spec(object):
any_change = False
changed = True
- pkg = spack.db.get(self.name)
+ pkg = spack.repo.get(self.fullname)
while changed:
changed = False
for dep_name in pkg.dependencies:
@@ -1051,18 +1096,17 @@ class Spec(object):
the root, and ONLY the ones that were explicitly provided are there.
Normalization turns a partial flat spec into a DAG, where:
- 1. ALL dependencies of the root package are in the DAG.
- 2. Each node's dependencies dict only contains its direct deps.
+ 1. Known dependencies of the root package are in the DAG.
+ 2. Each node's dependencies dict only contains its known direct deps.
3. There is only ONE unique spec for each package in the DAG.
* This includes virtual packages. If there a non-virtual
package that provides a virtual package that is in the spec,
then we replace the virtual package with the non-virtual one.
- 4. The spec DAG matches package DAG, including default variant values.
-
TODO: normalize should probably implement some form of cycle detection,
to ensure that the spec is actually a DAG.
+
"""
if self._normal and not force:
return False
@@ -1108,7 +1152,7 @@ class Spec(object):
for spec in self.traverse():
# Don't get a package for a virtual name.
if not spec.virtual:
- spack.db.get(spec.name)
+ spack.repo.get(spec.fullname)
# validate compiler in addition to the package name.
if spec.compiler:
@@ -1131,6 +1175,10 @@ class Spec(object):
if not self.name == other.name:
raise UnsatisfiableSpecNameError(self.name, other.name)
+ if other.namespace is not None:
+ if self.namespace is not None and other.namespace != self.namespace:
+ raise UnsatisfiableSpecNameError(self.fullname, other.fullname)
+
if not self.versions.overlaps(other.versions):
raise UnsatisfiableVersionSpecError(self.versions, other.versions)
@@ -1174,7 +1222,7 @@ class Spec(object):
# TODO: might want more detail than this, e.g. specific deps
# in violation. if this becomes a priority get rid of this
- # check and be more specici about what's wrong.
+ # check and be more specific about what's wrong.
if not other.satisfies_dependencies(self):
raise UnsatisfiableDependencySpecError(other, self)
@@ -1201,6 +1249,13 @@ class Spec(object):
return common
+ def constrained(self, other, deps=True):
+ """Return a constrained copy without modifying this spec."""
+ clone = self.copy(deps=deps)
+ clone.constrain(other, deps)
+ return clone
+
+
def dep_difference(self, other):
"""Returns dependencies in self that are not in other."""
mine = set(s.name for s in self.traverse(root=False))
@@ -1240,7 +1295,7 @@ class Spec(object):
# A concrete provider can satisfy a virtual dependency.
if not self.virtual and other.virtual:
- pkg = spack.db.get(self.name)
+ pkg = spack.repo.get(self.fullname)
if pkg.provides(other.name):
for provided, when_spec in pkg.provided.items():
if self.satisfies(when_spec, deps=False, strict=strict):
@@ -1252,6 +1307,11 @@ class Spec(object):
if self.name != other.name:
return False
+ # namespaces either match, or other doesn't require one.
+ if other.namespace is not None:
+ if self.namespace is not None and self.namespace != other.namespace:
+ return False
+
if self.versions and other.versions:
if not self.versions.satisfies(other.versions, strict=strict):
return False
@@ -1362,6 +1422,7 @@ class Spec(object):
self.variants = other.variants.copy()
self.variants.spec = self
self.external = other.external
+ self.namespace = other.namespace
# If we copy dependencies, preserve DAG structure in the new spec
if kwargs.get('deps', True):
@@ -1481,8 +1542,12 @@ class Spec(object):
def _cmp_node(self):
"""Comparison key for just *this node* and not its deps."""
- return (self.name, self.versions, self.variants,
- self.architecture, self.compiler)
+ return (self.name,
+ self.namespace,
+ self.versions,
+ self.variants,
+ self.architecture,
+ self.compiler)
def eq_node(self, other):
@@ -1496,11 +1561,15 @@ class Spec(object):
def _cmp_key(self):
- """Comparison key for this node and all dependencies *without*
- considering structure. This is the default, as
- normalization will restore structure.
+ """This returns a key for the spec *including* DAG structure.
+
+ The key is the concatenation of:
+ 1. A tuple describing this node in the DAG.
+ 2. The hash of each of this node's dependencies' cmp_keys.
"""
- return self._cmp_node() + (self.sorted_deps(),)
+ return self._cmp_node() + (
+ tuple(hash(self.dependencies[name])
+ for name in sorted(self.dependencies)),)
def colorized(self):
@@ -1512,6 +1581,7 @@ class Spec(object):
in the format string. The format strings you can provide are::
$_ Package name
+ $. Full package name (with namespace)
$@ Version with '@' prefix
$% Compiler with '%' prefix
$%@ Compiler with '%' prefix & compiler version with '@' prefix
@@ -1574,6 +1644,8 @@ class Spec(object):
if c == '_':
out.write(fmt % self.name)
+ elif c == '.':
+ out.write(fmt % self.fullname)
elif c == '@':
if self.versions and self.versions != _any_version:
write(fmt % (c + str(self.versions)), c)
@@ -1638,7 +1710,7 @@ class Spec(object):
write(fmt % str(self.architecture), '=')
elif named_str == 'SHA1':
if self.dependencies:
- out.write(fmt % str(self.dep_hash(8)))
+ out.write(fmt % str(self.dag_hash(7)))
elif named_str == 'SPACK_ROOT':
out.write(fmt % spack.prefix)
elif named_str == 'SPACK_INSTALL':
@@ -1689,8 +1761,8 @@ class Spec(object):
self.architecture, other.architecture)
#Dependency is not configurable
- if self.dep_hash() != other.dep_hash():
- return -1 if self.dep_hash() < other.dep_hash() else 1
+ if self.dag_hash() != other.dag_hash():
+ return -1 if self.dag_hash() < other.dag_hash() else 1
#Equal specs
return 0
@@ -1796,11 +1868,16 @@ class SpecParser(spack.parse.Parser):
def spec(self):
"""Parse a spec out of the input. If a spec is supplied, then initialize
and return it instead of creating a new one."""
- self.check_identifier()
+
+ spec_namespace, dot, spec_name = self.token.value.rpartition('.')
+ if not spec_namespace:
+ spec_namespace = None
+
+ self.check_identifier(spec_name)
# This will init the spec without calling __init__.
spec = Spec.__new__(Spec)
- spec.name = self.token.value
+ spec.name = spec_name
spec.versions = VersionList()
spec.variants = VariantMap(spec)
spec.architecture = None
@@ -1808,6 +1885,7 @@ class SpecParser(spack.parse.Parser):
spec.external = None
spec.dependents = DependencyMap()
spec.dependencies = DependencyMap()
+ spec.namespace = spec_namespace
spec._normal = False
spec._concrete = False
@@ -1901,12 +1979,14 @@ class SpecParser(spack.parse.Parser):
return compiler
- def check_identifier(self):
+ def check_identifier(self, id=None):
"""The only identifiers that can contain '.' are versions, but version
ids are context-sensitive so we have to check on a case-by-case
basis. Call this if we detect a version id where it shouldn't be.
"""
- if '.' in self.token.value:
+ if not id:
+ id = self.token.value
+ if '.' in id:
self.last_token_error("Identifier cannot contain '.'")
@@ -2097,4 +2177,4 @@ class UnsatisfiableDependencySpecError(UnsatisfiableSpecError):
class SpackYAMLError(spack.error.SpackError):
def __init__(self, msg, yaml_error):
- super(SpackError, self).__init__(msg, str(yaml_error))
+ super(SpackYAMLError, self).__init__(msg, str(yaml_error))
diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py
index c70c7a84a4..dcf2dc1e6e 100644
--- a/lib/spack/spack/stage.py
+++ b/lib/spack/spack/stage.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -27,6 +27,7 @@ import re
import shutil
import tempfile
import sys
+from urlparse import urljoin
import llnl.util.tty as tty
from llnl.util.filesystem import *
@@ -83,14 +84,18 @@ class Stage(object):
stage object later). If name is not provided, then this
stage will be given a unique name automatically.
"""
+ # TODO: fetch/stage coupling needs to be reworked -- the logic
+ # TODO: here is convoluted and not modular enough.
if isinstance(url_or_fetch_strategy, basestring):
self.fetcher = fs.from_url(url_or_fetch_strategy)
elif isinstance(url_or_fetch_strategy, fs.FetchStrategy):
self.fetcher = url_or_fetch_strategy
else:
raise ValueError("Can't construct Stage without url or fetch strategy")
-
self.fetcher.set_stage(self)
+ self.default_fetcher = self.fetcher # self.fetcher can change with mirrors.
+ self.skip_checksum_for_mirror = True # used for mirrored archives of repositories.
+
self.name = kwargs.get('name')
self.mirror_path = kwargs.get('mirror_path')
@@ -199,17 +204,18 @@ class Stage(object):
@property
def archive_file(self):
"""Path to the source archive within this stage directory."""
- if not isinstance(self.fetcher, fs.URLFetchStrategy):
- return None
+ paths = []
+ if isinstance(self.fetcher, fs.URLFetchStrategy):
+ paths.append(os.path.join(self.path, os.path.basename(self.fetcher.url)))
- paths = [os.path.join(self.path, os.path.basename(self.fetcher.url))]
if self.mirror_path:
paths.append(os.path.join(self.path, os.path.basename(self.mirror_path)))
for path in paths:
if os.path.exists(path):
return path
- return None
+ else:
+ return None
@property
@@ -239,36 +245,58 @@ class Stage(object):
"""Downloads an archive or checks out code from a repository."""
self.chdir()
- fetchers = [self.fetcher]
+ fetchers = [self.default_fetcher]
# TODO: move mirror logic out of here and clean it up!
+ # TODO: Or @alalazo may have some ideas about how to use a
+ # TODO: CompositeFetchStrategy here.
+ self.skip_checksum_for_mirror = True
if self.mirror_path:
- urls = ["%s/%s" % (m, self.mirror_path) for m in _get_mirrors()]
+ mirrors = spack.config.get_config('mirrors')
+ urls = [urljoin(u, self.mirror_path) for name, u in mirrors.items()]
+ # If this archive is normally fetched from a tarball URL,
+ # then use the same digest. `spack mirror` ensures that
+ # the checksum will be the same.
digest = None
- if isinstance(self.fetcher, fs.URLFetchStrategy):
- digest = self.fetcher.digest
- fetchers = [fs.URLFetchStrategy(url, digest)
- for url in urls] + fetchers
- for f in fetchers:
- f.set_stage(self)
+ if isinstance(self.default_fetcher, fs.URLFetchStrategy):
+ digest = self.default_fetcher.digest
+
+            # Have to skip the checksum for things archived from
+ # repositories. How can this be made safer?
+ self.skip_checksum_for_mirror = not bool(digest)
+
+ for url in urls:
+ fetchers.insert(0, fs.URLFetchStrategy(url, digest))
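+
+            # Net effect: mirror fetchers sit ahead of the default fetcher,
+            # so mirrors are tried first and the original URL or repository
+            # checkout is the fallback.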
for fetcher in fetchers:
try:
- fetcher.fetch()
+ fetcher.set_stage(self)
+ self.fetcher = fetcher
+ self.fetcher.fetch()
break
except spack.error.SpackError, e:
tty.msg("Fetching from %s failed." % fetcher)
tty.debug(e)
continue
else:
- tty.die("All fetchers failed for %s" % self.name)
+ errMessage = "All fetchers failed for %s" % self.name
+ self.fetcher = self.default_fetcher
+ raise fs.FetchError(errMessage, None)
def check(self):
"""Check the downloaded archive against a checksum digest.
No-op if this stage checks code out of a repository."""
- self.fetcher.check()
+ if self.fetcher is not self.default_fetcher and self.skip_checksum_for_mirror:
+ tty.warn("Fetching from mirror without a checksum!",
+ "This package is normally checked out from a version "
+ "control system, but it has been archived on a spack "
+ "mirror. This means we cannot know a checksum for the "
+ "tarball in advance. Be sure that your connection to "
+ "this mirror is secure!.")
+ else:
+ self.fetcher.check()
def expand_archive(self):
@@ -345,7 +373,8 @@ class DIYStage(object):
def _get_mirrors():
"""Get mirrors from spack configuration."""
- return [path for name, path in spack.config.get_mirror_config()]
+ config = spack.config.get_config('mirrors')
+ return [val for name, val in config.iteritems()]
def ensure_access(file=spack.stage_path):
diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py
index 6b3715be6f..a569cbbf35 100644
--- a/lib/spack/spack/test/__init__.py
+++ b/lib/spack/spack/test/__init__.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -24,7 +24,10 @@
##############################################################################
import sys
import unittest
+import nose
+from spack.test.tally_plugin import Tally
+from llnl.util.filesystem import join_path
import llnl.util.tty as tty
from llnl.util.tty.colify import colify
@@ -56,7 +59,12 @@ test_names = ['versions',
'spec_yaml',
'optional_deps',
'make_executable',
- 'configure_guess']
+ 'configure_guess',
+ 'unit_install',
+ 'lock',
+ 'database',
+ 'namespace_trie',
+ 'yaml']
def list_tests():
@@ -64,7 +72,7 @@ def list_tests():
return test_names
-def run(names, verbose=False):
+def run(names, outputDir, verbose=False):
"""Run tests with the supplied names. Names should be a list. If
it's empty, run ALL of Spack's tests."""
verbosity = 1 if not verbose else 2
@@ -76,30 +84,33 @@ def run(names, verbose=False):
if test not in test_names:
tty.error("%s is not a valid spack test name." % test,
"Valid names are:")
- colify(test_names, indent=4)
+ colify(sorted(test_names), indent=4)
sys.exit(1)
-
- runner = unittest.TextTestRunner(verbosity=verbosity)
-
- testsRun = errors = failures = 0
+
+ tally = Tally()
for test in names:
module = 'spack.test.' + test
print module
- suite = unittest.defaultTestLoader.loadTestsFromName(module)
-
+
tty.msg("Running test: %s" % test)
- result = runner.run(suite)
- testsRun += result.testsRun
- errors += len(result.errors)
- failures += len(result.failures)
+
+ runOpts = ["--with-%s" % spack.test.tally_plugin.Tally.name]
+
+ if outputDir:
+ xmlOutputFname = "unittests-{0}.xml".format(test)
+ xmlOutputPath = join_path(outputDir, xmlOutputFname)
+ runOpts += ["--with-xunit",
+ "--xunit-file={0}".format(xmlOutputPath)]
+ argv = [""] + runOpts + [module]
+ result = nose.run(argv=argv, addplugins=[tally])
- succeeded = not errors and not failures
+ succeeded = not tally.failCount and not tally.errorCount
tty.msg("Tests Complete.",
- "%5d tests run" % testsRun,
- "%5d failures" % failures,
- "%5d errors" % errors)
+ "%5d tests run" % tally.numberOfTestsRun,
+ "%5d failures" % tally.failCount,
+ "%5d errors" % tally.errorCount)
- if not errors and not failures:
+ if succeeded:
tty.info("OK", format='g')
else:
tty.info("FAIL", format='r')
diff --git a/lib/spack/spack/test/cc.py b/lib/spack/spack/test/cc.py
index aa16f9b351..905af28a06 100644
--- a/lib/spack/spack/test/cc.py
+++ b/lib/spack/spack/test/cc.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -65,17 +65,17 @@ class CompilerTest(unittest.TestCase):
def check_cc(self, command, args, expected):
os.environ['SPACK_TEST_COMMAND'] = command
- self.assertEqual(self.cc(*args, return_output=True).strip(), expected)
+ self.assertEqual(self.cc(*args, output=str).strip(), expected)
def check_ld(self, command, args, expected):
os.environ['SPACK_TEST_COMMAND'] = command
- self.assertEqual(self.ld(*args, return_output=True).strip(), expected)
+ self.assertEqual(self.ld(*args, output=str).strip(), expected)
def check_cpp(self, command, args, expected):
os.environ['SPACK_TEST_COMMAND'] = command
- self.assertEqual(self.cpp(*args, return_output=True).strip(), expected)
+ self.assertEqual(self.cpp(*args, output=str).strip(), expected)
def test_vcheck_mode(self):
diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py
index f81a2f5af8..3cdbfa7de9 100644
--- a/lib/spack/spack/test/concretize.py
+++ b/lib/spack/spack/test/concretize.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -125,22 +125,22 @@ class ConcretizeTest(MockPackagesTest):
we ask for some advanced version.
"""
self.assertTrue(not any(spec.satisfies('mpich2@:1.0')
- for spec in spack.db.providers_for('mpi@2.1')))
+ for spec in spack.repo.providers_for('mpi@2.1')))
self.assertTrue(not any(spec.satisfies('mpich2@:1.1')
- for spec in spack.db.providers_for('mpi@2.2')))
+ for spec in spack.repo.providers_for('mpi@2.2')))
self.assertTrue(not any(spec.satisfies('mpich2@:1.1')
- for spec in spack.db.providers_for('mpi@2.2')))
+ for spec in spack.repo.providers_for('mpi@2.2')))
self.assertTrue(not any(spec.satisfies('mpich@:1')
- for spec in spack.db.providers_for('mpi@2')))
+ for spec in spack.repo.providers_for('mpi@2')))
self.assertTrue(not any(spec.satisfies('mpich@:1')
- for spec in spack.db.providers_for('mpi@3')))
+ for spec in spack.repo.providers_for('mpi@3')))
self.assertTrue(not any(spec.satisfies('mpich2')
- for spec in spack.db.providers_for('mpi@3')))
+ for spec in spack.repo.providers_for('mpi@3')))
def test_virtual_is_fully_expanded_for_callpath(self):
@@ -216,7 +216,7 @@ class ConcretizeTest(MockPackagesTest):
def test_external_and_virtual(self):
spec = Spec('externaltest')
spec.concretize()
- self.assertTrue(spec['externaltool'].external, '/path/to/external_tool')
- self.assertTrue(spec['stuff'].external, '/path/to/external_virtual_gcc')
+ self.assertEqual(spec['externaltool'].external, '/path/to/external_tool')
+ self.assertEqual(spec['stuff'].external, '/path/to/external_virtual_gcc')
self.assertTrue(spec['externaltool'].compiler.satisfies('gcc'))
self.assertTrue(spec['stuff'].compiler.satisfies('gcc'))
diff --git a/lib/spack/spack/test/config.py b/lib/spack/spack/test/config.py
index 790b22f3b0..d8be5a855b 100644
--- a/lib/spack/spack/test/config.py
+++ b/lib/spack/spack/test/config.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -26,49 +26,95 @@ import unittest
import shutil
import os
from tempfile import mkdtemp
+from ordereddict_backport import OrderedDict
import spack
-from spack.packages import PackageDB
+import spack.config
from spack.test.mock_packages_test import *
+# Some sample compiler config data
+a_comps = {
+ "all": {
+ "gcc@4.7.3" : {
+ "cc" : "/gcc473",
+ "cxx": "/g++473",
+ "f77": None,
+ "fc" : None },
+ "gcc@4.5.0" : {
+ "cc" : "/gcc450",
+ "cxx": "/g++450",
+ "f77": "/gfortran",
+ "fc" : "/gfortran" },
+ "clang@3.3" : {
+ "cc" : "<overwritten>",
+ "cxx": "<overwritten>",
+ "f77": "<overwritten>",
+ "fc" : "<overwritten>" }
+ }
+}
+
+b_comps = {
+ "all": {
+ "icc@10.0" : {
+ "cc" : "/icc100",
+ "cxx": "/icc100",
+ "f77": None,
+ "fc" : None },
+ "icc@11.1" : {
+ "cc" : "/icc111",
+ "cxx": "/icp111",
+ "f77": "/ifort",
+ "fc" : "/ifort" },
+ "clang@3.3" : {
+ "cc" : "/clang",
+ "cxx": "/clang++",
+ "f77": None,
+ "fc" : None}
+ }
+}
+
class ConfigTest(MockPackagesTest):
def setUp(self):
- self.initmock()
+ super(ConfigTest, self).setUp()
self.tmp_dir = mkdtemp('.tmp', 'spack-config-test-')
- spack.config.config_scopes = [('test_low_priority', os.path.join(self.tmp_dir, 'low')),
- ('test_high_priority', os.path.join(self.tmp_dir, 'high'))]
+ spack.config.config_scopes = OrderedDict()
+ spack.config.ConfigScope('test_low_priority', os.path.join(self.tmp_dir, 'low'))
+ spack.config.ConfigScope('test_high_priority', os.path.join(self.tmp_dir, 'high'))
def tearDown(self):
- self.cleanmock()
+ super(ConfigTest, self).tearDown()
shutil.rmtree(self.tmp_dir, True)
- def check_config(self, comps):
- config = spack.config.get_compilers_config()
- compiler_list = ['cc', 'cxx', 'f77', 'f90']
- for key in comps:
+
+ def check_config(self, comps, *compiler_names):
+ """Check that named compilers in comps match Spack's config."""
+ config = spack.config.get_config('compilers')
+ compiler_list = ['cc', 'cxx', 'f77', 'fc']
+ for key in compiler_names:
for c in compiler_list:
- if comps[key][c] == '/bad':
- continue
- self.assertEqual(comps[key][c], config[key][c])
+ expected = comps['all'][key][c]
+ actual = config['all'][key][c]
+ self.assertEqual(expected, actual)
- def test_write_key(self):
- a_comps = {"gcc@4.7.3" : { "cc" : "/gcc473", "cxx" : "/g++473", "f77" : None, "f90" : None },
- "gcc@4.5.0" : { "cc" : "/gcc450", "cxx" : "/g++450", "f77" : "/gfortran", "f90" : "/gfortran" },
- "clang@3.3" : { "cc" : "/bad", "cxx" : "/bad", "f77" : "/bad", "f90" : "/bad" }}
+ def test_write_key_in_memory(self):
+ # Write b_comps "on top of" a_comps.
+ spack.config.update_config('compilers', a_comps, 'test_low_priority')
+ spack.config.update_config('compilers', b_comps, 'test_high_priority')
- b_comps = {"icc@10.0" : { "cc" : "/icc100", "cxx" : "/icc100", "f77" : None, "f90" : None },
- "icc@11.1" : { "cc" : "/icc111", "cxx" : "/icp111", "f77" : "/ifort", "f90" : "/ifort" },
- "clang@3.3" : { "cc" : "/clang", "cxx" : "/clang++", "f77" : None, "f90" : None}}
+ # Make sure the config looks how we expect.
+ self.check_config(a_comps, 'gcc@4.7.3', 'gcc@4.5.0')
+ self.check_config(b_comps, 'icc@10.0', 'icc@11.1', 'clang@3.3')
- spack.config.add_to_compiler_config(a_comps, 'test_low_priority')
- spack.config.add_to_compiler_config(b_comps, 'test_high_priority')
- self.check_config(a_comps)
- self.check_config(b_comps)
+ def test_write_key_to_disk(self):
+ # Write b_comps "on top of" a_comps.
+ spack.config.update_config('compilers', a_comps, 'test_low_priority')
+ spack.config.update_config('compilers', b_comps, 'test_high_priority')
+ # Clear caches so we're forced to read from disk.
spack.config.clear_config_caches()
- self.check_config(a_comps)
- self.check_config(b_comps)
-
+ # Same check again, to ensure consistency.
+ self.check_config(a_comps, 'gcc@4.7.3', 'gcc@4.5.0')
+ self.check_config(b_comps, 'icc@10.0', 'icc@11.1', 'clang@3.3')
diff --git a/lib/spack/spack/test/configure_guess.py b/lib/spack/spack/test/configure_guess.py
index 766dd51d52..a4e8565b62 100644
--- a/lib/spack/spack/test/configure_guess.py
+++ b/lib/spack/spack/test/configure_guess.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py
new file mode 100644
index 0000000000..0205f4b8ce
--- /dev/null
+++ b/lib/spack/spack/test/database.py
@@ -0,0 +1,353 @@
+##############################################################################
+# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""
+These tests check that the database functions properly,
+both in memory and in its on-disk file.
+"""
+import tempfile
+import shutil
+import multiprocessing
+
+from llnl.util.lock import *
+from llnl.util.filesystem import join_path
+
+import spack
+from spack.database import Database
+from spack.directory_layout import YamlDirectoryLayout
+from spack.test.mock_packages_test import *
+
+from llnl.util.tty.colify import colify
+
+def _print_ref_counts():
+ """Print out all ref counts for the graph used here, for debugging"""
+ recs = []
+
+ def add_rec(spec):
+ cspecs = spack.installed_db.query(spec, installed=any)
+
+ if not cspecs:
+ recs.append("[ %-7s ] %-20s-" % ('', spec))
+ else:
+ key = cspecs[0].dag_hash()
+ rec = spack.installed_db.get_record(cspecs[0])
+ recs.append("[ %-7s ] %-20s%d" % (key[:7], spec, rec.ref_count))
+
+ with spack.installed_db.read_transaction():
+ add_rec('mpileaks ^mpich')
+ add_rec('callpath ^mpich')
+ add_rec('mpich')
+
+ add_rec('mpileaks ^mpich2')
+ add_rec('callpath ^mpich2')
+ add_rec('mpich2')
+
+ add_rec('mpileaks ^zmpi')
+ add_rec('callpath ^zmpi')
+ add_rec('zmpi')
+ add_rec('fake')
+
+ add_rec('dyninst')
+ add_rec('libdwarf')
+ add_rec('libelf')
+
+ colify(recs, cols=3)
+
+
+class DatabaseTest(MockPackagesTest):
+
+ def _mock_install(self, spec):
+ s = Spec(spec)
+ s.concretize()
+ pkg = spack.repo.get(s)
+ pkg.do_install(fake=True)
+
+
+ def _mock_remove(self, spec):
+ specs = spack.installed_db.query(spec)
+ assert(len(specs) == 1)
+ spec = specs[0]
+ spec.package.do_uninstall(spec)
+
+
+ def setUp(self):
+ super(DatabaseTest, self).setUp()
+ #
+ # TODO: make the mockup below easier.
+ #
+
+ # Make a fake install directory
+ self.install_path = tempfile.mkdtemp()
+ self.spack_install_path = spack.install_path
+ spack.install_path = self.install_path
+
+ self.install_layout = YamlDirectoryLayout(self.install_path)
+ self.spack_install_layout = spack.install_layout
+ spack.install_layout = self.install_layout
+
+ # Make fake database and fake install directory.
+ self.installed_db = Database(self.install_path)
+ self.spack_installed_db = spack.installed_db
+ spack.installed_db = self.installed_db
+
+ # Make a mock database with some packages installed. Note that
+ # the ref count for dyninst here will be 3, as it's reused
+ # across each install.
+ #
+ # Here is what the mock DB looks like:
+ #
+ # o mpileaks o mpileaks' o mpileaks''
+ # |\ |\ |\
+ # | o callpath | o callpath' | o callpath''
+ # |/| |/| |/|
+ # o | mpich o | mpich2 o | zmpi
+ # | | o | fake
+ # | | |
+ # | |______________/
+ # | .____________/
+ # |/
+ # o dyninst
+ # |\
+ # | o libdwarf
+ # |/
+ # o libelf
+ #
+
+ # Transaction used to avoid repeated writes.
+ with spack.installed_db.write_transaction():
+ self._mock_install('mpileaks ^mpich')
+ self._mock_install('mpileaks ^mpich2')
+ self._mock_install('mpileaks ^zmpi')
+
+
+ def tearDown(self):
+ super(DatabaseTest, self).tearDown()
+ shutil.rmtree(self.install_path)
+ spack.install_path = self.spack_install_path
+ spack.install_layout = self.spack_install_layout
+ spack.installed_db = self.spack_installed_db
+
+
+ def test_005_db_exists(self):
+ """Make sure db cache file exists after creating."""
+ index_file = join_path(self.install_path, '.spack-db', 'index.yaml')
+ lock_file = join_path(self.install_path, '.spack-db', 'lock')
+
+ self.assertTrue(os.path.exists(index_file))
+ self.assertTrue(os.path.exists(lock_file))
+
+
+ def test_010_all_install_sanity(self):
+ """Ensure that the install layout reflects what we think it does."""
+ all_specs = spack.install_layout.all_specs()
+ self.assertEqual(len(all_specs), 13)
+
+ # query specs with multiple configurations
+ mpileaks_specs = [s for s in all_specs if s.satisfies('mpileaks')]
+ callpath_specs = [s for s in all_specs if s.satisfies('callpath')]
+ mpi_specs = [s for s in all_specs if s.satisfies('mpi')]
+
+ self.assertEqual(len(mpileaks_specs), 3)
+ self.assertEqual(len(callpath_specs), 3)
+ self.assertEqual(len(mpi_specs), 3)
+
+ # query specs with single configurations
+ dyninst_specs = [s for s in all_specs if s.satisfies('dyninst')]
+ libdwarf_specs = [s for s in all_specs if s.satisfies('libdwarf')]
+ libelf_specs = [s for s in all_specs if s.satisfies('libelf')]
+
+ self.assertEqual(len(dyninst_specs), 1)
+ self.assertEqual(len(libdwarf_specs), 1)
+ self.assertEqual(len(libelf_specs), 1)
+
+ # Query by dependency
+ self.assertEqual(len([s for s in all_specs if s.satisfies('mpileaks ^mpich')]), 1)
+ self.assertEqual(len([s for s in all_specs if s.satisfies('mpileaks ^mpich2')]), 1)
+ self.assertEqual(len([s for s in all_specs if s.satisfies('mpileaks ^zmpi')]), 1)
+
+
+ def test_015_write_and_read(self):
+ # write and read DB
+ with spack.installed_db.write_transaction():
+ specs = spack.installed_db.query()
+ recs = [spack.installed_db.get_record(s) for s in specs]
+
+ for spec, rec in zip(specs, recs):
+ new_rec = spack.installed_db.get_record(spec)
+ self.assertEqual(new_rec.ref_count, rec.ref_count)
+ self.assertEqual(new_rec.spec, rec.spec)
+ self.assertEqual(new_rec.path, rec.path)
+ self.assertEqual(new_rec.installed, rec.installed)
+
+
+ def _check_db_sanity(self):
+ """Utiilty function to check db against install layout."""
+ expected = sorted(spack.install_layout.all_specs())
+ actual = sorted(self.installed_db.query())
+
+ self.assertEqual(len(expected), len(actual))
+ for e, a in zip(expected, actual):
+ self.assertEqual(e, a)
+
+
+ def test_020_db_sanity(self):
+ """Make sure query() returns what's actually in the db."""
+ self._check_db_sanity()
+
+
+ def test_030_db_sanity_from_another_process(self):
+ def read_and_modify():
+ self._check_db_sanity() # check that other process can read DB
+ with self.installed_db.write_transaction():
+ self._mock_remove('mpileaks ^zmpi')
+
+ p = multiprocessing.Process(target=read_and_modify, args=())
+ p.start()
+ p.join()
+
+ # ensure child process change is visible in parent process
+ with self.installed_db.read_transaction():
+ self.assertEqual(len(self.installed_db.query('mpileaks ^zmpi')), 0)
+
+
+ def test_040_ref_counts(self):
+ """Ensure that we got ref counts right when we read the DB."""
+ self.installed_db._check_ref_counts()
+
+
+ def test_050_basic_query(self):
+ """Ensure that querying the database is consistent with what is installed."""
+ # query everything
+ self.assertEqual(len(spack.installed_db.query()), 13)
+
+ # query specs with multiple configurations
+ mpileaks_specs = self.installed_db.query('mpileaks')
+ callpath_specs = self.installed_db.query('callpath')
+ mpi_specs = self.installed_db.query('mpi')
+
+ self.assertEqual(len(mpileaks_specs), 3)
+ self.assertEqual(len(callpath_specs), 3)
+ self.assertEqual(len(mpi_specs), 3)
+
+ # query specs with single configurations
+ dyninst_specs = self.installed_db.query('dyninst')
+ libdwarf_specs = self.installed_db.query('libdwarf')
+ libelf_specs = self.installed_db.query('libelf')
+
+ self.assertEqual(len(dyninst_specs), 1)
+ self.assertEqual(len(libdwarf_specs), 1)
+ self.assertEqual(len(libelf_specs), 1)
+
+ # Query by dependency
+ self.assertEqual(len(self.installed_db.query('mpileaks ^mpich')), 1)
+ self.assertEqual(len(self.installed_db.query('mpileaks ^mpich2')), 1)
+ self.assertEqual(len(self.installed_db.query('mpileaks ^zmpi')), 1)
+
+
+ def _check_remove_and_add_package(self, spec):
+ """Remove a spec from the DB, then add it and make sure everything's
+ still ok once it is added. This checks that it was
+ removed, that it's back when added again, and that ref
+ counts are consistent.
+ """
+ original = self.installed_db.query()
+ self.installed_db._check_ref_counts()
+
+ # Remove spec
+ concrete_spec = self.installed_db.remove(spec)
+ self.installed_db._check_ref_counts()
+ remaining = self.installed_db.query()
+
+ # ensure spec we removed is gone
+ self.assertEqual(len(original) - 1, len(remaining))
+ self.assertTrue(all(s in original for s in remaining))
+ self.assertTrue(concrete_spec not in remaining)
+
+ # add it back and make sure everything is ok.
+ self.installed_db.add(concrete_spec, "")
+ installed = self.installed_db.query()
+ self.assertEqual(len(installed), len(original))
+
+ # Sanity check against directory layout and check ref counts.
+ self._check_db_sanity()
+ self.installed_db._check_ref_counts()
+
+
+ def test_060_remove_and_add_root_package(self):
+ self._check_remove_and_add_package('mpileaks ^mpich')
+
+
+ def test_070_remove_and_add_dependency_package(self):
+ self._check_remove_and_add_package('dyninst')
+
+
+ def test_080_root_ref_counts(self):
+ rec = self.installed_db.get_record('mpileaks ^mpich')
+
+ # Remove a top-level spec from the DB
+ self.installed_db.remove('mpileaks ^mpich')
+
+ # record no longer in DB
+ self.assertEqual(self.installed_db.query('mpileaks ^mpich', installed=any), [])
+
+ # record's deps have updated ref_counts
+ self.assertEqual(self.installed_db.get_record('callpath ^mpich').ref_count, 0)
+ self.assertEqual(self.installed_db.get_record('mpich').ref_count, 1)
+
+ # put the spec back
+ self.installed_db.add(rec.spec, rec.path)
+
+ # record is present again
+ self.assertEqual(len(self.installed_db.query('mpileaks ^mpich', installed=any)), 1)
+
+ # dependencies have ref counts updated
+ self.assertEqual(self.installed_db.get_record('callpath ^mpich').ref_count, 1)
+ self.assertEqual(self.installed_db.get_record('mpich').ref_count, 2)
+
+
+ def test_090_non_root_ref_counts(self):
+ mpileaks_mpich_rec = self.installed_db.get_record('mpileaks ^mpich')
+ callpath_mpich_rec = self.installed_db.get_record('callpath ^mpich')
+
+ # "force remove" a non-root spec from the DB
+ self.installed_db.remove('callpath ^mpich')
+
+ # record still in DB but marked uninstalled
+ self.assertEqual(self.installed_db.query('callpath ^mpich', installed=True), [])
+ self.assertEqual(len(self.installed_db.query('callpath ^mpich', installed=any)), 1)
+
+ # record and its deps have same ref_counts
+ self.assertEqual(self.installed_db.get_record('callpath ^mpich', installed=any).ref_count, 1)
+ self.assertEqual(self.installed_db.get_record('mpich').ref_count, 2)
+
+ # remove only dependent of uninstalled callpath record
+ self.installed_db.remove('mpileaks ^mpich')
+
+ # record and parent are completely gone.
+ self.assertEqual(self.installed_db.query('mpileaks ^mpich', installed=any), [])
+ self.assertEqual(self.installed_db.query('callpath ^mpich', installed=any), [])
+
+ # mpich ref count updated properly.
+ mpich_rec = self.installed_db.get_record('mpich')
+ self.assertEqual(mpich_rec.ref_count, 0)
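
The ref-count assertions above amount to simple DAG bookkeeping: a record's
ref_count is the number of installed specs that depend on it directly, so
removing 'mpileaks ^mpich' drops callpath's count to 0 while mpich keeps 1
(callpath still depends on it). A toy sketch of that accounting (hypothetical
ToyDB, not the real Database class):

    from collections import defaultdict

    class ToyDB(object):
        def __init__(self):
            self.ref_count = defaultdict(int)

        def add(self, spec, deps):
            # each direct dependent bumps its dependencies' counts
            for d in deps:
                self.ref_count[d] += 1
            self.ref_count.setdefault(spec, 0)

        def remove(self, spec, deps):
            for d in deps:
                self.ref_count[d] -= 1

    db = ToyDB()
    db.add('mpich', [])
    db.add('callpath', ['mpich'])               # mpich: 1
    db.add('mpileaks', ['callpath', 'mpich'])   # callpath: 1, mpich: 2
    db.remove('mpileaks', ['callpath', 'mpich'])
    assert db.ref_count['callpath'] == 0 and db.ref_count['mpich'] == 1
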
diff --git a/lib/spack/spack/test/directory_layout.py b/lib/spack/spack/test/directory_layout.py
index b3ad8efec4..8412f4a9b4 100644
--- a/lib/spack/spack/test/directory_layout.py
+++ b/lib/spack/spack/test/directory_layout.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -34,23 +34,27 @@ from llnl.util.filesystem import *
import spack
from spack.spec import Spec
-from spack.packages import PackageDB
+from spack.repository import RepoPath
from spack.directory_layout import YamlDirectoryLayout
+from spack.test.mock_packages_test import *
+
# number of packages to test (to reduce test time)
max_packages = 10
-class DirectoryLayoutTest(unittest.TestCase):
+class DirectoryLayoutTest(MockPackagesTest):
"""Tests that a directory layout works correctly and produces a
consistent install path."""
def setUp(self):
+ super(DirectoryLayoutTest, self).setUp()
self.tmpdir = tempfile.mkdtemp()
self.layout = YamlDirectoryLayout(self.tmpdir)
def tearDown(self):
+ super(DirectoryLayoutTest, self).tearDown()
shutil.rmtree(self.tmpdir, ignore_errors=True)
self.layout = None
@@ -62,9 +66,12 @@ class DirectoryLayoutTest(unittest.TestCase):
finally that the directory can be removed by the directory
layout.
"""
- packages = list(spack.db.all_packages())[:max_packages]
+ packages = list(spack.repo.all_packages())[:max_packages]
for pkg in packages:
+ if pkg.name.startswith('external'):
+ # External packages cannot be installed
+ continue
spec = pkg.spec
# If a spec fails to concretize, just skip it. If it is a
@@ -123,17 +130,17 @@ class DirectoryLayoutTest(unittest.TestCase):
information about installed packages' specs to uninstall
or query them again if the package goes away.
"""
- mock_db = PackageDB(spack.mock_packages_path)
+ mock_db = RepoPath(spack.mock_packages_path)
not_in_mock = set.difference(
- set(spack.db.all_package_names()),
+ set(spack.repo.all_package_names()),
set(mock_db.all_package_names()))
packages = list(not_in_mock)[:max_packages]
# Create all the packages that are not in mock.
installed_specs = {}
for pkg_name in packages:
- spec = spack.db.get(pkg_name).spec
+ spec = spack.repo.get(pkg_name).spec
# If a spec fails to concretize, just skip it. If it is a
# real error, it will be caught by concretization tests.
@@ -145,8 +152,7 @@ class DirectoryLayoutTest(unittest.TestCase):
self.layout.create_install_directory(spec)
installed_specs[spec] = self.layout.path_for_spec(spec)
- tmp = spack.db
- spack.db = mock_db
+ spack.repo.swap(mock_db)
# Now check that even without the package files, we know
# enough to read a spec from the spec file.
@@ -161,16 +167,19 @@ class DirectoryLayoutTest(unittest.TestCase):
self.assertTrue(spec.eq_dag(spec_from_file))
self.assertEqual(spec.dag_hash(), spec_from_file.dag_hash())
- spack.db = tmp
+ spack.repo.swap(mock_db)
def test_find(self):
"""Test that finding specs within an install layout works."""
- packages = list(spack.db.all_packages())[:max_packages]
+ packages = list(spack.repo.all_packages())[:max_packages]
# Create install prefixes for all packages in the list
installed_specs = {}
for pkg in packages:
+ if pkg.name.startswith('external'):
+ # External packages cannot be installed
+ continue
spec = pkg.spec.concretized()
installed_specs[spec.name] = spec
self.layout.create_install_directory(spec)
diff --git a/lib/spack/spack/test/git_fetch.py b/lib/spack/spack/test/git_fetch.py
index 9700bd7533..3813079065 100644
--- a/lib/spack/spack/test/git_fetch.py
+++ b/lib/spack/spack/test/git_fetch.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -50,7 +50,7 @@ class GitFetchTest(MockPackagesTest):
spec = Spec('git-test')
spec.concretize()
- self.pkg = spack.db.get(spec, new=True)
+ self.pkg = spack.repo.get(spec, new=True)
def tearDown(self):
diff --git a/lib/spack/spack/test/hg_fetch.py b/lib/spack/spack/test/hg_fetch.py
index 531dfabaa1..ee8327aec8 100644
--- a/lib/spack/spack/test/hg_fetch.py
+++ b/lib/spack/spack/test/hg_fetch.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -47,7 +47,7 @@ class HgFetchTest(MockPackagesTest):
spec = Spec('hg-test')
spec.concretize()
- self.pkg = spack.db.get(spec, new=True)
+ self.pkg = spack.repo.get(spec, new=True)
def tearDown(self):
diff --git a/lib/spack/spack/test/install.py b/lib/spack/spack/test/install.py
index 5659e97a4d..628329a423 100644
--- a/lib/spack/spack/test/install.py
+++ b/lib/spack/spack/test/install.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -78,7 +78,7 @@ class InstallTest(MockPackagesTest):
self.assertTrue(spec.concrete)
# Get the package
- pkg = spack.db.get(spec)
+ pkg = spack.repo.get(spec)
# Fake the URL for the package so it downloads from a file.
pkg.fetcher = URLFetchStrategy(self.repo.url)
diff --git a/lib/spack/spack/test/link_tree.py b/lib/spack/spack/test/link_tree.py
index 9e887ecc49..886b7ef4c5 100644
--- a/lib/spack/spack/test/link_tree.py
+++ b/lib/spack/spack/test/link_tree.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/lib/spack/spack/test/lock.py b/lib/spack/spack/test/lock.py
new file mode 100644
index 0000000000..bc68df01db
--- /dev/null
+++ b/lib/spack/spack/test/lock.py
@@ -0,0 +1,266 @@
+##############################################################################
+# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""
+These tests ensure that our lock works correctly.
+"""
+import unittest
+import os
+import tempfile
+import shutil
+from multiprocessing import Process
+
+from llnl.util.lock import *
+from llnl.util.filesystem import join_path, touch
+
+from spack.util.multiproc import Barrier
+
+# This is the longest a failed test will take, as the barriers will
+# time out and raise an exception.
+barrier_timeout = 5
+
+
+class LockTest(unittest.TestCase):
+
+ def setUp(self):
+ self.tempdir = tempfile.mkdtemp()
+ self.lock_path = join_path(self.tempdir, 'lockfile')
+ touch(self.lock_path)
+
+
+ def tearDown(self):
+ shutil.rmtree(self.tempdir, ignore_errors=True)
+
+
+ def multiproc_test(self, *functions):
+ """Order some processes using simple barrier synchronization."""
+ b = Barrier(len(functions), timeout=barrier_timeout)
+ procs = [Process(target=f, args=(b,)) for f in functions]
+ for p in procs: p.start()
+ for p in procs:
+ p.join()
+ self.assertEqual(p.exitcode, 0)
+
+
+ #
+ # Process snippets below can be composed into tests.
+ #
+ def acquire_write(self, barrier):
+ lock = Lock(self.lock_path)
+ lock.acquire_write() # grab exclusive lock
+ barrier.wait()
+ barrier.wait() # hold the lock until exceptions are raised in other procs.
+
+ def acquire_read(self, barrier):
+ lock = Lock(self.lock_path)
+ lock.acquire_read() # grab shared lock
+ barrier.wait()
+ barrier.wait() # hold the lock until exceptions are raised in other procs.
+
+ def timeout_write(self, barrier):
+ lock = Lock(self.lock_path)
+ barrier.wait() # wait for lock acquire in first process
+ self.assertRaises(LockError, lock.acquire_write, 0.1)
+ barrier.wait()
+
+ def timeout_read(self, barrier):
+ lock = Lock(self.lock_path)
+ barrier.wait() # wait for lock acquire in first process
+ self.assertRaises(LockError, lock.acquire_read, 0.1)
+ barrier.wait()
+
+
+ #
+ # Test that exclusive locks on other processes time out when an
+ # exclusive lock is held.
+ #
+ def test_write_lock_timeout_on_write(self):
+ self.multiproc_test(self.acquire_write, self.timeout_write)
+
+ def test_write_lock_timeout_on_write_2(self):
+ self.multiproc_test(self.acquire_write, self.timeout_write, self.timeout_write)
+
+ def test_write_lock_timeout_on_write_3(self):
+ self.multiproc_test(self.acquire_write, self.timeout_write, self.timeout_write, self.timeout_write)
+
+
+ #
+ # Test that shared locks on other processes time out when an
+ # exclusive lock is held.
+ #
+ def test_read_lock_timeout_on_write(self):
+ self.multiproc_test(self.acquire_write, self.timeout_read)
+
+ def test_read_lock_timeout_on_write_2(self):
+ self.multiproc_test(self.acquire_write, self.timeout_read, self.timeout_read)
+
+ def test_read_lock_timeout_on_write_3(self):
+ self.multiproc_test(self.acquire_write, self.timeout_read, self.timeout_read, self.timeout_read)
+
+
+ #
+ # Test that exclusive locks time out when shared locks are held.
+ #
+ def test_write_lock_timeout_on_read(self):
+ self.multiproc_test(self.acquire_read, self.timeout_write)
+
+ def test_write_lock_timeout_on_read_2(self):
+ self.multiproc_test(self.acquire_read, self.timeout_write, self.timeout_write)
+
+ def test_write_lock_timeout_on_read_3(self):
+ self.multiproc_test(self.acquire_read, self.timeout_write, self.timeout_write, self.timeout_write)
+
+
+ #
+ # Test that exclusive locks time out while lots of shared locks are held.
+ #
+ def test_write_lock_timeout_with_multiple_readers_2_1(self):
+ self.multiproc_test(self.acquire_read, self.acquire_read, self.timeout_write)
+
+ def test_write_lock_timeout_with_multiple_readers_2_2(self):
+ self.multiproc_test(self.acquire_read, self.acquire_read, self.timeout_write, self.timeout_write)
+
+ def test_write_lock_timeout_with_multiple_readers_3_1(self):
+ self.multiproc_test(self.acquire_read, self.acquire_read, self.acquire_read, self.timeout_write)
+
+ def test_write_lock_timeout_with_multiple_readers_3_2(self):
+ self.multiproc_test(self.acquire_read, self.acquire_read, self.acquire_read, self.timeout_write, self.timeout_write)
+
+
+ #
+ # Longer test case that ensures locks are reusable. Ordering is
+ # enforced by barriers throughout -- steps are shown with numbers.
+ #
+ def test_complex_acquire_and_release_chain(self):
+ def p1(barrier):
+ lock = Lock(self.lock_path)
+
+ lock.acquire_write()
+ barrier.wait() # ---------------------------------------- 1
+ # others test timeout
+ barrier.wait() # ---------------------------------------- 2
+ lock.release_write() # release and others acquire read
+ barrier.wait() # ---------------------------------------- 3
+ self.assertRaises(LockError, lock.acquire_write, 0.1)
+ lock.acquire_read()
+ barrier.wait() # ---------------------------------------- 4
+ lock.release_read()
+ barrier.wait() # ---------------------------------------- 5
+
+ # p2 upgrades read to write
+ barrier.wait() # ---------------------------------------- 6
+ self.assertRaises(LockError, lock.acquire_write, 0.1)
+ self.assertRaises(LockError, lock.acquire_read, 0.1)
+ barrier.wait() # ---------------------------------------- 7
+ # p2 releases write and read
+ barrier.wait() # ---------------------------------------- 8
+
+ # p3 acquires read
+ barrier.wait() # ---------------------------------------- 9
+ # p3 upgrades read to write
+ barrier.wait() # ---------------------------------------- 10
+ self.assertRaises(LockError, lock.acquire_write, 0.1)
+ self.assertRaises(LockError, lock.acquire_read, 0.1)
+ barrier.wait() # ---------------------------------------- 11
+ # p3 releases locks
+ barrier.wait() # ---------------------------------------- 12
+ lock.acquire_read()
+ barrier.wait() # ---------------------------------------- 13
+ lock.release_read()
+
+
+ def p2(barrier):
+ lock = Lock(self.lock_path)
+
+ # p1 acquires write
+ barrier.wait() # ---------------------------------------- 1
+ self.assertRaises(LockError, lock.acquire_write, 0.1)
+ self.assertRaises(LockError, lock.acquire_read, 0.1)
+ barrier.wait() # ---------------------------------------- 2
+ lock.acquire_read()
+ barrier.wait() # ---------------------------------------- 3
+ # p1 tests shared read
+ barrier.wait() # ---------------------------------------- 4
+ # others release reads
+ barrier.wait() # ---------------------------------------- 5
+
+ lock.acquire_write() # upgrade read to write
+ barrier.wait() # ---------------------------------------- 6
+ # others test timeout
+ barrier.wait() # ---------------------------------------- 7
+ lock.release_write() # release read AND write (need both)
+ lock.release_read()
+ barrier.wait() # ---------------------------------------- 8
+
+ # p3 acquires read
+ barrier.wait() # ---------------------------------------- 9
+ # p3 upgrades read to write
+ barrier.wait() # ---------------------------------------- 10
+ self.assertRaises(LockError, lock.acquire_write, 0.1)
+ self.assertRaises(LockError, lock.acquire_read, 0.1)
+ barrier.wait() # ---------------------------------------- 11
+ # p3 releases locks
+ barrier.wait() # ---------------------------------------- 12
+ lock.acquire_read()
+ barrier.wait() # ---------------------------------------- 13
+ lock.release_read()
+
+
+ def p3(barrier):
+ lock = Lock(self.lock_path)
+
+ # p1 acquires write
+ barrier.wait() # ---------------------------------------- 1
+ self.assertRaises(LockError, lock.acquire_write, 0.1)
+ self.assertRaises(LockError, lock.acquire_read, 0.1)
+ barrier.wait() # ---------------------------------------- 2
+ lock.acquire_read()
+ barrier.wait() # ---------------------------------------- 3
+ # p1 tests shared read
+ barrier.wait() # ---------------------------------------- 4
+ lock.release_read()
+ barrier.wait() # ---------------------------------------- 5
+
+ # p2 upgrades read to write
+ barrier.wait() # ---------------------------------------- 6
+ self.assertRaises(LockError, lock.acquire_write, 0.1)
+ self.assertRaises(LockError, lock.acquire_read, 0.1)
+ barrier.wait() # ---------------------------------------- 7
+ # p2 releases write & read
+ barrier.wait() # ---------------------------------------- 8
+
+ lock.acquire_read()
+ barrier.wait() # ---------------------------------------- 9
+ lock.acquire_write()
+ barrier.wait() # ---------------------------------------- 10
+ # others test timeout
+ barrier.wait() # ---------------------------------------- 11
+ lock.release_read() # release read AND write in opposite
+ lock.release_write() # order from before on p2
+ barrier.wait() # ---------------------------------------- 12
+ lock.acquire_read()
+ barrier.wait() # ---------------------------------------- 13
+ lock.release_read()
+
+ self.multiproc_test(p1, p2, p3)
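
Each test above composes small process bodies and runs them in lock-step:
every process blocks at barrier.wait() until all participants arrive, which
fixes the interleaving of acquires, timeouts, and releases. A minimal sketch
of the same pattern using Python 3's multiprocessing.Barrier in place of
Spack's own Barrier helper:

    import multiprocessing

    def worker(barrier, log, name):
        barrier.wait()                 # step 1: all processes rendezvous
        log.append(name)               # work happens in a known phase
        barrier.wait()                 # step 2: nobody proceeds early

    if __name__ == '__main__':
        barrier = multiprocessing.Barrier(2, timeout=5)
        manager = multiprocessing.Manager()
        log = manager.list()
        procs = [multiprocessing.Process(target=worker, args=(barrier, log, n))
                 for n in ('p1', 'p2')]
        for p in procs:
            p.start()
        for p in procs:
            p.join()
        assert sorted(log) == ['p1', 'p2']
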
diff --git a/lib/spack/spack/test/make_executable.py b/lib/spack/spack/test/make_executable.py
index c4bfeb2a03..d568a28d44 100644
--- a/lib/spack/spack/test/make_executable.py
+++ b/lib/spack/spack/test/make_executable.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -56,47 +56,47 @@ class MakeExecutableTest(unittest.TestCase):
def test_make_normal(self):
make = MakeExecutable('make', 8)
- self.assertEqual(make(return_output=True).strip(), '-j8')
- self.assertEqual(make('install', return_output=True).strip(), '-j8 install')
+ self.assertEqual(make(output=str).strip(), '-j8')
+ self.assertEqual(make('install', output=str).strip(), '-j8 install')
def test_make_explicit(self):
make = MakeExecutable('make', 8)
- self.assertEqual(make(parallel=True, return_output=True).strip(), '-j8')
- self.assertEqual(make('install', parallel=True, return_output=True).strip(), '-j8 install')
+ self.assertEqual(make(parallel=True, output=str).strip(), '-j8')
+ self.assertEqual(make('install', parallel=True, output=str).strip(), '-j8 install')
def test_make_one_job(self):
make = MakeExecutable('make', 1)
- self.assertEqual(make(return_output=True).strip(), '')
- self.assertEqual(make('install', return_output=True).strip(), 'install')
+ self.assertEqual(make(output=str).strip(), '')
+ self.assertEqual(make('install', output=str).strip(), 'install')
def test_make_parallel_false(self):
make = MakeExecutable('make', 8)
- self.assertEqual(make(parallel=False, return_output=True).strip(), '')
- self.assertEqual(make('install', parallel=False, return_output=True).strip(), 'install')
+ self.assertEqual(make(parallel=False, output=str).strip(), '')
+ self.assertEqual(make('install', parallel=False, output=str).strip(), 'install')
def test_make_parallel_disabled(self):
make = MakeExecutable('make', 8)
os.environ['SPACK_NO_PARALLEL_MAKE'] = 'true'
- self.assertEqual(make(return_output=True).strip(), '')
- self.assertEqual(make('install', return_output=True).strip(), 'install')
+ self.assertEqual(make(output=str).strip(), '')
+ self.assertEqual(make('install', output=str).strip(), 'install')
os.environ['SPACK_NO_PARALLEL_MAKE'] = '1'
- self.assertEqual(make(return_output=True).strip(), '')
- self.assertEqual(make('install', return_output=True).strip(), 'install')
+ self.assertEqual(make(output=str).strip(), '')
+ self.assertEqual(make('install', output=str).strip(), 'install')
# These don't disable (false and random string)
os.environ['SPACK_NO_PARALLEL_MAKE'] = 'false'
- self.assertEqual(make(return_output=True).strip(), '-j8')
- self.assertEqual(make('install', return_output=True).strip(), '-j8 install')
+ self.assertEqual(make(output=str).strip(), '-j8')
+ self.assertEqual(make('install', output=str).strip(), '-j8 install')
os.environ['SPACK_NO_PARALLEL_MAKE'] = 'foobar'
- self.assertEqual(make(return_output=True).strip(), '-j8')
- self.assertEqual(make('install', return_output=True).strip(), '-j8 install')
+ self.assertEqual(make(output=str).strip(), '-j8')
+ self.assertEqual(make('install', output=str).strip(), '-j8 install')
del os.environ['SPACK_NO_PARALLEL_MAKE']
@@ -106,20 +106,20 @@ class MakeExecutableTest(unittest.TestCase):
# These should work
os.environ['SPACK_NO_PARALLEL_MAKE'] = 'true'
- self.assertEqual(make(parallel=True, return_output=True).strip(), '')
- self.assertEqual(make('install', parallel=True, return_output=True).strip(), 'install')
+ self.assertEqual(make(parallel=True, output=str).strip(), '')
+ self.assertEqual(make('install', parallel=True, output=str).strip(), 'install')
os.environ['SPACK_NO_PARALLEL_MAKE'] = '1'
- self.assertEqual(make(parallel=True, return_output=True).strip(), '')
- self.assertEqual(make('install', parallel=True, return_output=True).strip(), 'install')
+ self.assertEqual(make(parallel=True, output=str).strip(), '')
+ self.assertEqual(make('install', parallel=True, output=str).strip(), 'install')
# These don't disable (false and random string)
os.environ['SPACK_NO_PARALLEL_MAKE'] = 'false'
- self.assertEqual(make(parallel=True, return_output=True).strip(), '-j8')
- self.assertEqual(make('install', parallel=True, return_output=True).strip(), '-j8 install')
+ self.assertEqual(make(parallel=True, output=str).strip(), '-j8')
+ self.assertEqual(make('install', parallel=True, output=str).strip(), '-j8 install')
os.environ['SPACK_NO_PARALLEL_MAKE'] = 'foobar'
- self.assertEqual(make(parallel=True, return_output=True).strip(), '-j8')
- self.assertEqual(make('install', parallel=True, return_output=True).strip(), '-j8 install')
+ self.assertEqual(make(parallel=True, output=str).strip(), '-j8')
+ self.assertEqual(make('install', parallel=True, output=str).strip(), '-j8 install')
del os.environ['SPACK_NO_PARALLEL_MAKE']
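
The churn in this file is one mechanical rename: the Executable call
convention changed from the boolean return_output=True to output=str, which
requests the command's standard output back as a string. A toy model of the
new convention (a hypothetical run wrapper built on subprocess, not Spack's
Executable; assumes a Unix-like echo):

    import subprocess

    def run(args, output=None):
        # output=str mimics the new convention: capture and return stdout
        if output is str:
            return subprocess.check_output(args).decode()
        subprocess.check_call(args)

    assert run(['echo', '-j8', 'install'], output=str).strip() == '-j8 install'
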
diff --git a/lib/spack/spack/test/mirror.py b/lib/spack/spack/test/mirror.py
index 89ab14359e..04e9e3db2e 100644
--- a/lib/spack/spack/test/mirror.py
+++ b/lib/spack/spack/test/mirror.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -55,7 +55,7 @@ class MirrorTest(MockPackagesTest):
spec.concretize()
# Get the package and fix its fetch args to point to a mock repo
- pkg = spack.db.get(spec)
+ pkg = spack.repo.get(spec)
repo = MockRepoClass()
self.repos[name] = repo
diff --git a/lib/spack/spack/test/mock_packages_test.py b/lib/spack/spack/test/mock_packages_test.py
index 00f81114af..bc1735efe6 100644
--- a/lib/spack/spack/test/mock_packages_test.py
+++ b/lib/spack/spack/test/mock_packages_test.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -22,43 +22,110 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+import sys
+import os
+import shutil
import unittest
+import tempfile
+from ordereddict_backport import OrderedDict
+
+from llnl.util.filesystem import mkdirp
import spack
import spack.config
-from spack.packages import PackageDB
+from spack.repository import RepoPath
from spack.spec import Spec
+mock_compiler_config = """\
+compilers:
+ all:
+ clang@3.3:
+ cc: /path/to/clang
+ cxx: /path/to/clang++
+ f77: None
+ fc: None
+ gcc@4.5.0:
+ cc: /path/to/gcc
+ cxx: /path/to/g++
+ f77: /path/to/gfortran
+ fc: /path/to/gfortran
+"""
-def set_pkg_dep(pkg, spec):
- """Alters dependence information for a package.
- Use this to mock up constraints.
- """
- spec = Spec(spec)
- spack.db.get(pkg).dependencies[spec.name] = { Spec(pkg) : spec }
-
+mock_packages_config = """\
+packages:
+ externaltool:
+ nobuild: True
+ paths:
+ externaltool@1.0%gcc@4.5.0: /path/to/external_tool
+ externalvirtual:
+ nobuild: True
+ paths:
+ externalvirtual@2.0%clang@3.3: /path/to/external_virtual_clang
+ externalvirtual@1.0%gcc@4.5.0: /path/to/external_virtual_gcc
+"""
class MockPackagesTest(unittest.TestCase):
def initmock(self):
# Use the mock packages database for these tests. This allows
# us to set up contrived packages that don't interfere with
# real ones.
- self.real_db = spack.db
- spack.db = PackageDB(spack.mock_packages_path)
+ self.db = RepoPath(spack.mock_packages_path)
+ spack.repo.swap(self.db)
spack.config.clear_config_caches()
self.real_scopes = spack.config.config_scopes
- spack.config.config_scopes = [
- ('site', spack.mock_site_config),
- ('user', spack.mock_user_config)]
+
+ # Mock up temporary configuration directories
+ self.temp_config = tempfile.mkdtemp()
+ self.mock_site_config = os.path.join(self.temp_config, 'site')
+ self.mock_user_config = os.path.join(self.temp_config, 'user')
+ mkdirp(self.mock_site_config)
+ mkdirp(self.mock_user_config)
+ for confs in [('compilers.yaml', mock_compiler_config), ('packages.yaml', mock_packages_config)]:
+ conf_yaml = os.path.join(self.mock_site_config, confs[0])
+ with open(conf_yaml, 'w') as f:
+ f.write(confs[1])
+
+ # TODO: Mocking this up is kind of brittle b/c ConfigScope
+ # TODO: constructor modifies config_scopes. Make it cleaner.
+ spack.config.config_scopes = OrderedDict()
+ spack.config.ConfigScope('site', self.mock_site_config)
+ spack.config.ConfigScope('user', self.mock_user_config)
+
+ # Store changes to the package's dependencies so we can
+ # restore later.
+ self.saved_deps = {}
+
+
+ def set_pkg_dep(self, pkg_name, spec):
+ """Alters dependence information for a package.
+
+ Adds a dependency on <spec> to pkg.
+ Use this to mock up constraints.
+ """
+ spec = Spec(spec)
+
+ # Save original dependencies before making any changes.
+ pkg = spack.repo.get(pkg_name)
+ if pkg_name not in self.saved_deps:
+ self.saved_deps[pkg_name] = (pkg, pkg.dependencies.copy())
+
+ # Change dep spec
+ pkg.dependencies[spec.name] = { Spec(pkg_name) : spec }
def cleanmock(self):
"""Restore the real packages path after any test."""
- spack.db = self.real_db
+ spack.repo.swap(self.db)
spack.config.config_scopes = self.real_scopes
+ shutil.rmtree(self.temp_config, ignore_errors=True)
spack.config.clear_config_caches()
+ # Restore dependency changes that happened during the test
+ for pkg_name, (pkg, deps) in self.saved_deps.items():
+ pkg.dependencies.clear()
+ pkg.dependencies.update(deps)
+
def setUp(self):
self.initmock()
@@ -66,5 +133,3 @@ class MockPackagesTest(unittest.TestCase):
def tearDown(self):
self.cleanmock()
-
-
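
The new set_pkg_dep/cleanmock pair implements a save-on-first-write,
restore-on-teardown discipline so one test's mocked dependencies cannot leak
into the next. The pattern in isolation (a generic sketch, not the
MockPackagesTest code itself):

    class PatchingMixin(object):
        """Save an attribute's original value once; restore it on teardown."""
        def setUp(self):
            self._saved = {}

        def patch_attr(self, obj, name, value):
            key = (id(obj), name)
            if key not in self._saved:          # remember the original once
                self._saved[key] = (obj, name, getattr(obj, name))
            setattr(obj, name, value)

        def tearDown(self):
            for obj, name, original in self._saved.values():
                setattr(obj, name, original)
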
diff --git a/lib/spack/spack/test/mock_repo.py b/lib/spack/spack/test/mock_repo.py
index fd184e64bc..9738ba4e72 100644
--- a/lib/spack/spack/test/mock_repo.py
+++ b/lib/spack/spack/test/mock_repo.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -141,7 +141,7 @@ class MockGitRepo(MockVCSRepo):
self.url = self.path
def rev_hash(self, rev):
- return git('rev-parse', rev, return_output=True).strip()
+ return git('rev-parse', rev, output=str).strip()
class MockSvnRepo(MockVCSRepo):
@@ -193,4 +193,4 @@ class MockHgRepo(MockVCSRepo):
def get_rev(self):
"""Get current mercurial revision."""
- return hg('id', '-i', return_output=True).strip()
+ return hg('id', '-i', output=str).strip()
diff --git a/lib/spack/spack/test/multimethod.py b/lib/spack/spack/test/multimethod.py
index cd5d9e625e..7bf4ff0a0a 100644
--- a/lib/spack/spack/test/multimethod.py
+++ b/lib/spack/spack/test/multimethod.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -38,92 +38,92 @@ from spack.test.mock_packages_test import *
class MultiMethodTest(MockPackagesTest):
def test_no_version_match(self):
- pkg = spack.db.get('multimethod@2.0')
+ pkg = spack.repo.get('multimethod@2.0')
self.assertRaises(NoSuchMethodError, pkg.no_version_2)
def test_one_version_match(self):
- pkg = spack.db.get('multimethod@1.0')
+ pkg = spack.repo.get('multimethod@1.0')
self.assertEqual(pkg.no_version_2(), 1)
- pkg = spack.db.get('multimethod@3.0')
+ pkg = spack.repo.get('multimethod@3.0')
self.assertEqual(pkg.no_version_2(), 3)
- pkg = spack.db.get('multimethod@4.0')
+ pkg = spack.repo.get('multimethod@4.0')
self.assertEqual(pkg.no_version_2(), 4)
def test_version_overlap(self):
- pkg = spack.db.get('multimethod@2.0')
+ pkg = spack.repo.get('multimethod@2.0')
self.assertEqual(pkg.version_overlap(), 1)
- pkg = spack.db.get('multimethod@5.0')
+ pkg = spack.repo.get('multimethod@5.0')
self.assertEqual(pkg.version_overlap(), 2)
def test_mpi_version(self):
- pkg = spack.db.get('multimethod^mpich@3.0.4')
+ pkg = spack.repo.get('multimethod^mpich@3.0.4')
self.assertEqual(pkg.mpi_version(), 3)
- pkg = spack.db.get('multimethod^mpich2@1.2')
+ pkg = spack.repo.get('multimethod^mpich2@1.2')
self.assertEqual(pkg.mpi_version(), 2)
- pkg = spack.db.get('multimethod^mpich@1.0')
+ pkg = spack.repo.get('multimethod^mpich@1.0')
self.assertEqual(pkg.mpi_version(), 1)
def test_undefined_mpi_version(self):
- pkg = spack.db.get('multimethod^mpich@0.4')
+ pkg = spack.repo.get('multimethod^mpich@0.4')
self.assertEqual(pkg.mpi_version(), 1)
- pkg = spack.db.get('multimethod^mpich@1.4')
+ pkg = spack.repo.get('multimethod^mpich@1.4')
self.assertEqual(pkg.mpi_version(), 1)
def test_default_works(self):
- pkg = spack.db.get('multimethod%gcc')
+ pkg = spack.repo.get('multimethod%gcc')
self.assertEqual(pkg.has_a_default(), 'gcc')
- pkg = spack.db.get('multimethod%intel')
+ pkg = spack.repo.get('multimethod%intel')
self.assertEqual(pkg.has_a_default(), 'intel')
- pkg = spack.db.get('multimethod%pgi')
+ pkg = spack.repo.get('multimethod%pgi')
self.assertEqual(pkg.has_a_default(), 'default')
def test_architecture_match(self):
- pkg = spack.db.get('multimethod=x86_64')
+ pkg = spack.repo.get('multimethod=x86_64')
self.assertEqual(pkg.different_by_architecture(), 'x86_64')
- pkg = spack.db.get('multimethod=ppc64')
+ pkg = spack.repo.get('multimethod=ppc64')
self.assertEqual(pkg.different_by_architecture(), 'ppc64')
- pkg = spack.db.get('multimethod=ppc32')
+ pkg = spack.repo.get('multimethod=ppc32')
self.assertEqual(pkg.different_by_architecture(), 'ppc32')
- pkg = spack.db.get('multimethod=arm64')
+ pkg = spack.repo.get('multimethod=arm64')
self.assertEqual(pkg.different_by_architecture(), 'arm64')
- pkg = spack.db.get('multimethod=macos')
+ pkg = spack.repo.get('multimethod=macos')
self.assertRaises(NoSuchMethodError, pkg.different_by_architecture)
def test_dependency_match(self):
- pkg = spack.db.get('multimethod^zmpi')
+ pkg = spack.repo.get('multimethod^zmpi')
self.assertEqual(pkg.different_by_dep(), 'zmpi')
- pkg = spack.db.get('multimethod^mpich')
+ pkg = spack.repo.get('multimethod^mpich')
self.assertEqual(pkg.different_by_dep(), 'mpich')
# If we try to switch on some entirely different dep, it's ambiguous,
# but should take the first option
- pkg = spack.db.get('multimethod^foobar')
+ pkg = spack.repo.get('multimethod^foobar')
self.assertEqual(pkg.different_by_dep(), 'mpich')
def test_virtual_dep_match(self):
- pkg = spack.db.get('multimethod^mpich2')
+ pkg = spack.repo.get('multimethod^mpich2')
self.assertEqual(pkg.different_by_virtual_dep(), 2)
- pkg = spack.db.get('multimethod^mpich@1.0')
+ pkg = spack.repo.get('multimethod^mpich@1.0')
self.assertEqual(pkg.different_by_virtual_dep(), 1)
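
What these tests exercise is multimethod dispatch: a package defines a method
several times, each definition guarded by a spec constraint, and the version
whose constraint matches the concrete spec is the one that runs. A toy
rendition of the dispatch idea (hypothetical when decorator, far simpler than
Spack's):

    def when(predicate):
        # attach a guard to the decorated implementation
        def decorator(fn):
            fn._guard = predicate
            return fn
        return decorator

    class ToyMultiMethod(object):
        def __init__(self, version):
            self.version = version

        @when(lambda self: self.version < 2)
        def _mpi_v1(self):
            return 1

        @when(lambda self: self.version >= 2)
        def _mpi_v2(self):
            return 2

        def mpi_version(self):
            # dispatch to the first implementation whose guard matches
            for impl in (ToyMultiMethod._mpi_v1, ToyMultiMethod._mpi_v2):
                if impl._guard(self):
                    return impl(self)
            raise NotImplementedError('no matching implementation')

    assert ToyMultiMethod(1).mpi_version() == 1
    assert ToyMultiMethod(3).mpi_version() == 2
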
diff --git a/lib/spack/spack/test/namespace_trie.py b/lib/spack/spack/test/namespace_trie.py
new file mode 100644
index 0000000000..d0d809004d
--- /dev/null
+++ b/lib/spack/spack/test/namespace_trie.py
@@ -0,0 +1,114 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import unittest
+from spack.util.naming import NamespaceTrie
+
+
+class NamespaceTrieTest(unittest.TestCase):
+
+ def setUp(self):
+ self.trie = NamespaceTrie()
+
+
+ def test_add_single(self):
+ self.trie['foo'] = 'bar'
+
+ self.assertTrue(self.trie.is_prefix('foo'))
+ self.assertTrue(self.trie.has_value('foo'))
+ self.assertEqual(self.trie['foo'], 'bar')
+
+
+ def test_add_multiple(self):
+ self.trie['foo.bar'] = 'baz'
+
+ self.assertFalse(self.trie.has_value('foo'))
+ self.assertTrue(self.trie.is_prefix('foo'))
+
+ self.assertTrue(self.trie.is_prefix('foo.bar'))
+ self.assertTrue(self.trie.has_value('foo.bar'))
+ self.assertEqual(self.trie['foo.bar'], 'baz')
+
+ self.assertFalse(self.trie.is_prefix('foo.bar.baz'))
+ self.assertFalse(self.trie.has_value('foo.bar.baz'))
+
+
+ def test_add_three(self):
+ # add a three-level namespace
+ self.trie['foo.bar.baz'] = 'quux'
+
+ self.assertTrue(self.trie.is_prefix('foo'))
+ self.assertFalse(self.trie.has_value('foo'))
+
+ self.assertTrue(self.trie.is_prefix('foo.bar'))
+ self.assertFalse(self.trie.has_value('foo.bar'))
+
+ self.assertTrue(self.trie.is_prefix('foo.bar.baz'))
+ self.assertTrue(self.trie.has_value('foo.bar.baz'))
+ self.assertEqual(self.trie['foo.bar.baz'], 'quux')
+
+ self.assertFalse(self.trie.is_prefix('foo.bar.baz.quux'))
+ self.assertFalse(self.trie.has_value('foo.bar.baz.quux'))
+
+ # Try to add a second element in a prefix namespace
+ self.trie['foo.bar'] = 'blah'
+
+ self.assertTrue(self.trie.is_prefix('foo'))
+ self.assertFalse(self.trie.has_value('foo'))
+
+ self.assertTrue(self.trie.is_prefix('foo.bar'))
+ self.assertTrue(self.trie.has_value('foo.bar'))
+ self.assertEqual(self.trie['foo.bar'], 'blah')
+
+ self.assertTrue(self.trie.is_prefix('foo.bar.baz'))
+ self.assertTrue(self.trie.has_value('foo.bar.baz'))
+ self.assertEqual(self.trie['foo.bar.baz'], 'quux')
+
+ self.assertFalse(self.trie.is_prefix('foo.bar.baz.quux'))
+ self.assertFalse(self.trie.has_value('foo.bar.baz.quux'))
+
+
+ def test_add_none_single(self):
+ self.trie['foo'] = None
+ self.assertTrue(self.trie.is_prefix('foo'))
+ self.assertTrue(self.trie.has_value('foo'))
+ self.assertEqual(self.trie['foo'], None)
+
+ self.assertFalse(self.trie.is_prefix('foo.bar'))
+ self.assertFalse(self.trie.has_value('foo.bar'))
+
+
+
+ def test_add_none_multiple(self):
+ self.trie['foo.bar'] = None
+
+ self.assertTrue(self.trie.is_prefix('foo'))
+ self.assertFalse(self.trie.has_value('foo'))
+
+ self.assertTrue(self.trie.is_prefix('foo.bar'))
+ self.assertTrue(self.trie.has_value('foo.bar'))
+ self.assertEqual(self.trie['foo.bar'], None)
+
+ self.assertFalse(self.trie.is_prefix('foo.bar.baz'))
+ self.assertFalse(self.trie.has_value('foo.bar.baz'))
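
The contract pinned down by these tests: is_prefix(name) is true when the
dotted name lies on the path to any stored value (including an exact match),
while has_value(name) is true only when a value was stored at exactly that
name. A flat-dict toy with the same observable behavior (a sketch, not the
real NamespaceTrie):

    class ToyTrie(object):
        def __init__(self):
            self._values = {}            # full dotted name -> value

        def __setitem__(self, name, value):
            self._values[name] = value

        def __getitem__(self, name):
            return self._values[name]

        def has_value(self, name):
            return name in self._values

        def is_prefix(self, name):
            return (name in self._values or
                    any(k.startswith(name + '.') for k in self._values))

    t = ToyTrie()
    t['foo.bar.baz'] = 'quux'
    assert t.is_prefix('foo') and not t.has_value('foo')
    assert t.is_prefix('foo.bar.baz') and t.has_value('foo.bar.baz')
    assert not t.is_prefix('foo.bar.baz.quux')
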
diff --git a/lib/spack/spack/test/optional_deps.py b/lib/spack/spack/test/optional_deps.py
index fbee0cfa8f..ebd7281999 100644
--- a/lib/spack/spack/test/optional_deps.py
+++ b/lib/spack/spack/test/optional_deps.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/lib/spack/spack/test/package_sanity.py b/lib/spack/spack/test/package_sanity.py
index 6222e7b5f8..ee09040d0d 100644
--- a/lib/spack/spack/test/package_sanity.py
+++ b/lib/spack/spack/test/package_sanity.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -28,16 +28,15 @@ This test does sanity checks on Spack's builtin package database.
import unittest
import spack
-import spack.url as url
-from spack.packages import PackageDB
+from spack.repository import RepoPath
class PackageSanityTest(unittest.TestCase):
def check_db(self):
"""Get all packages in a DB to make sure they work."""
- for name in spack.db.all_package_names():
- spack.db.get(name)
+ for name in spack.repo.all_package_names():
+ spack.repo.get(name)
def test_get_all_packages(self):
@@ -47,15 +46,15 @@ class PackageSanityTest(unittest.TestCase):
def test_get_all_mock_packages(self):
"""Get the mock packages once each too."""
- tmp = spack.db
- spack.db = PackageDB(spack.mock_packages_path)
+ db = RepoPath(spack.mock_packages_path)
+ spack.repo.swap(db)
self.check_db()
- spack.db = tmp
+ spack.repo.swap(db)
def test_url_versions(self):
"""Check URLs for regular packages, if they are explicitly defined."""
- for pkg in spack.db.all_packages():
+ for pkg in spack.repo.all_packages():
for v, vattrs in pkg.versions.items():
if 'url' in vattrs:
# If there is a url for the version check it.
diff --git a/lib/spack/spack/test/packages.py b/lib/spack/spack/test/packages.py
index a8183cf6a6..83984dc5f6 100644
--- a/lib/spack/spack/test/packages.py
+++ b/lib/spack/spack/test/packages.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -27,7 +27,7 @@ import unittest
from llnl.util.filesystem import join_path
import spack
-import spack.packages as packages
+from spack.repository import Repo
from spack.util.naming import mod_to_class
from spack.test.mock_packages_test import *
@@ -35,27 +35,32 @@ from spack.test.mock_packages_test import *
class PackagesTest(MockPackagesTest):
def test_load_package(self):
- pkg = spack.db.get('mpich')
+ pkg = spack.repo.get('mpich')
def test_package_name(self):
- pkg = spack.db.get('mpich')
+ pkg = spack.repo.get('mpich')
self.assertEqual(pkg.name, 'mpich')
def test_package_filename(self):
- filename = spack.db.filename_for_package_name('mpich')
- self.assertEqual(filename, join_path(spack.mock_packages_path, 'mpich', 'package.py'))
+ repo = Repo(spack.mock_packages_path)
+ filename = repo.filename_for_package_name('mpich')
+ self.assertEqual(filename,
+ join_path(spack.mock_packages_path, 'packages', 'mpich', 'package.py'))
def test_package_name(self):
- pkg = spack.db.get('mpich')
+ pkg = spack.repo.get('mpich')
self.assertEqual(pkg.name, 'mpich')
def test_nonexisting_package_filename(self):
- filename = spack.db.filename_for_package_name('some-nonexisting-package')
- self.assertEqual(filename, join_path(spack.mock_packages_path, 'some-nonexisting-package', 'package.py'))
+ repo = Repo(spack.mock_packages_path)
+ filename = repo.filename_for_package_name('some-nonexisting-package')
+ self.assertEqual(
+ filename,
+ join_path(spack.mock_packages_path, 'packages', 'some-nonexisting-package', 'package.py'))
def test_package_class_names(self):
@@ -64,3 +69,38 @@ class PackagesTest(MockPackagesTest):
self.assertEqual('PmgrCollective', mod_to_class('pmgr-collective'))
self.assertEqual('Pmgrcollective', mod_to_class('PmgrCollective'))
self.assertEqual('_3db', mod_to_class('3db'))
+
+
+ #
+ # Below tests target direct imports of spack packages from the
+ # spack.pkg namespace
+ #
+
+ def test_import_package(self):
+ import spack.pkg.builtin.mock.mpich
+
+
+ def test_import_package_as(self):
+ import spack.pkg.builtin.mock.mpich as mp
+
+
+ def test_import_class_from_package(self):
+ from spack.pkg.builtin.mock.mpich import Mpich
+
+
+ def test_import_module_from_package(self):
+ from spack.pkg.builtin.mock import mpich
+
+
+ def test_import_namespace_container_modules(self):
+ import spack.pkg
+ import spack.pkg as p
+ from spack import pkg
+
+ import spack.pkg.builtin
+ import spack.pkg.builtin as b
+ from spack.pkg import builtin
+
+ import spack.pkg.builtin.mock
+ import spack.pkg.builtin.mock as m
+ from spack.pkg.builtin import mock
diff --git a/lib/spack/spack/test/python_version.py b/lib/spack/spack/test/python_version.py
index 5779d31ed2..d74d3b9b7d 100644
--- a/lib/spack/spack/test/python_version.py
+++ b/lib/spack/spack/test/python_version.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -34,7 +34,7 @@ import re
import llnl.util.tty as tty
-from external import pyqver2
+import pyqver2
import spack
spack_max_version = (2,6)
@@ -54,8 +54,8 @@ class PythonVersionTest(unittest.TestCase):
def package_py_files(self):
- for name in spack.db.all_package_names():
- yield spack.db.filename_for_package_name(name)
+ for name in spack.repo.all_package_names():
+ yield spack.repo.filename_for_package_name(name)
def check_python_versions(self, *files):
@@ -63,10 +63,6 @@ class PythonVersionTest(unittest.TestCase):
all_issues = {}
for fn in files:
- if fn != '/Users/gamblin2/src/spack/var/spack/packages/vim/package.py':
- continue
- print fn
-
with open(fn) as pyfile:
versions = pyqver2.get_versions(pyfile.read())
for ver, reasons in versions.items():
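
For context, pyqver2.get_versions returns a mapping from minimum-required
interpreter version to the constructs that require it; the test fails if any
file needs more than spack_max_version (2.6). A sketch of the per-file check,
using only the API visible in the hunk above:

    import pyqver2

    spack_max_version = (2, 6)

    def too_new_constructs(path):
        """Return {version: reasons} for constructs newer than we allow."""
        with open(path) as pyfile:
            versions = pyqver2.get_versions(pyfile.read())
        return dict((ver, reasons) for ver, reasons in versions.items()
                    if ver > spack_max_version)
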
diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py
index 549f829d3e..632f777cde 100644
--- a/lib/spack/spack/test/spec_dag.py
+++ b/lib/spack/spack/test/spec_dag.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -40,8 +40,8 @@ from spack.test.mock_packages_test import *
class SpecDagTest(MockPackagesTest):
def test_conflicting_package_constraints(self):
- set_pkg_dep('mpileaks', 'mpich@1.0')
- set_pkg_dep('callpath', 'mpich@2.0')
+ self.set_pkg_dep('mpileaks', 'mpich@1.0')
+ self.set_pkg_dep('callpath', 'mpich@2.0')
spec = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf ^libdwarf')
@@ -223,25 +223,25 @@ class SpecDagTest(MockPackagesTest):
def test_unsatisfiable_version(self):
- set_pkg_dep('mpileaks', 'mpich@1.0')
+ self.set_pkg_dep('mpileaks', 'mpich@1.0')
spec = Spec('mpileaks ^mpich@2.0 ^callpath ^dyninst ^libelf ^libdwarf')
self.assertRaises(spack.spec.UnsatisfiableVersionSpecError, spec.normalize)
def test_unsatisfiable_compiler(self):
- set_pkg_dep('mpileaks', 'mpich%gcc')
+ self.set_pkg_dep('mpileaks', 'mpich%gcc')
spec = Spec('mpileaks ^mpich%intel ^callpath ^dyninst ^libelf ^libdwarf')
self.assertRaises(spack.spec.UnsatisfiableCompilerSpecError, spec.normalize)
def test_unsatisfiable_compiler_version(self):
- set_pkg_dep('mpileaks', 'mpich%gcc@4.6')
+ self.set_pkg_dep('mpileaks', 'mpich%gcc@4.6')
spec = Spec('mpileaks ^mpich%gcc@4.5 ^callpath ^dyninst ^libelf ^libdwarf')
self.assertRaises(spack.spec.UnsatisfiableCompilerSpecError, spec.normalize)
def test_unsatisfiable_architecture(self):
- set_pkg_dep('mpileaks', 'mpich=bgqos_0')
+ self.set_pkg_dep('mpileaks', 'mpich=bgqos_0')
spec = Spec('mpileaks ^mpich=sles_10_ppc64 ^callpath ^dyninst ^libelf ^libdwarf')
self.assertRaises(spack.spec.UnsatisfiableArchitectureSpecError, spec.normalize)
@@ -340,16 +340,18 @@ class SpecDagTest(MockPackagesTest):
self.assertEqual(spec, expected_flat)
self.assertTrue(spec.eq_dag(expected_flat))
- self.assertEqual(spec, expected_normalized)
+ # Normalized has different DAG structure, so NOT equal.
+ self.assertNotEqual(spec, expected_normalized)
self.assertFalse(spec.eq_dag(expected_normalized))
- self.assertEqual(spec, non_unique_nodes)
+ # Again, different DAG structure so not equal.
+ self.assertNotEqual(spec, non_unique_nodes)
self.assertFalse(spec.eq_dag(non_unique_nodes))
spec.normalize()
# After normalizing, spec_dag_equal should match the normalized spec.
- self.assertEqual(spec, expected_flat)
+ self.assertNotEqual(spec, expected_flat)
self.assertFalse(spec.eq_dag(expected_flat))
self.assertEqual(spec, expected_normalized)
diff --git a/lib/spack/spack/test/spec_semantics.py b/lib/spack/spack/test/spec_semantics.py
index 6666dbbb52..44a09cbd7f 100644
--- a/lib/spack/spack/test/spec_semantics.py
+++ b/lib/spack/spack/test/spec_semantics.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -35,7 +35,10 @@ class SpecSematicsTest(MockPackagesTest):
# ================================================================================
def check_satisfies(self, spec, anon_spec, concrete=False):
left = Spec(spec, concrete=concrete)
- right = parse_anonymous_spec(anon_spec, left.name)
+ try:
+ right = Spec(anon_spec) # if it's not anonymous, allow it.
+ except:
+ right = parse_anonymous_spec(anon_spec, left.name)
# Satisfies is one-directional.
self.assertTrue(left.satisfies(right))
@@ -48,7 +51,10 @@ class SpecSematicsTest(MockPackagesTest):
def check_unsatisfiable(self, spec, anon_spec, concrete=False):
left = Spec(spec, concrete=concrete)
- right = parse_anonymous_spec(anon_spec, left.name)
+ try:
+ right = Spec(anon_spec) # if it's not anonymous, allow it.
+ except:
+ right = parse_anonymous_spec(anon_spec, left.name)
self.assertFalse(left.satisfies(right))
self.assertFalse(left.satisfies(anon_spec))
@@ -88,6 +94,28 @@ class SpecSematicsTest(MockPackagesTest):
self.check_satisfies('libdwarf^libelf@0.8.13', '^libelf@0:1')
+ def test_satisfies_namespace(self):
+ self.check_satisfies('builtin.mpich', 'mpich')
+ self.check_satisfies('builtin.mock.mpich', 'mpich')
+
+ # TODO: only works for deps now, but shouldn't we allow this for root spec?
+ # self.check_satisfies('builtin.mock.mpich', 'mpi')
+
+ self.check_satisfies('builtin.mock.mpich', 'builtin.mock.mpich')
+
+ self.check_unsatisfiable('builtin.mock.mpich', 'builtin.mpich')
+
+
+ def test_satisfies_namespaced_dep(self):
+ """Ensure spec from same or unspecified namespace satisfies namespace constraint."""
+ self.check_satisfies('mpileaks ^builtin.mock.mpich', '^mpich')
+
+ self.check_satisfies('mpileaks ^builtin.mock.mpich', '^mpi')
+ self.check_satisfies('mpileaks ^builtin.mock.mpich', '^builtin.mock.mpich')
+
+ self.check_unsatisfiable('mpileaks ^builtin.mock.mpich', '^builtin.mpich')
+
+
def test_satisfies_compiler(self):
self.check_satisfies('foo%gcc', '%gcc')
self.check_satisfies('foo%intel', '%intel')
@@ -190,11 +218,23 @@ class SpecSematicsTest(MockPackagesTest):
def test_satisfies_virtual(self):
+ # Don't use check_satisfies: it checks constrain() too, and
+ # you can't constrain a non-virtual by a virtual.
self.assertTrue(Spec('mpich').satisfies(Spec('mpi')))
self.assertTrue(Spec('mpich2').satisfies(Spec('mpi')))
self.assertTrue(Spec('zmpi').satisfies(Spec('mpi')))
+ def test_satisfies_virtual_dep_with_virtual_constraint(self):
+ """Ensure we can satisfy virtual constraints when there are multiple
+ vdep providers in the specs."""
+ self.assertTrue(Spec('netlib-lapack ^openblas').satisfies('netlib-lapack ^openblas'))
+ self.assertFalse(Spec('netlib-lapack ^netlib-blas').satisfies('netlib-lapack ^openblas'))
+
+ self.assertFalse(Spec('netlib-lapack ^openblas').satisfies('netlib-lapack ^netlib-blas'))
+ self.assertTrue(Spec('netlib-lapack ^netlib-blas').satisfies('netlib-lapack ^netlib-blas'))
+
+
# ================================================================================
# Indexing specs
# ================================================================================
@@ -327,4 +367,3 @@ class SpecSematicsTest(MockPackagesTest):
self.check_constrain_not_changed('libelf^foo+debug', 'libelf^foo+debug')
self.check_constrain_not_changed('libelf^foo~debug', 'libelf^foo~debug')
self.check_constrain_not_changed('libelf^foo=bgqos_0', 'libelf^foo=bgqos_0')
-
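
A quick sketch of the namespace semantics these tests pin down (mirroring
test_satisfies_namespaced_dep; assumes the mock 'builtin.mock' repo is available):

    from spack.spec import Spec

    left = Spec('mpileaks ^builtin.mock.mpich')
    print(left.satisfies('^mpich'))          # True: unqualified names match any namespace
    print(left.satisfies('^builtin.mpich'))  # False: explicit namespaces must agree
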
diff --git a/lib/spack/spack/test/spec_syntax.py b/lib/spack/spack/test/spec_syntax.py
index 404f38906e..1daaa4be8f 100644
--- a/lib/spack/spack/test/spec_syntax.py
+++ b/lib/spack/spack/test/spec_syntax.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/lib/spack/spack/test/spec_yaml.py b/lib/spack/spack/test/spec_yaml.py
index 869befc02a..11987ea1b3 100644
--- a/lib/spack/spack/test/spec_yaml.py
+++ b/lib/spack/spack/test/spec_yaml.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/lib/spack/spack/test/stage.py b/lib/spack/spack/test/stage.py
index 8cff8f7960..c1b2a2a573 100644
--- a/lib/spack/spack/test/stage.py
+++ b/lib/spack/spack/test/stage.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/lib/spack/spack/test/svn_fetch.py b/lib/spack/spack/test/svn_fetch.py
index 184fe8faa1..7d150b42f4 100644
--- a/lib/spack/spack/test/svn_fetch.py
+++ b/lib/spack/spack/test/svn_fetch.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -49,7 +49,7 @@ class SvnFetchTest(MockPackagesTest):
spec = Spec('svn-test')
spec.concretize()
- self.pkg = spack.db.get(spec, new=True)
+ self.pkg = spack.repo.get(spec, new=True)
def tearDown(self):
@@ -65,7 +65,7 @@ class SvnFetchTest(MockPackagesTest):
def assert_rev(self, rev):
"""Check that the current revision is equal to the supplied rev."""
def get_rev():
- output = svn('info', return_output=True)
+ output = svn('info', output=str)
self.assertTrue("Revision" in output)
for line in output.split('\n'):
match = re.match(r'Revision: (\d+)', line)
diff --git a/lib/spack/spack/test/tally_plugin.py b/lib/spack/spack/test/tally_plugin.py
new file mode 100644
index 0000000000..9ca898c47c
--- /dev/null
+++ b/lib/spack/spack/test/tally_plugin.py
@@ -0,0 +1,59 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from nose.plugins import Plugin
+
+import os
+
+class Tally(Plugin):
+ name = 'tally'
+
+ def __init__(self):
+ super(Tally, self).__init__()
+ self.successCount = 0
+ self.failCount = 0
+ self.errorCount = 0
+
+ @property
+ def numberOfTestsRun(self):
+ """Excludes skipped tests"""
+ return self.errorCount + self.failCount + self.successCount
+
+ def options(self, parser, env=os.environ):
+ super(Tally, self).options(parser, env=env)
+
+ def configure(self, options, conf):
+ super(Tally, self).configure(options, conf)
+
+ def addSuccess(self, test):
+ self.successCount += 1
+
+ def addError(self, test, err):
+ self.errorCount += 1
+
+ def addFailure(self, test, err):
+ self.failCount += 1
+
+ def finalize(self, result):
+ pass
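
One plausible way to drive the suite with this plugin (a sketch, assuming
nose's addplugins hook; '--with-tally' is derived from the plugin's name attribute):

    import nose
    from spack.test.tally_plugin import Tally

    tally = Tally()
    nose.run(argv=['nosetests', '--with-tally', 'spack.test.versions'],
             addplugins=[tally])
    print("%d run, %d failures, %d errors"
          % (tally.numberOfTestsRun, tally.failCount, tally.errorCount))
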
diff --git a/lib/spack/spack/test/unit_install.py b/lib/spack/spack/test/unit_install.py
new file mode 100644
index 0000000000..ccc409dd60
--- /dev/null
+++ b/lib/spack/spack/test/unit_install.py
@@ -0,0 +1,125 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import unittest
+import itertools
+
+import spack
+test_install = __import__("spack.cmd.test-install",
+ fromlist=["BuildId", "create_test_output", "TestResult"])
+
+class MockOutput(object):
+ def __init__(self):
+ self.results = {}
+
+ def add_test(self, buildId, passed=True, buildInfo=None):
+ self.results[buildId] = passed
+
+ def write_to(self, stream):
+ pass
+
+class MockSpec(object):
+ def __init__(self, name, version, hashStr=None):
+ self.dependencies = {}
+ self.name = name
+ self.version = version
+ self.hash = hashStr if hashStr else hash((name, version))
+
+ def traverse(self, order=None):
+ allDeps = itertools.chain.from_iterable(i.traverse() for i in
+ self.dependencies.itervalues())
+ return set(itertools.chain([self], allDeps))
+
+ def dag_hash(self):
+ return self.hash
+
+ def to_yaml(self):
+ return "<<<MOCK YAML {0}>>>".format(test_install.BuildId(self).stringId())
+
+class MockPackage(object):
+ def __init__(self, buildLogPath):
+ self.installed = False
+ self.build_log_path = buildLogPath
+
+specX = MockSpec("X", "1.2.0")
+specY = MockSpec("Y", "2.3.8")
+specX.dependencies['Y'] = specY
+pkgX = MockPackage('logX')
+pkgY = MockPackage('logY')
+bIdX = test_install.BuildId(specX)
+bIdY = test_install.BuildId(specY)
+
+class UnitInstallTest(unittest.TestCase):
+ """Tests test-install where X->Y"""
+
+ def setUp(self):
+ super(UnitInstallTest, self).setUp()
+
+ pkgX.installed = False
+ pkgY.installed = False
+
+ self.saved_db = spack.repo
+ pkgDb = MockPackageDb({specX:pkgX, specY:pkgY})
+ spack.repo = pkgDb
+
+
+ def tearDown(self):
+ super(UnitInstallTest, self).tearDown()
+
+ spack.repo = self.saved_db
+
+ def test_installing_both(self):
+ mo = MockOutput()
+
+ pkgX.installed = True
+ pkgY.installed = True
+ test_install.create_test_output(specX, [specX, specY], mo, getLogFunc=mock_fetch_log)
+
+ self.assertEqual(mo.results,
+ {bIdX:test_install.TestResult.PASSED,
+ bIdY:test_install.TestResult.PASSED})
+
+
+ def test_dependency_already_installed(self):
+ mo = MockOutput()
+
+ pkgX.installed = True
+ pkgY.installed = True
+ test_install.create_test_output(specX, [specX], mo, getLogFunc=mock_fetch_log)
+ self.assertEqual(mo.results, {bIdX:test_install.TestResult.PASSED})
+
+ #TODO: add test(s) where Y fails to install
+
+
+class MockPackageDb(object):
+ def __init__(self, init=None):
+ self.specToPkg = {}
+ if init:
+ self.specToPkg.update(init)
+
+ def get(self, spec):
+ return self.specToPkg[spec]
+
+def mock_fetch_log(path):
+ return []
diff --git a/lib/spack/spack/test/url_extrapolate.py b/lib/spack/spack/test/url_extrapolate.py
index 00d8216020..87adf89401 100644
--- a/lib/spack/spack/test/url_extrapolate.py
+++ b/lib/spack/spack/test/url_extrapolate.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/lib/spack/spack/test/url_parse.py b/lib/spack/spack/test/url_parse.py
index ae1d559f7c..efde7c0c73 100644
--- a/lib/spack/spack/test/url_parse.py
+++ b/lib/spack/spack/test/url_parse.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/lib/spack/spack/test/url_substitution.py b/lib/spack/spack/test/url_substitution.py
index db7ddd251d..aec8baf4ea 100644
--- a/lib/spack/spack/test/url_substitution.py
+++ b/lib/spack/spack/test/url_substitution.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -29,7 +29,6 @@ import unittest
import spack
import spack.url as url
-from spack.packages import PackageDB
class PackageSanityTest(unittest.TestCase):
diff --git a/lib/spack/spack/test/versions.py b/lib/spack/spack/test/versions.py
index 20e946e90e..108450e098 100644
--- a/lib/spack/spack/test/versions.py
+++ b/lib/spack/spack/test/versions.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/lib/spack/spack/test/yaml.py b/lib/spack/spack/test/yaml.py
new file mode 100644
index 0000000000..5a357b8e69
--- /dev/null
+++ b/lib/spack/spack/test/yaml.py
@@ -0,0 +1,93 @@
+##############################################################################
+# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""
+Test Spack's custom YAML format.
+"""
+import unittest
+import spack.util.spack_yaml as syaml
+
+test_file = """\
+config_file:
+ x86_64:
+ foo: /path/to/foo
+ bar: /path/to/bar
+ baz: /path/to/baz
+ some_list:
+ - item 1
+ - item 2
+ - item 3
+ another_list:
+ [ 1, 2, 3 ]
+ some_key: some_string
+"""
+
+test_data = {
+ 'config_file' : syaml.syaml_dict([
+ ('x86_64', syaml.syaml_dict([
+ ('foo', '/path/to/foo'),
+ ('bar', '/path/to/bar'),
+ ('baz', '/path/to/baz' )])),
+ ('some_list', [ 'item 1', 'item 2', 'item 3' ]),
+ ('another_list', [ 1, 2, 3 ]),
+ ('some_key', 'some_string')
+ ])}
+
+class YamlTest(unittest.TestCase):
+
+ def setUp(self):
+ self.data = syaml.load(test_file)
+
+
+ def test_parse(self):
+ self.assertEqual(test_data, self.data)
+
+
+ def test_dict_order(self):
+ self.assertEqual(
+ ['x86_64', 'some_list', 'another_list', 'some_key'],
+ self.data['config_file'].keys())
+
+ self.assertEqual(
+ ['foo', 'bar', 'baz'],
+ self.data['config_file']['x86_64'].keys())
+
+
+ def test_line_numbers(self):
+ def check(obj, start_line, end_line):
+ self.assertEqual(obj._start_mark.line, start_line)
+ self.assertEqual(obj._end_mark.line, end_line)
+
+ check(self.data, 0, 12)
+ check(self.data['config_file'], 1, 12)
+ check(self.data['config_file']['x86_64'], 2, 5)
+ check(self.data['config_file']['x86_64']['foo'], 2, 2)
+ check(self.data['config_file']['x86_64']['bar'], 3, 3)
+ check(self.data['config_file']['x86_64']['baz'], 4, 4)
+ check(self.data['config_file']['some_list'], 6, 9)
+ check(self.data['config_file']['some_list'][0], 6, 6)
+ check(self.data['config_file']['some_list'][1], 7, 7)
+ check(self.data['config_file']['some_list'][2], 8, 8)
+ check(self.data['config_file']['another_list'], 10, 10)
+ check(self.data['config_file']['some_key'], 11, 11)
diff --git a/lib/spack/spack/url.py b/lib/spack/spack/url.py
index 58838306af..02c0b83e26 100644
--- a/lib/spack/spack/url.py
+++ b/lib/spack/spack/url.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -70,7 +70,7 @@ def find_list_url(url):
"""
url_types = [
- # e.g. https://github.com/scalability-llnl/callpath/archive/v1.0.1.tar.gz
+ # e.g. https://github.com/llnl/callpath/archive/v1.0.1.tar.gz
(r'^(https://github.com/[^/]+/[^/]+)/archive/', lambda m: m.group(1) + '/releases')
]
@@ -209,8 +209,8 @@ def parse_version_offset(path):
# e.g. foobar-4.5.1
(r'-((\d+\.)*\d+)$', stem),
- # e.g. foobar-4.5.1b
- (r'-((\d+\.)*\d+\-?([a-z]|rc|RC|tp|TP)\d*)$', stem),
+ # e.g. foobar-4.5.1b, foobar4.5RC, foobar.v4.5.1b
+ (r'[-._]?v?((\d+\.)*\d+[-._]?([a-z]|rc|RC|tp|TP?)\d*)$', stem),
# e.g. foobar-4.5.0-beta1, or foobar-4.50-beta
(r'-((\d+\.)*\d+-beta(\d+)?)$', stem),
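
The loosened pattern can be sanity-checked in isolation; a quick sketch of
the stems the new alternative accepts:

    import re

    new = r"[-._]?v?((\d+\.)*\d+[-._]?([a-z]|rc|RC|tp|TP?)\d*)$"
    for stem in ('foobar-4.5.1b', 'foobar4.5RC', 'foobar.v4.5.1b'):
        print("%s -> %s" % (stem, re.search(new, stem).group(1)))
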
diff --git a/lib/spack/spack/util/__init__.py b/lib/spack/spack/util/__init__.py
index 1c388f31c5..b54691b67c 100644
--- a/lib/spack/spack/util/__init__.py
+++ b/lib/spack/spack/util/__init__.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/lib/spack/spack/util/compression.py b/lib/spack/spack/util/compression.py
index fd17785ad0..ea1f233bce 100644
--- a/lib/spack/spack/util/compression.py
+++ b/lib/spack/spack/util/compression.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/lib/spack/spack/util/crypto.py b/lib/spack/spack/util/crypto.py
index 8a8574cd3d..5269260284 100644
--- a/lib/spack/spack/util/crypto.py
+++ b/lib/spack/spack/util/crypto.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/lib/spack/spack/util/debug.py b/lib/spack/spack/util/debug.py
index 37985eccdd..7930753f6f 100644
--- a/lib/spack/spack/util/debug.py
+++ b/lib/spack/spack/util/debug.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/lib/spack/spack/util/environment.py b/lib/spack/spack/util/environment.py
index 7a4ff919ad..cd413dcfbc 100644
--- a/lib/spack/spack/util/environment.py
+++ b/lib/spack/spack/util/environment.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/lib/spack/spack/util/executable.py b/lib/spack/spack/util/executable.py
index d1dfb62ffb..fc27b789d0 100644
--- a/lib/spack/spack/util/executable.py
+++ b/lib/spack/spack/util/executable.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -55,24 +55,80 @@ class Executable(object):
def __call__(self, *args, **kwargs):
- """Run the executable with subprocess.check_output, return output."""
- return_output = kwargs.get("return_output", False)
- fail_on_error = kwargs.get("fail_on_error", True)
- ignore_errors = kwargs.get("ignore_errors", ())
+ """Run this executable in a subprocess.
- output = kwargs.get("output", sys.stdout)
- error = kwargs.get("error", sys.stderr)
- input = kwargs.get("input", None)
+ Arguments
+ args
+ command line arguments to the executable to run.
+
+ Optional arguments
+
+ fail_on_error
+
+              Raise an exception if the subprocess returns an
+              error. Default is True. When set to False, the return
+              code is available as `exe.returncode`.
+
+ ignore_errors
+
+                An optional list/tuple of error codes that can be
+                *ignored*; i.e., if these codes are returned, no
+                exception is raised even when `fail_on_error` is `True`.
+
+ output, error
+
+ These arguments allow you to specify new stdout and stderr
+ values. They default to `None`, which means the
+ subprocess will inherit the parent's file descriptors.
+
+ You can set these to:
+ - python streams, e.g. open Python file objects, or os.devnull;
+ - filenames, which will be automatically opened for writing; or
+ - `str`, as in the Python string type. If you set these to `str`,
+ output and error will be written to pipes and returned as
+ a string. If both `output` and `error` are set to `str`,
+ then one string is returned containing output concatenated
+ with error.
+
+ input
+
+ Same as output, error, but `str` is not an allowed value.
+
+ Deprecated arguments
+
+ return_output[=False]
+
+ Setting this to True is the same as setting output=str.
+ This argument may be removed in future Spack versions.
+
+ """
+ fail_on_error = kwargs.pop("fail_on_error", True)
+ ignore_errors = kwargs.pop("ignore_errors", ())
+
+ # TODO: This is deprecated. Remove in a future version.
+ return_output = kwargs.pop("return_output", False)
+
+ # Default values of None says to keep parent's file descriptors.
+ if return_output:
+ output = str
+ else:
+ output = kwargs.pop("output", None)
+
+ error = kwargs.pop("error", None)
+ input = kwargs.pop("input", None)
+ if input is str:
+ raise ValueError("Cannot use `str` as input stream.")
def streamify(arg, mode):
if isinstance(arg, basestring):
return open(arg, mode), True
- elif arg is None and mode != 'r':
- return open(os.devnull, mode), True
- return arg, False
- output, ostream = streamify(output, 'w')
- error, estream = streamify(error, 'w')
- input, istream = streamify(input, 'r')
+ elif arg is str:
+ return subprocess.PIPE, False
+ else:
+ return arg, False
+ ostream, close_ostream = streamify(output, 'w')
+ estream, close_estream = streamify(error, 'w')
+ istream, close_istream = streamify(input, 'r')
# if they just want to ignore one error code, make it a tuple.
if isinstance(ignore_errors, int):
@@ -93,19 +149,19 @@ class Executable(object):
try:
proc = subprocess.Popen(
- cmd,
- stdin=input,
- stderr=error,
- stdout=subprocess.PIPE if return_output else output)
+ cmd, stdin=istream, stderr=estream, stdout=ostream)
out, err = proc.communicate()
- self.returncode = proc.returncode
- rc = proc.returncode
+ rc = self.returncode = proc.returncode
if fail_on_error and rc != 0 and (rc not in ignore_errors):
raise ProcessError("Command exited with status %d:"
% proc.returncode, cmd_line)
- if return_output:
- return out
+
+ if output is str or error is str:
+ result = ''
+ if output is str: result += out
+ if error is str: result += err
+ return result
except OSError, e:
raise ProcessError(
@@ -120,9 +176,9 @@ class Executable(object):
% (proc.returncode, cmd_line))
finally:
- if ostream: output.close()
- if estream: error.close()
- if istream: input.close()
+ if close_ostream: output.close()
+ if close_estream: error.close()
+ if close_istream: input.close()
def __eq__(self, other):
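
In practice the reworked interface reads like this (a sketch; 'echo' stands
in for any command):

    from spack.util.executable import Executable

    echo = Executable('/bin/echo')
    echo('goes to the terminal')              # output=None: inherit parent's stdout
    out = echo('captured', output=str)        # output=str: returned as a string
    both = echo('x', output=str, error=str)   # stdout and stderr concatenated
    print(out.strip())
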
diff --git a/lib/spack/spack/util/multiproc.py b/lib/spack/spack/util/multiproc.py
index 9e045a090f..8ca82df011 100644
--- a/lib/spack/spack/util/multiproc.py
+++ b/lib/spack/spack/util/multiproc.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -27,9 +27,11 @@ This implements a parallel map operation but it can accept more values
than multiprocessing.Pool.apply() can. For example, apply() will fail
to pickle functions if they're passed indirectly as parameters.
"""
-from multiprocessing import Process, Pipe
+from multiprocessing import Process, Pipe, Semaphore, Value
from itertools import izip
+__all__ = ['spawn', 'parmap', 'Barrier']
+
def spawn(f):
def fun(pipe,x):
pipe.send(f(x))
@@ -43,3 +45,49 @@ def parmap(f,X):
[p.join() for p in proc]
return [p.recv() for (p,c) in pipe]
+
+class Barrier:
+ """Simple reusable semaphore barrier.
+
+ Python 2.6 doesn't have multiprocessing barriers so we implement this.
+
+ See http://greenteapress.com/semaphores/downey08semaphores.pdf, p. 41.
+ """
+ def __init__(self, n, timeout=None):
+ self.n = n
+ self.to = timeout
+ self.count = Value('i', 0)
+ self.mutex = Semaphore(1)
+ self.turnstile1 = Semaphore(0)
+ self.turnstile2 = Semaphore(1)
+
+
+ def wait(self):
+ if not self.mutex.acquire(timeout=self.to):
+ raise BarrierTimeoutError()
+ self.count.value += 1
+ if self.count.value == self.n:
+ if not self.turnstile2.acquire(timeout=self.to):
+ raise BarrierTimeoutError()
+ self.turnstile1.release()
+ self.mutex.release()
+
+ if not self.turnstile1.acquire(timeout=self.to):
+ raise BarrierTimeoutError()
+ self.turnstile1.release()
+
+ if not self.mutex.acquire(timeout=self.to):
+ raise BarrierTimeoutError()
+ self.count.value -= 1
+ if self.count.value == 0:
+ if not self.turnstile1.acquire(timeout=self.to):
+ raise BarrierTimeoutError()
+ self.turnstile2.release()
+ self.mutex.release()
+
+ if not self.turnstile2.acquire(timeout=self.to):
+ raise BarrierTimeoutError()
+ self.turnstile2.release()
+
+
+class BarrierTimeoutError: pass
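
A minimal sketch of the barrier in use (four processes rendezvous before continuing):

    from multiprocessing import Process
    from spack.util.multiproc import Barrier

    def worker(barrier, rank):
        # ... per-process setup ...
        barrier.wait()  # nobody proceeds until all four arrive
        # ... lock-step phase ...

    barrier = Barrier(4, timeout=10)
    procs = [Process(target=worker, args=(barrier, i)) for i in range(4)]
    for p in procs: p.start()
    for p in procs: p.join()
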
diff --git a/lib/spack/spack/util/naming.py b/lib/spack/spack/util/naming.py
index 782afbd4bb..5025f15027 100644
--- a/lib/spack/spack/util/naming.py
+++ b/lib/spack/spack/util/naming.py
@@ -1,13 +1,22 @@
# Need this because of spack.util.string
from __future__ import absolute_import
import string
+import itertools
import re
+from StringIO import StringIO
import spack
+__all__ = ['mod_to_class', 'spack_module_to_python_module', 'valid_module_name',
+ 'valid_fully_qualified_module_name', 'validate_fully_qualified_module_name',
+ 'validate_module_name', 'possible_spack_module_names', 'NamespaceTrie']
+
# Valid module names can contain '-' but can't start with it.
_valid_module_re = r'^\w[\w-]*$'
+# Valid fully qualified module names are '.'-separated components,
+# each of which follows the rule above.
+_valid_fully_qualified_module_re = r'^(\w[\w-]*)(\.\w[\w-]*)*$'
+
def mod_to_class(mod_name):
"""Convert a name from module style to class name style. Spack mostly
@@ -42,20 +51,160 @@ def mod_to_class(mod_name):
return class_name
+def spack_module_to_python_module(mod_name):
+ """Given a Spack module name, returns the name by which it can be
+ imported in Python.
+ """
+ if re.match(r'[0-9]', mod_name):
+ mod_name = 'num' + mod_name
+
+ return mod_name.replace('-', '_')
+
+
+def possible_spack_module_names(python_mod_name):
+ """Given a Python module name, return a list of all possible spack module
+ names that could correspond to it."""
+ mod_name = re.sub(r'^num(\d)', r'\1', python_mod_name)
+
+ parts = re.split(r'(_)', mod_name)
+ options = [['_', '-']] * mod_name.count('_')
+
+ results = []
+ for subs in itertools.product(*options):
+ s = list(parts)
+ s[1::2] = subs
+ results.append(''.join(s))
+
+ return results
+
+
def valid_module_name(mod_name):
- """Return whether the mod_name is valid for use in Spack."""
+ """Return whether mod_name is valid for use in Spack."""
return bool(re.match(_valid_module_re, mod_name))
+def valid_fully_qualified_module_name(mod_name):
+ """Return whether mod_name is a valid namespaced module name."""
+ return bool(re.match(_valid_fully_qualified_module_re, mod_name))
+
+
def validate_module_name(mod_name):
"""Raise an exception if mod_name is not valid."""
if not valid_module_name(mod_name):
raise InvalidModuleNameError(mod_name)
+def validate_fully_qualified_module_name(mod_name):
+ """Raise an exception if mod_name is not a valid namespaced module name."""
+ if not valid_fully_qualified_module_name(mod_name):
+ raise InvalidFullyQualifiedModuleNameError(mod_name)
+
+
class InvalidModuleNameError(spack.error.SpackError):
"""Raised when we encounter a bad module name."""
def __init__(self, name):
super(InvalidModuleNameError, self).__init__(
"Invalid module name: " + name)
self.name = name
+
+
+class InvalidFullyQualifiedModuleNameError(spack.error.SpackError):
+ """Raised when we encounter a bad full package name."""
+ def __init__(self, name):
+ super(InvalidFullyQualifiedModuleNameError, self).__init__(
+ "Invalid fully qualified package name: " + name)
+ self.name = name
+
+
+class NamespaceTrie(object):
+ class Element(object):
+ def __init__(self, value):
+ self.value = value
+
+
+ def __init__(self, separator='.'):
+ self._subspaces = {}
+ self._value = None
+ self._sep = separator
+
+
+ def __setitem__(self, namespace, value):
+ first, sep, rest = namespace.partition(self._sep)
+
+ if not first:
+ self._value = NamespaceTrie.Element(value)
+ return
+
+ if first not in self._subspaces:
+ self._subspaces[first] = NamespaceTrie()
+
+ self._subspaces[first][rest] = value
+
+
+ def _get_helper(self, namespace, full_name):
+ first, sep, rest = namespace.partition(self._sep)
+ if not first:
+ if not self._value:
+ raise KeyError("Can't find namespace '%s' in trie" % full_name)
+ return self._value.value
+ elif first not in self._subspaces:
+ raise KeyError("Can't find namespace '%s' in trie" % full_name)
+ else:
+ return self._subspaces[first]._get_helper(rest, full_name)
+
+
+ def __getitem__(self, namespace):
+ return self._get_helper(namespace, namespace)
+
+
+ def is_prefix(self, namespace):
+ """True if the namespace has a value, or if it's the prefix of one that does."""
+ first, sep, rest = namespace.partition(self._sep)
+ if not first:
+ return True
+ elif first not in self._subspaces:
+ return False
+ else:
+ return self._subspaces[first].is_prefix(rest)
+
+
+ def is_leaf(self, namespace):
+ """True if this namespace has no children in the trie."""
+ first, sep, rest = namespace.partition(self._sep)
+ if not first:
+            return not self._subspaces
+ elif first not in self._subspaces:
+ return False
+ else:
+ return self._subspaces[first].is_leaf(rest)
+
+
+ def has_value(self, namespace):
+ """True if there is a value set for the given namespace."""
+ first, sep, rest = namespace.partition(self._sep)
+ if not first:
+ return self._value is not None
+ elif first not in self._subspaces:
+ return False
+ else:
+ return self._subspaces[first].has_value(rest)
+
+
+ def __contains__(self, namespace):
+ """Returns whether a value has been set for the namespace."""
+ return self.has_value(namespace)
+
+
+ def _str_helper(self, stream, level=0):
+ indent = (level * ' ')
+ for name in sorted(self._subspaces):
+ stream.write(indent + name + '\n')
+ if self._value:
+ stream.write(indent + ' ' + repr(self._value.value))
+            self._subspaces[name]._str_helper(stream, level + 1)
+
+
+ def __str__(self):
+ stream = StringIO()
+ self._str_helper(stream)
+ return stream.getvalue()
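
A sketch of the trie in action (values may be set at any depth; lookups
follow the separator):

    from spack.util.naming import NamespaceTrie

    trie = NamespaceTrie()
    trie['builtin'] = 'builtin-repo'
    trie['builtin.mock'] = 'mock-repo'

    print(trie['builtin.mock'])          # 'mock-repo'
    print(trie.is_prefix('builtin'))     # True: it prefixes builtin.mock
    print('builtin.mock.mpich' in trie)  # False: no value set there
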
diff --git a/lib/spack/spack/util/prefix.py b/lib/spack/spack/util/prefix.py
index 7bd63c16ca..c613ca5182 100644
--- a/lib/spack/spack/util/prefix.py
+++ b/lib/spack/spack/util/prefix.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/lib/spack/spack/util/spack_yaml.py b/lib/spack/spack/util/spack_yaml.py
new file mode 100644
index 0000000000..728e86b8ee
--- /dev/null
+++ b/lib/spack/spack/util/spack_yaml.py
@@ -0,0 +1,201 @@
+##############################################################################
+# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""Enhanced YAML parsing for Spack.
+
+- ``load()`` preserves YAML Marks on returned objects -- this allows
+ us to access file and line information later.
+
+- Our load methods use the ``OrderedDict`` class instead of YAML's
+  default unordered dict.
+
+"""
+import yaml
+from yaml.nodes import *
+from yaml.constructor import ConstructorError
+from yaml.representer import SafeRepresenter
+from ordereddict_backport import OrderedDict
+
+# Only export load and dump
+__all__ = ['load', 'dump']
+
+# Make new classes so we can add custom attributes.
+# Also, use OrderedDict instead of just dict.
+class syaml_dict(OrderedDict):
+ def __repr__(self):
+ mappings = ('%r: %r' % (k,v) for k,v in self.items())
+ return '{%s}' % ', '.join(mappings)
+class syaml_list(list):
+ __repr__ = list.__repr__
+class syaml_str(str):
+ __repr__ = str.__repr__
+
+def mark(obj, node):
+ """Add start and end markers to an object."""
+ obj._start_mark = node.start_mark
+ obj._end_mark = node.end_mark
+
+
+class OrderedLineLoader(yaml.Loader):
+ """YAML loader that preserves order and line numbers.
+
+ Mappings read in by this loader behave like an ordered dict.
+ Sequences, mappings, and strings also have new attributes,
+ ``_start_mark`` and ``_end_mark``, that preserve YAML line
+ information in the output data.
+
+ """
+ #
+ # Override construct_yaml_* so that they build our derived types,
+ # which allows us to add new attributes to them.
+ #
+ # The standard YAML constructors return empty instances and fill
+ # in with mappings later. We preserve this behavior.
+ #
+ def construct_yaml_str(self, node):
+ value = self.construct_scalar(node)
+ try:
+ value = value.encode('ascii')
+ except UnicodeEncodeError:
+ pass
+ value = syaml_str(value)
+ mark(value, node)
+ return value
+
+
+ def construct_yaml_seq(self, node):
+ data = syaml_list()
+ mark(data, node)
+ yield data
+ data.extend(self.construct_sequence(node))
+
+
+ def construct_yaml_map(self, node):
+ data = syaml_dict()
+ mark(data, node)
+ yield data
+ value = self.construct_mapping(node)
+ data.update(value)
+
+ #
+    # Override the ``construct_*`` routines. These fill in the empty
+    # objects yielded by the ``construct_yaml_*`` methods above.
+ #
+ def construct_sequence(self, node, deep=False):
+ if not isinstance(node, SequenceNode):
+ raise ConstructorError(None, None,
+ "expected a sequence node, but found %s" % node.id,
+ node.start_mark)
+ value = syaml_list(self.construct_object(child, deep=deep)
+ for child in node.value)
+ mark(value, node)
+ return value
+
+
+ def construct_mapping(self, node, deep=False):
+ """Store mappings as OrderedDicts instead of as regular python
+ dictionaries to preserve file ordering."""
+ if not isinstance(node, MappingNode):
+ raise ConstructorError(None, None,
+ "expected a mapping node, but found %s" % node.id,
+ node.start_mark)
+
+ mapping = syaml_dict()
+ for key_node, value_node in node.value:
+ key = self.construct_object(key_node, deep=deep)
+ try:
+ hash(key)
+ except TypeError, exc:
+ raise ConstructorError("while constructing a mapping", node.start_mark,
+ "found unacceptable key (%s)" % exc, key_node.start_mark)
+ value = self.construct_object(value_node, deep=deep)
+ if key in mapping:
+ raise ConstructorError("while constructing a mapping", node.start_mark,
+ "found already in-use key (%s)" % key, key_node.start_mark)
+ mapping[key] = value
+
+ mark(mapping, node)
+ return mapping
+
+# register above new constructors
+OrderedLineLoader.add_constructor(u'tag:yaml.org,2002:map', OrderedLineLoader.construct_yaml_map)
+OrderedLineLoader.add_constructor(u'tag:yaml.org,2002:seq', OrderedLineLoader.construct_yaml_seq)
+OrderedLineLoader.add_constructor(u'tag:yaml.org,2002:str', OrderedLineLoader.construct_yaml_str)
+
+
+
+class OrderedLineDumper(yaml.Dumper):
+ """Dumper that preserves ordering and formats ``syaml_*`` objects.
+
+ This dumper preserves insertion ordering ``syaml_dict`` objects
+ when they're written out. It also has some custom formatters
+ for ``syaml_*`` objects so that they are formatted like their
+ regular Python equivalents, instead of ugly YAML pyobjects.
+
+ """
+ def represent_mapping(self, tag, mapping, flow_style=None):
+ value = []
+ node = MappingNode(tag, value, flow_style=flow_style)
+ if self.alias_key is not None:
+ self.represented_objects[self.alias_key] = node
+ best_style = True
+ if hasattr(mapping, 'items'):
+ # if it's a syaml_dict, preserve OrderedDict order.
+ # Otherwise do the default thing.
+ sort = not isinstance(mapping, syaml_dict)
+ mapping = mapping.items()
+ if sort:
+ mapping.sort()
+
+ for item_key, item_value in mapping:
+ node_key = self.represent_data(item_key)
+ node_value = self.represent_data(item_value)
+ if not (isinstance(node_key, ScalarNode) and not node_key.style):
+ best_style = False
+ if not (isinstance(node_value, ScalarNode) and not node_value.style):
+ best_style = False
+ value.append((node_key, node_value))
+ if flow_style is None:
+ if self.default_flow_style is not None:
+ node.flow_style = self.default_flow_style
+ else:
+ node.flow_style = best_style
+ return node
+
+# Make our special objects look like normal YAML ones.
+OrderedLineDumper.add_representer(syaml_dict, OrderedLineDumper.represent_dict)
+OrderedLineDumper.add_representer(syaml_list, OrderedLineDumper.represent_list)
+OrderedLineDumper.add_representer(syaml_str, OrderedLineDumper.represent_str)
+
+
+def load(*args, **kwargs):
+ """Load but modify the loader instance so that it will add __line__
+ atrributes to the returned object."""
+ kwargs['Loader'] = OrderedLineLoader
+ return yaml.load(*args, **kwargs)
+
+
+def dump(*args, **kwargs):
+ kwargs['Dumper'] = OrderedLineDumper
+ return yaml.dump(*args, **kwargs)
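
A sketch of the round trip this module provides (line marks on parsed
objects, insertion order preserved on dump):

    import spack.util.spack_yaml as syaml

    data = syaml.load("config:\n  foo: bar\n  baz: qux\n")
    print(data['config'].keys())            # ['foo', 'baz'] -- source order kept
    print(data['config']._start_mark.line)  # 1: line where the mapping starts
    print(syaml.dump(data))                 # keys come back out in source order
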
diff --git a/lib/spack/spack/util/string.py b/lib/spack/spack/util/string.py
index 234163bf52..1556ce6d29 100644
--- a/lib/spack/spack/util/string.py
+++ b/lib/spack/spack/util/string.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py
index 1420d62a77..e26daef296 100644
--- a/lib/spack/spack/util/web.py
+++ b/lib/spack/spack/util/web.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -23,6 +23,7 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import re
+import os
import sys
import subprocess
import urllib2, cookielib
@@ -70,7 +71,9 @@ def _spider(args):
"""
url, visited, root, opener, depth, max_depth, raise_on_error = args
- pages = {}
+ pages = {} # dict from page URL -> text content.
+ links = set() # set of all links seen on visited pages.
+
try:
# Make a HEAD request first to check the content type. This lets
# us ignore tarballs and gigantic files.
@@ -99,42 +102,45 @@ def _spider(args):
page = response.read()
pages[response_url] = page
- # If we're not at max depth, parse out the links in the page
- if depth < max_depth:
- link_parser = LinkParser()
- subcalls = []
- link_parser.feed(page)
-
- while link_parser.links:
- raw_link = link_parser.links.pop()
+ # Parse out the links in the page
+ link_parser = LinkParser()
+ subcalls = []
+ link_parser.feed(page)
- # Skip stuff that looks like an archive
- if any(raw_link.endswith(suf) for suf in ALLOWED_ARCHIVE_TYPES):
- continue
+ while link_parser.links:
+ raw_link = link_parser.links.pop()
+ abs_link = urlparse.urljoin(response_url, raw_link)
- # Evaluate the link relative to the page it came from.
- abs_link = urlparse.urljoin(response_url, raw_link)
+ links.add(abs_link)
- # Skip things outside the root directory
- if not abs_link.startswith(root):
- continue
+ # Skip stuff that looks like an archive
+ if any(raw_link.endswith(suf) for suf in ALLOWED_ARCHIVE_TYPES):
+ continue
- # Skip already-visited links
- if abs_link in visited:
- continue
+ # Skip things outside the root directory
+ if not abs_link.startswith(root):
+ continue
- subcalls.append((abs_link, visited, root, None, depth+1, max_depth, raise_on_error))
- visited.add(abs_link)
+ # Skip already-visited links
+ if abs_link in visited:
+ continue
- if subcalls:
- try:
- pool = Pool(processes=len(subcalls))
- dicts = pool.map(_spider, subcalls)
- for d in dicts:
- pages.update(d)
- finally:
- pool.terminate()
- pool.join()
+ # If we're not at max depth, follow links.
+ if depth < max_depth:
+ subcalls.append((abs_link, visited, root, None,
+ depth+1, max_depth, raise_on_error))
+ visited.add(abs_link)
+
+ if subcalls:
+ try:
+ pool = Pool(processes=len(subcalls))
+ results = pool.map(_spider, subcalls)
+ for sub_pages, sub_links in results:
+ pages.update(sub_pages)
+ links.update(sub_links)
+ finally:
+ pool.terminate()
+ pool.join()
except urllib2.URLError, e:
tty.debug(e)
@@ -155,10 +161,10 @@ def _spider(args):
# Other types of errors are completely ignored, except in debug mode.
tty.debug("Error in _spider: %s" % e)
- return pages
+ return pages, links
-def get_pages(root_url, **kwargs):
+def spider(root_url, **kwargs):
"""Gets web pages from a root URL.
If depth is specified (e.g., depth=2), then this will also fetch pages
linked from the root and its children up to depth.
@@ -167,5 +173,69 @@ def get_pages(root_url, **kwargs):
performance over a sequential fetch.
"""
max_depth = kwargs.setdefault('depth', 1)
- pages = _spider((root_url, set(), root_url, None, 1, max_depth, False))
- return pages
+ pages, links = _spider((root_url, set(), root_url, None, 1, max_depth, False))
+ return pages, links
+
+
+def find_versions_of_archive(*archive_urls, **kwargs):
+ """Scrape web pages for new versions of a tarball.
+
+ Arguments:
+ archive_urls:
+ URLs for different versions of a package. Typically these
+ are just the tarballs from the package file itself. By
+ default, this searches the parent directories of archives.
+
+ Keyword Arguments:
+ list_url:
+
+            URL for a listing of archives. Spack will scrape these
+ pages for download links that look like the archive URL.
+
+ list_depth:
+ Max depth to follow links on list_url pages.
+
+ """
+ list_url = kwargs.get('list_url', None)
+ list_depth = kwargs.get('list_depth', 1)
+
+ # Generate a list of list_urls based on archive urls and any
+ # explicitly listed list_url in the package
+ list_urls = set()
+ if list_url:
+ list_urls.add(list_url)
+ for aurl in archive_urls:
+ list_urls.add(spack.url.find_list_url(aurl))
+
+ # Grab some web pages to scrape.
+ pages = {}
+ links = set()
+ for lurl in list_urls:
+ p, l = spider(lurl, depth=list_depth)
+ pages.update(p)
+ links.update(l)
+
+ # Scrape them for archive URLs
+ regexes = []
+ for aurl in archive_urls:
+ # This creates a regex from the URL with a capture group for
+ # the version part of the URL. The capture group is converted
+ # to a generic wildcard, so we can use this to extract things
+ # on a page that look like archive URLs.
+ url_regex = spack.url.wildcard_version(aurl)
+
+ # We'll be a bit more liberal and just look for the archive
+ # part, not the full path.
+ regexes.append(os.path.basename(url_regex))
+
+ # Build a dict version -> URL from any links that match the wildcards.
+ versions = {}
+ for url in links:
+ if any(re.search(r, url) for r in regexes):
+ try:
+ ver = spack.url.parse_version(url)
+ versions[ver] = url
+ except spack.url.UndetectableVersionError as e:
+ continue
+
+ return versions
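
Tying the pieces together, a sketch of scraping versions for a package (the
callpath URL is just an illustration):

    import spack.util.web as web

    versions = web.find_versions_of_archive(
        'https://github.com/llnl/callpath/archive/v1.0.1.tar.gz',
        list_depth=2)
    for ver in sorted(versions):
        print("%-10s %s" % (ver, versions[ver]))
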
diff --git a/lib/spack/spack/variant.py b/lib/spack/spack/variant.py
index 3d3e2b0f6d..8959e76684 100644
--- a/lib/spack/spack/variant.py
+++ b/lib/spack/spack/variant.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/lib/spack/spack/version.py b/lib/spack/spack/version.py
index 35db05e018..e8a0a261c9 100644
--- a/lib/spack/spack/version.py
+++ b/lib/spack/spack/version.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -48,7 +48,7 @@ import sys
import re
from bisect import bisect_left
from functools import wraps
-from external.functools import total_ordering
+from functools_backport import total_ordering
# Valid version characters
VALID_VERSION = r'[A-Za-z0-9_.-]'
diff --git a/lib/spack/spack/virtual.py b/lib/spack/spack/virtual.py
index fa070e6bd5..d16aea8642 100644
--- a/lib/spack/spack/virtual.py
+++ b/lib/spack/spack/virtual.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -73,10 +73,8 @@ class ProviderIndex(object):
for provided_spec, provider_spec in pkg.provided.iteritems():
if provider_spec.satisfies(spec, deps=False):
provided_name = provided_spec.name
- if provided_name not in self.providers:
- self.providers[provided_name] = {}
- provider_map = self.providers[provided_name]
+ provider_map = self.providers.setdefault(provided_name, {})
if not provided_spec in provider_map:
provider_map[provided_spec] = set()
@@ -119,12 +117,13 @@ class ProviderIndex(object):
return sorted(providers)
- # TODO: this is pretty darned nasty, and inefficient.
+ # TODO: this is pretty darned nasty, and inefficient, but there
+ # are not that many vdeps in most specs.
def _cross_provider_maps(self, lmap, rmap):
result = {}
for lspec, rspec in itertools.product(lmap, rmap):
try:
- constrained = lspec.copy().constrain(rspec)
+ constrained = lspec.constrained(rspec)
except spack.spec.UnsatisfiableSpecError:
continue
@@ -132,10 +131,8 @@ class ProviderIndex(object):
for lp_spec, rp_spec in itertools.product(lmap[lspec], rmap[rspec]):
if lp_spec.name == rp_spec.name:
try:
- const = lp_spec.copy().constrain(rp_spec,deps=False)
- if constrained not in result:
- result[constrained] = set()
- result[constrained].add(const)
+ const = lp_spec.constrained(rp_spec, deps=False)
+ result.setdefault(constrained, set()).add(const)
except spack.spec.UnsatisfiableSpecError:
continue
return result
@@ -161,4 +158,4 @@ class ProviderIndex(object):
if crossed:
result[name] = crossed
- return bool(result)
+ return all(c in result for c in common)
diff --git a/share/spack/csh/spack.csh b/share/spack/csh/spack.csh
index b21da23836..d64ce8935b 100644
--- a/share/spack/csh/spack.csh
+++ b/share/spack/csh/spack.csh
@@ -101,7 +101,7 @@ case unload:
breaksw
default:
- \spack $_sp_args
+ \spack $_sp_flags $_sp_args
breaksw
endsw
diff --git a/share/spack/logo/favicon.ico b/share/spack/logo/favicon.ico
new file mode 100755
index 0000000000..95a67ae5b1
--- /dev/null
+++ b/share/spack/logo/favicon.ico
Binary files differ
diff --git a/share/spack/logo/spack-logo-text-64.png b/share/spack/logo/spack-logo-text-64.png
new file mode 100644
index 0000000000..8dad4c519f
--- /dev/null
+++ b/share/spack/logo/spack-logo-text-64.png
Binary files differ
diff --git a/share/spack/logo/spack-logo-white-text-48.png b/share/spack/logo/spack-logo-white-text-48.png
new file mode 100644
index 0000000000..9e60867e81
--- /dev/null
+++ b/share/spack/logo/spack-logo-white-text-48.png
Binary files differ
diff --git a/share/spack/setup-env.csh b/share/spack/setup-env.csh
index 5f91670a60..42d8c42726 100755
--- a/share/spack/setup-env.csh
+++ b/share/spack/setup-env.csh
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/share/spack/setup-env.sh b/share/spack/setup-env.sh
index b90846d28f..586a5b836b 100755
--- a/share/spack/setup-env.sh
+++ b/share/spack/setup-env.sh
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -55,7 +55,11 @@
# avoids the need to come up with a user-friendly naming scheme for
# spack dotfiles.
########################################################################
+
function spack {
+ # save raw arguments into an array before butchering them
+ args=( "$@" )
+
# accumulate initial flags for main spack command
_sp_flags=""
while [[ "$1" =~ ^- ]]; do
@@ -116,7 +120,7 @@ function spack {
esac
;;
*)
- command spack $_sp_flags $_sp_subcommand $_sp_spec
+ command spack "${args[@]}"
;;
esac
}
@@ -167,8 +171,8 @@ fi
#
_sp_share_dir=$(cd "$(dirname $_sp_source_file)" && pwd)
_sp_prefix=$(cd "$(dirname $(dirname $_sp_share_dir))" && pwd)
+_spack_pathadd PATH "${_sp_prefix%/}/bin"
-# TODO: fix SYS_TYPE to something non-LLNL-specific
-_spack_pathadd DK_NODE "$_sp_share_dir/dotkit/$SYS_TYPE"
-_spack_pathadd MODULEPATH "$_sp_share_dir/modules/$SYS_TYPE"
-_spack_pathadd PATH "$_sp_prefix/bin"
+_sp_sys_type=$(spack-python -c 'print(spack.architecture.sys_type())')
+_spack_pathadd DK_NODE "${_sp_share_dir%/}/dotkit/$_sp_sys_type"
+_spack_pathadd MODULEPATH "${_sp_share_dir%/}/modules/$_sp_sys_type"
diff --git a/var/spack/mock_configs/site_spackconfig/compilers.yaml b/var/spack/mock_configs/site_spackconfig/compilers.yaml
deleted file mode 100644
index 0a2dc893e2..0000000000
--- a/var/spack/mock_configs/site_spackconfig/compilers.yaml
+++ /dev/null
@@ -1,12 +0,0 @@
-compilers:
- all:
- clang@3.3:
- cc: /path/to/clang
- cxx: /path/to/clang++
- f77: None
- fc: None
- gcc@4.5.0:
- cc: /path/to/gcc
- cxx: /path/to/g++
- f77: /path/to/gfortran
- fc: /path/to/gfortran
diff --git a/var/spack/mock_configs/site_spackconfig/packages.yaml b/var/spack/mock_configs/site_spackconfig/packages.yaml
deleted file mode 100644
index eb52c6cf11..0000000000
--- a/var/spack/mock_configs/site_spackconfig/packages.yaml
+++ /dev/null
@@ -1,13 +0,0 @@
-packages:
- - externaltool:
- nobuild: True
- - externaltool@1.0%gcc@4.5.0:
- path: /path/to/external_tool
- - externalvirtual@2.0%clang@3.3:
- path: /path/to/external_virtual_clang
- nobuild: True
- - externalvirtual@1.0%gcc@4.5.0:
- path: /path/to/external_virtual_gcc
- nobuild: True
-
-
diff --git a/var/spack/packages/R/package.py b/var/spack/packages/R/package.py
deleted file mode 100644
index 2e6f65a742..0000000000
--- a/var/spack/packages/R/package.py
+++ /dev/null
@@ -1,33 +0,0 @@
-from spack import *
-
-class R(Package):
- """R is 'GNU S', a freely available language and environment for
-    statistical computing and graphics which provides a wide
-    variety of statistical and graphical techniques: linear and
- nonlinear modelling, statistical tests, time series analysis,
- classification, clustering, etc. Please consult the R project
- homepage for further information."""
- homepage = "http://www.example.com"
- url = "http://cran.cnr.berkeley.edu/src/base/R-3/R-3.1.2.tar.gz"
-
- version('3.1.2', '3af29ec06704cbd08d4ba8d69250ae74')
-
- depends_on("readline")
- depends_on("ncurses")
- depends_on("icu")
- depends_on("glib")
- depends_on("zlib")
- depends_on("libtiff")
- depends_on("jpeg")
- depends_on("cairo")
- depends_on("pango")
- depends_on("freetype")
- depends_on("tcl")
- depends_on("tk")
-
- def install(self, spec, prefix):
- configure("--prefix=%s" % prefix,
- "--enable-R-shlib",
- "--enable-BLAS-shlib")
- make()
- make("install")
diff --git a/var/spack/packages/binutils/package.py b/var/spack/packages/binutils/package.py
deleted file mode 100644
index 5a3059bbcf..0000000000
--- a/var/spack/packages/binutils/package.py
+++ /dev/null
@@ -1,17 +0,0 @@
-from spack import *
-
-class Binutils(Package):
- """GNU binutils, which contain the linker, assembler, objdump and others"""
- homepage = "http://www.gnu.org/software/binutils/"
- url = "ftp://ftp.gnu.org/gnu/binutils/binutils-2.25.tar.bz2"
-
- version('2.25', 'd9f3303f802a5b6b0bb73a335ab89d66')
- version('2.24', 'e0f71a7b2ddab0f8612336ac81d9636b')
- version('2.23.2', '4f8fa651e35ef262edc01d60fb45702e')
- version('2.20.1', '2b9dc8f2b7dbd5ec5992c6e29de0b764')
-
- def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
-
- make()
- make("install")
diff --git a/var/spack/packages/boost/package.py b/var/spack/packages/boost/package.py
deleted file mode 100644
index 35824d53a2..0000000000
--- a/var/spack/packages/boost/package.py
+++ /dev/null
@@ -1,66 +0,0 @@
-from spack import *
-
-class Boost(Package):
- """Boost provides free peer-reviewed portable C++ source
- libraries, emphasizing libraries that work well with the C++
- Standard Library.
-
- Boost libraries are intended to be widely useful, and usable
- across a broad spectrum of applications. The Boost license
- encourages both commercial and non-commercial use.
- """
- homepage = "http://www.boost.org"
- url = "http://downloads.sourceforge.net/project/boost/boost/1.55.0/boost_1_55_0.tar.bz2"
- list_url = "http://sourceforge.net/projects/boost/files/boost/"
- list_depth = 2
-
- version('1.59.0', '6aa9a5c6a4ca1016edd0ed1178e3cb87')
- version('1.58.0', 'b8839650e61e9c1c0a89f371dd475546')
- version('1.57.0', '1be49befbdd9a5ce9def2983ba3e7b76')
- version('1.56.0', 'a744cf167b05d72335f27c88115f211d')
- version('1.55.0', 'd6eef4b4cacb2183f2bf265a5a03a354')
- version('1.54.0', '15cb8c0803064faef0c4ddf5bc5ca279')
- version('1.53.0', 'a00d22605d5dbcfb4c9936a9b35bc4c2')
- version('1.52.0', '3a855e0f919107e0ca4de4d84ad3f750')
- version('1.51.0', '4b6bd483b692fd138aef84ed2c8eb679')
- version('1.50.0', '52dd00be775e689f55a987baebccc462')
- version('1.49.0', '0d202cb811f934282dea64856a175698')
- version('1.48.0', 'd1e9a7a7f532bb031a3c175d86688d95')
- version('1.47.0', 'a2dc343f7bc7f83f8941e47ed4a18200')
- version('1.46.1', '7375679575f4c8db605d426fc721d506')
- version('1.46.0', '37b12f1702319b73876b0097982087e0')
- version('1.45.0', 'd405c606354789d0426bc07bea617e58')
- version('1.44.0', 'f02578f5218f217a9f20e9c30e119c6a')
- version('1.43.0', 'dd49767bfb726b0c774f7db0cef91ed1')
- version('1.42.0', '7bf3b4eb841b62ffb0ade2b82218ebe6')
- version('1.41.0', '8bb65e133907db727a2a825c5400d0a6')
- version('1.40.0', 'ec3875caeac8c52c7c129802a8483bd7')
- version('1.39.0', 'a17281fd88c48e0d866e1a12deecbcc0')
- version('1.38.0', '5eca2116d39d61382b8f8235915cb267')
- version('1.37.0', '8d9f990bfb7e83769fa5f1d6f065bc92')
- version('1.36.0', '328bfec66c312150e4c2a78dcecb504b')
- version('1.35.0', 'dce952a7214e72d6597516bcac84048b')
- version('1.34.1', '2d938467e8a448a2c9763e0a9f8ca7e5')
- version('1.34.0', 'ed5b9291ffad776f8757a916e1726ad0')
-
-
- def url_for_version(self, version):
- """Handle Boost's weird URLs, which write the version two different ways."""
- parts = [str(p) for p in Version(version)]
- dots = ".".join(parts)
- underscores = "_".join(parts)
- return "http://downloads.sourceforge.net/project/boost/boost/%s/boost_%s.tar.bz2" % (
- dots, underscores)
-
-
- def install(self, spec, prefix):
- bootstrap = Executable('./bootstrap.sh')
- bootstrap()
-
- # b2 used to be called bjam, before 1.47 (sigh)
- b2name = './b2' if spec.satisfies('@1.47:') else './bjam'
-
- b2 = Executable(b2name)
- b2('install',
- '-j %s' % make_jobs,
- '--prefix=%s' % prefix)
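To make the url_for_version logic above concrete, here is a worked sketch of the two spellings it reconciles, using 1.55.0 (one of the versions listed) as the example; the snippet uses plain strings rather than Spack's Version class:

    # Boost writes the version with dots in the directory name but with
    # underscores in the tarball name; url_for_version builds both forms.
    version = "1.55.0"
    parts = version.split(".")     # ['1', '55', '0']
    dots = ".".join(parts)         # '1.55.0'  -> directory component
    underscores = "_".join(parts)  # '1_55_0'  -> tarball component
    print("http://downloads.sourceforge.net/project/boost/boost/%s/boost_%s.tar.bz2"
          % (dots, underscores))
    # -> .../boost/1.55.0/boost_1_55_0.tar.bz2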
diff --git a/var/spack/packages/bzip2/package.py b/var/spack/packages/bzip2/package.py
deleted file mode 100644
index fbd5869a53..0000000000
--- a/var/spack/packages/bzip2/package.py
+++ /dev/null
@@ -1,34 +0,0 @@
-from spack import *
-from glob import glob
-
-class Bzip2(Package):
- """bzip2 is a freely available, patent free high-quality data
- compressor. It typically compresses files to within 10% to 15%
- of the best available techniques (the PPM family of statistical
- compressors), whilst being around twice as fast at compression
- and six times faster at decompression."""
- homepage = "http://www.bzip.org"
- url = "http://www.bzip.org/1.0.6/bzip2-1.0.6.tar.gz"
-
- version('1.0.6', '00b516f4704d4a7cb50a1d97e6e8e15b')
-
- def install(self, spec, prefix):
- # No configure system -- have to filter the makefile for this package.
- filter_file(r'CC=gcc', 'CC=cc', 'Makefile', string=True)
-
- make('-f', 'Makefile-libbz2_so')
- make('clean')
- make("install", "PREFIX=%s" % prefix)
-
- bzip2_exe = join_path(prefix.bin, 'bzip2')
- install('bzip2-shared', bzip2_exe)
- for libfile in glob('libbz2.so*'):
- install(libfile, prefix.lib)
-
- bunzip2 = join_path(prefix.bin, 'bunzip2')
- remove(bunzip2)
- symlink(bzip2_exe, bunzip2)
-
- bzcat = join_path(prefix.bin, 'bzcat')
- remove(bzcat)
- symlink(bzip2_exe, bzcat)
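The filter_file call above performs an in-place, sed-style substitution; with string=True the pattern is treated as a literal rather than a regular expression. A rough pure-Python equivalent of that single call, for illustration only:

    # Roughly what filter_file(r'CC=gcc', 'CC=cc', 'Makefile', string=True) does.
    with open('Makefile') as f:
        text = f.read()
    with open('Makefile', 'w') as f:
        f.write(text.replace('CC=gcc', 'CC=cc'))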
diff --git a/var/spack/packages/cube/package.py b/var/spack/packages/cube/package.py
deleted file mode 100644
index d97cd25636..0000000000
--- a/var/spack/packages/cube/package.py
+++ /dev/null
@@ -1,55 +0,0 @@
-# FIXME: Add copyright statement
-#
-from spack import *
-from contextlib import closing
-
-class Cube(Package):
-    """Cube is the profile viewer for Score-P and Scalasca profiles. It
- displays a multi-dimensional performance space consisting
- of the dimensions (i) performance metric, (ii) call path,
- and (iii) system resource."""
-
- homepage = "http://www.scalasca.org/software/cube-4.x/download.html"
- url = "http://apps.fz-juelich.de/scalasca/releases/cube/4.2/dist/cube-4.2.3.tar.gz"
-
- version('4.2.3', '8f95b9531f5a8f8134f279c2767c9b20')
-
- version('4.3TP1', 'a2090fbc7b2ba394bd5c09ba971e237f',
- url = 'http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3-TP1.tar.gz')
-
-    # Using CC as the C++ compiler is a quirky workaround for the Score-P build
-    # system's attempt to guess a matching C compiler when configuring scorep-score.
- backend_user_provided = """\
-CC=cc
-CXX=CC
-F77=f77
-FC=f90
-#CFLAGS=-fPIC
-#CXXFLAGS=-fPIC
-"""
- frontend_user_provided = """\
-CC_FOR_BUILD=cc
-CXX_FOR_BUILD=CC
-F77_FOR_BUILD=f77
-FC_FOR_BUILD=f90
-"""
-
- def install(self, spec, prefix):
- # Use a custom compiler configuration, otherwise the score-p
- # build system messes with spack's compiler settings.
- # Create these three files in the build directory
-
- with closing(open("vendor/common/build-config/platforms/platform-backend-user-provided", "w")) as backend_file:
- backend_file.write(self.backend_user_provided)
- with closing(open("vendor/common/build-config/platforms/platform-frontend-user-provided", "w")) as frontend_file:
- frontend_file.write(self.frontend_user_provided)
-
- configure_args = ["--prefix=%s" % prefix,
- "--with-custom-compilers",
- "--without-paraver",
- "--without-gui"]
-
- configure(*configure_args)
-
- make(parallel=False)
- make("install", parallel=False)
diff --git a/var/spack/packages/czmq/package.py b/var/spack/packages/czmq/package.py
deleted file mode 100644
index a2f1947554..0000000000
--- a/var/spack/packages/czmq/package.py
+++ /dev/null
@@ -1,19 +0,0 @@
-from spack import *
-
-class Czmq(Package):
-    """A C interface to the ZeroMQ library."""
- homepage = "http://czmq.zeromq.org"
- url = "https://github.com/zeromq/czmq/archive/v3.0.2.tar.gz"
-
- version('3.0.2', '23e9885f7ee3ce88d99d0425f52e9be1', url='https://github.com/zeromq/czmq/archive/v3.0.2.tar.gz')
-
- depends_on('zeromq')
-
- def install(self, spec, prefix):
- bash = which("bash")
- bash("./autogen.sh")
- configure("--prefix=%s" % prefix)
-
- make()
- make("install")
-
diff --git a/var/spack/packages/git/package.py b/var/spack/packages/git/package.py
deleted file mode 100644
index 0f1a3ba05b..0000000000
--- a/var/spack/packages/git/package.py
+++ /dev/null
@@ -1,27 +0,0 @@
-from spack import *
-
-class Git(Package):
- """Git is a free and open source distributed version control
- system designed to handle everything from small to very large
- projects with speed and efficiency."""
- homepage = "http://git-scm.com"
- url = "https://www.kernel.org/pub/software/scm/git/git-2.2.1.tar.xz"
-
- version('2.2.1', '43e01f9d96ba8c11611e0eef0d9f9f28')
-
- # Use system openssl.
- # depends_on("openssl")
-
- # Use system perl for now.
- # depends_on("perl")
- # depends_on("pcre")
-
- depends_on("zlib")
-
- def install(self, spec, prefix):
- configure("--prefix=%s" % prefix,
- "--without-pcre",
- "--without-python")
-
- make()
- make("install")
diff --git a/var/spack/packages/hdf5/package.py b/var/spack/packages/hdf5/package.py
deleted file mode 100644
index 992dd8ec70..0000000000
--- a/var/spack/packages/hdf5/package.py
+++ /dev/null
@@ -1,41 +0,0 @@
-from spack import *
-
-class Hdf5(Package):
- """HDF5 is a data model, library, and file format for storing and managing
- data. It supports an unlimited variety of datatypes, and is designed for
- flexible and efficient I/O and for high volume and complex data.
- """
-
- homepage = "http://www.hdfgroup.org/HDF5/"
- url = "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-1.8.13/src/hdf5-1.8.13.tar.gz"
- list_url = "http://www.hdfgroup.org/ftp/HDF5/releases"
- list_depth = 3
-
- version('1.8.13', 'c03426e9e77d7766944654280b467289')
-
- depends_on("mpi")
- depends_on("zlib")
-
-    # TODO: currently hard-coded to use MPICH wrappers
- def install(self, spec, prefix):
-
- configure(
- "--prefix=%s" % prefix,
- "--with-zlib=%s" % spec['zlib'].prefix,
- "--enable-parallel",
- "--enable-shared",
-            "CC=%s/mpicc" % spec['mpich'].prefix.bin,
-            "CXX=%s/mpic++" % spec['mpich'].prefix.bin)
-
- make()
- make("install")
-
- def url_for_version(self, version):
- v = str(version)
-
- if version == Version("1.2.2"):
- return "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-" + v + ".tar.gz"
- elif version < Version("1.7"):
- return "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-" + version.up_to(2) + "/hdf5-" + v + ".tar.gz"
- else:
- return "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-" + v + "/src/hdf5-" + v + ".tar.gz"
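The three branches above encode how the HDF Group reorganized its download tree over time. A plain-string restatement, without Spack's Version objects (illustrative only; the helper name is made up):

    def hdf5_url(v):
        # Simplified restatement of url_for_version above; v is 'X.Y[.Z]'.
        base = "http://www.hdfgroup.org/ftp/HDF5/releases"
        if v == "1.2.2":
            return "%s/hdf5-%s.tar.gz" % (base, v)  # oldest layout
        if tuple(int(x) for x in v.split(".")[:2]) < (1, 7):
            return "%s/hdf5-%s/hdf5-%s.tar.gz" % (
                base, ".".join(v.split(".")[:2]), v)
        return "%s/hdf5-%s/src/hdf5-%s.tar.gz" % (base, v, v)  # modern layout

    print(hdf5_url("1.8.13"))  # matches the url attribute above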
diff --git a/var/spack/packages/libpciaccess/package.py b/var/spack/packages/libpciaccess/package.py
deleted file mode 100644
index 6022fc34a3..0000000000
--- a/var/spack/packages/libpciaccess/package.py
+++ /dev/null
@@ -1,21 +0,0 @@
-from spack import *
-
-class Libpciaccess(Package):
- """Generic PCI access library."""
-
- homepage = "http://cgit.freedesktop.org/xorg/lib/libpciaccess/"
- url = "http://cgit.freedesktop.org/xorg/lib/libpciaccess/"
-
- version('0.13.4', git='http://anongit.freedesktop.org/git/xorg/lib/libpciaccess.git',
- tag='libpciaccess-0.13.4')
-
- depends_on('autoconf')
- depends_on('libtool')
-
- def install(self, spec, prefix):
-        autogen = Executable("./autogen.sh")
-        autogen()
- configure("--prefix=%s" % prefix)
-
- make()
- make("install")
diff --git a/var/spack/packages/libxml2/package.py b/var/spack/packages/libxml2/package.py
deleted file mode 100644
index 3a0af6b368..0000000000
--- a/var/spack/packages/libxml2/package.py
+++ /dev/null
@@ -1,20 +0,0 @@
-from spack import *
-
-class Libxml2(Package):
-    """Libxml2 is the XML C parser and toolkit developed for the Gnome
-    project (but usable outside of the Gnome platform); it is free
-    software available under the MIT License."""
- homepage = "http://xmlsoft.org"
- url = "http://xmlsoft.org/sources/libxml2-2.9.2.tar.gz"
-
- version('2.9.2', '9e6a9aca9d155737868b3dc5fd82f788')
-
- extends('python')
- depends_on('zlib')
- depends_on('xz')
-
- def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
-
- make()
- make("install")
diff --git a/var/spack/packages/llvm/package.py b/var/spack/packages/llvm/package.py
deleted file mode 100644
index 9d2be690bb..0000000000
--- a/var/spack/packages/llvm/package.py
+++ /dev/null
@@ -1,54 +0,0 @@
-##############################################################################
-# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
-# Produced at the Lawrence Livermore National Laboratory.
-#
-# This file is part of Spack.
-# Written by David Beckingsale, david@llnl.gov, All rights reserved.
-# LLNL-CODE-647188
-#
-# For details, see https://scalability-llnl.github.io/spack
-# Please also see the LICENSE file for our notice and the LGPL.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License (as published by
-# the Free Software Foundation) version 2.1 dated February 1999.
-#
-# This program is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
-# conditions of the GNU General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-##############################################################################
-from spack import *
-
-class Llvm(Package):
- """The LLVM Project is a collection of modular and reusable compiler and
- toolchain technologies. Despite its name, LLVM has little to do with
- traditional virtual machines, though it does provide helpful libraries
- that can be used to build them. The name "LLVM" itself is not an acronym;
- it is the full name of the project.
- """
- homepage = "http://llvm.org/"
- list_url = "http://llvm.org/releases/download.html"
-
- version('3.5.1', '2d3d8004f38852aa679e5945b8ce0b14', url='http://llvm.org/releases/3.5.1/llvm-3.5.1.src.tar.xz')
- version('3.4.2', 'a20669f75967440de949ac3b1bad439c', url='http://llvm.org/releases/3.4.2/llvm-3.4.2.src.tar.gz')
- version('3.0', 'a8e5f5f1c1adebae7b4a654c376a6005', url='http://llvm.org/releases/3.0/llvm-3.0.tar.gz')
- version('2.9', '793138412d2af2c7c7f54615f8943771', url='http://llvm.org/releases/2.9/llvm-2.9.tgz')
- version('2.8', '220d361b4d17051ff4bb21c64abe05ba', url='http://llvm.org/releases/2.8/llvm-2.8.tgz')
-
- def install(self, spec, prefix):
- env['CXXFLAGS'] = self.compiler.cxx11_flag
-
- with working_dir('spack-build', create=True):
- cmake('..',
- '-DLLVM_REQUIRES_RTTI=1',
- '-DPYTHON_EXECUTABLE=/usr/bin/python',
- '-DPYTHON_INCLUDE_DIR=/usr/include/python2.6',
- '-DPYTHON_LIBRARY=/usr/lib64/libpython2.6.so',
- *std_cmake_args)
- make()
- make("install")
diff --git a/var/spack/packages/metis/package.py b/var/spack/packages/metis/package.py
deleted file mode 100644
index 7ce5ae1925..0000000000
--- a/var/spack/packages/metis/package.py
+++ /dev/null
@@ -1,26 +0,0 @@
-from spack import *
-
-class Metis(Package):
- """METIS is a set of serial programs for partitioning graphs,
- partitioning finite element meshes, and producing fill reducing
- orderings for sparse matrices. The algorithms implemented in
- METIS are based on the multilevel recursive-bisection,
- multilevel k-way, and multi-constraint partitioning schemes."""
-
- homepage = "http://glaros.dtc.umn.edu/gkhome/metis/metis/overview"
- url = "http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis/metis-5.1.0.tar.gz"
-
- version('5.1.0', '5465e67079419a69e0116de24fce58fe')
-
- depends_on('mpi')
-
- def install(self, spec, prefix):
- cmake(".",
- '-DGKLIB_PATH=%s/GKlib' % pwd(),
- '-DSHARED=1',
- '-DCMAKE_C_COMPILER=mpicc',
- '-DCMAKE_CXX_COMPILER=mpicxx',
- *std_cmake_args)
-
- make()
- make("install")
diff --git a/var/spack/packages/mrnet/package.py b/var/spack/packages/mrnet/package.py
deleted file mode 100644
index 6e9766f275..0000000000
--- a/var/spack/packages/mrnet/package.py
+++ /dev/null
@@ -1,20 +0,0 @@
-from spack import *
-
-class Mrnet(Package):
-    """MRNet: the Multicast/Reduction Network."""
- homepage = "http://paradyn.org/mrnet"
- url = "ftp://ftp.cs.wisc.edu/paradyn/mrnet/mrnet_4.0.0.tar.gz"
-
-    version('4.1.0', '5a248298b395b329e2371bf25366115c')
-    version('4.0.0', 'd00301c078cba57ef68613be32ceea2f')
-
- parallel = False
-
- depends_on("boost")
-
- def install(self, spec, prefix):
- configure("--prefix=%s" %prefix, "--enable-shared")
-
- make()
- make("install")
-
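mrnet serializes its whole build with the class attribute, while cube above passes parallel=False call by call. A hedged sketch contrasting the two forms (package name, URL, and checksum are placeholders):

    from spack import *

    class SerialExample(Package):
        """Hypothetical package contrasting two ways to disable parallel make."""
        homepage = "http://www.example.com"
        url = "http://www.example.com/serial-example-1.0.tar.gz"
        version('1.0', '00000000000000000000000000000000')  # placeholder md5

        parallel = False  # class-wide: every make() in this package runs serially

        def install(self, spec, prefix):
            configure('--prefix=%s' % prefix)
            make()                           # serial via the class attribute
            make('install', parallel=False)  # per-call form, as cube uses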
diff --git a/var/spack/packages/mvapich2/package.py b/var/spack/packages/mvapich2/package.py
deleted file mode 100644
index 93bce011b7..0000000000
--- a/var/spack/packages/mvapich2/package.py
+++ /dev/null
@@ -1,117 +0,0 @@
-import os
-from spack import *
-
-class Mvapich2(Package):
-    """MVAPICH2 is an MPI implementation for InfiniBand networks."""
- homepage = "http://mvapich.cse.ohio-state.edu/"
-
- version('1.9', '5dc58ed08fd3142c260b70fe297e127c',
- url="http://mvapich.cse.ohio-state.edu/download/mvapich2/mv2/mvapich2-1.9.tgz")
- patch('ad_lustre_rwcontig_open_source.patch', when='@1.9')
-
- version('2.0', '9fbb68a4111a8b6338e476dc657388b4',
- url='http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.0.tar.gz')
-
- version('2.1', '0095ceecb19bbb7fb262131cb9c2cdd6',
- url='http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.1.tar.gz')
-
- provides('mpi@:2.2', when='@1.9') # MVAPICH2-1.9 supports MPI 2.2
- provides('mpi@:3.0', when='@2.0') # MVAPICH2-2.0 supports MPI 3.0
-
- variant('psm', default=False, description="build with psm")
-
- variant('pmi', default=False, description="build with pmi")
- depends_on('pmgr_collective', when='+pmi')
-
- def install(self, spec, prefix):
- # we'll set different configure flags depending on our environment
- configure_args = []
-
-        # TODO: the MPICH*_FLAGS variables have different names in 1.9
-
- if '+debug' in spec:
- # set configure flags for debug build
- configure_args.append("--disable-fast")
- configure_args.append("--enable-g=dbg")
- configure_args.append("--enable-error-checking=runtime")
- configure_args.append("--enable-error-messages=all")
- configure_args.append("--enable-nmpi-as-mpi")
-
-            if "%gcc" in spec:
- # set variables for GNU compilers
- os.environ['MPICHLIB_CFLAGS'] = "-g -O0"
- os.environ['MPICHLIB_CXXFLAGS'] = "-g -O0"
- os.environ['MPICHLIB_FFLAGS'] = "-g -O0 -fno-second-underscore"
- os.environ['MPICHLIB_F90FLAGS'] = "-g -O0 -fno-second-underscore"
- elif "%intel" in spec:
-                # set variables for Intel compilers
- os.environ['MPICHLIB_CFLAGS'] = "-g -O0"
- os.environ['MPICHLIB_CXXFLAGS'] = "-g -O0"
- os.environ['MPICHLIB_FFLAGS'] = "-g -O0"
- os.environ['MPICHLIB_F90FLAGS'] = "-g -O0"
- elif "%pgi" in spec:
- # set variables for PGI compilers
- os.environ['MPICHLIB_CFLAGS'] = "-g -O0 -fPIC"
- os.environ['MPICHLIB_CXXFLAGS'] = "-g -O0 -fPIC"
- os.environ['MPICHLIB_FFLAGS'] = "-g -O0 -fPIC"
- os.environ['MPICHLIB_F90FLAGS'] = "-g -O0 -fPIC"
-
- else:
- # set configure flags for normal optimizations
- configure_args.append("--enable-fast=all")
- configure_args.append("--enable-g=dbg")
- configure_args.append("--enable-nmpi-as-mpi")
-
-            if "%gcc" in spec:
-                # set variables for GNU compilers
- os.environ['MPICHLIB_CFLAGS'] = "-g -O2"
- os.environ['MPICHLIB_CXXFLAGS'] = "-g -O2"
- os.environ['MPICHLIB_FFLAGS'] = "-g -O2 -fno-second-underscore"
- os.environ['MPICHLIB_F90FLAGS'] = "-g -O2 -fno-second-underscore"
- elif "%intel" in spec:
-                # set variables for Intel compilers
- os.environ['MPICHLIB_CFLAGS'] = "-g -O2"
- os.environ['MPICHLIB_CXXFLAGS'] = "-g -O2"
- os.environ['MPICHLIB_FFLAGS'] = "-g -O2"
- os.environ['MPICHLIB_F90FLAGS'] = "-g -O2"
- elif "%pgi" in spec:
- # set variables for PGI compilers
- os.environ['MPICHLIB_CFLAGS'] = "-g -O2 -fPIC"
- os.environ['MPICHLIB_CXXFLAGS'] = "-g -O2 -fPIC"
- os.environ['MPICHLIB_FFLAGS'] = "-g -O2 -fPIC"
- os.environ['MPICHLIB_F90FLAGS'] = "-g -O2 -fPIC"
-
- # determine network type by variant
- if "+psm" in spec:
- # throw this flag on QLogic systems to use PSM
- configure_args.append("--with-device=ch3:psm")
- else:
- # throw this flag on IB systems
- configure_args.append("--with-device=ch3:mrail")
- configure_args.append("--with-rdma=gen2")
-
- if "+pmi" in spec:
-            configure_args.append("--with-pmi=pmgr_collective")
- else:
- configure_args.append("--with-pmi=slurm")
-
- # TODO: shared-memory build
-
- # TODO: CUDA
-
- # TODO: other file systems like panasis
-
- configure(
- "--prefix=" + prefix,
- "--enable-f77", "--enable-fc", "--enable-cxx",
- "--enable-shared", "--enable-sharedlibs=gcc",
- "--enable-debuginfo",
- "--with-pm=no",
- "--enable-romio", "--with-file-system=lustre+nfs+ufs",
- "--disable-mpe", "--without-mpe",
- "--disable-silent-rules",
- *configure_args)
-
- make()
-
- make("install")
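The install method above specializes its flags and environment by querying the concretized spec. A condensed sketch of that query pattern (a hypothetical fragment, not mvapich2's actual flag set; env is the build-environment dict that the llvm package above also uses):

    def install(self, spec, prefix):
        configure_args = ["--prefix=%s" % prefix]
        if '+debug' in spec:      # variant test, declared via variant()
            configure_args.append("--enable-g=dbg")
        if '%gcc' in spec:        # compiler test against the spec
            env['CFLAGS'] = '-g -O2'
        configure(*configure_args)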
diff --git a/var/spack/packages/netcdf/package.py b/var/spack/packages/netcdf/package.py
deleted file mode 100644
index 34284ea725..0000000000
--- a/var/spack/packages/netcdf/package.py
+++ /dev/null
@@ -1,28 +0,0 @@
-from spack import *
-
-class Netcdf(Package):
- """NetCDF is a set of software libraries and self-describing, machine-independent
- data formats that support the creation, access, and sharing of array-oriented
- scientific data."""
-
- homepage = "http://www.unidata.ucar.edu/software/netcdf/"
- url = "ftp://ftp.unidata.ucar.edu/pub/netcdf/netcdf-4.3.3.tar.gz"
-
- version('4.3.3', '5fbd0e108a54bd82cb5702a73f56d2ae')
-
- # Dependencies:
- # >HDF5
- depends_on("hdf5")
-
- def install(self, spec, prefix):
- configure(
- "--prefix=%s" % prefix,
- "--disable-dap", # Disable DAP.
- "--disable-shared", # Don't build shared libraries (use static libs).
- "CPPFLAGS=-I%s/include" % spec['hdf5'].prefix, # Link HDF5's include dir.
- "LDFLAGS=-L%s/lib" % spec['hdf5'].prefix) # Link HDF5's lib dir.
-
- make("install")
-
- # Check the newly installed netcdf package. Currently disabled.
- # make("check")
diff --git a/var/spack/packages/opari2/package.py b/var/spack/packages/opari2/package.py
deleted file mode 100644
index daaee61e3a..0000000000
--- a/var/spack/packages/opari2/package.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# FIXME: Add copyright statement here
-
-from spack import *
-from contextlib import closing
-
-class Opari2(Package):
- """OPARI2 is a source-to-source instrumentation tool for OpenMP and
- hybrid codes. It surrounds OpenMP directives and runtime library
- calls with calls to the POMP2 measurement interface.
-    OPARI2 provides a new initialization method that allows for
-    multi-directory and parallel builds as well as the use of
-    pre-instrumented libraries. Furthermore, an efficient way of
-    tracking parent-child relationships was added. Additionally, we
-    extended OPARI2 to support instrumentation of OpenMP 3.0
-    tied tasks."""
-
- homepage = "http://www.vi-hps.org/projects/score-p"
- url = "http://www.vi-hps.org/upload/packages/opari2/opari2-1.1.2.tar.gz"
-
- version('1.1.2', '9a262c7ca05ff0ab5f7775ae96f3539e')
-
- backend_user_provided = """\
-CC=cc
-CXX=c++
-F77=f77
-FC=f90
-CFLAGS=-fPIC
-CXXFLAGS=-fPIC
-"""
- frontend_user_provided = """\
-CC_FOR_BUILD=cc
-CXX_FOR_BUILD=c++
-F77_FOR_BUILD=f77
-FC_FOR_BUILD=f90
-CFLAGS_FOR_BUILD=-fPIC
-CXXFLAGS_FOR_BUILD=-fPIC
-"""
- mpi_user_provided = """\
-MPICC=mpicc
-MPICXX=mpicxx
-MPIF77=mpif77
-MPIFC=mpif90
-MPI_CFLAGS=-fPIC
-MPI_CXXFLAGS=-fPIC
-"""
-
- def install(self, spec, prefix):
- # Use a custom compiler configuration, otherwise the score-p
- # build system messes with spack's compiler settings.
- # Create these three files in the build directory
- with closing(open("platform-backend-user-provided", "w")) as backend_file:
- backend_file.write(self.backend_user_provided)
- with closing(open("platform-frontend-user-provided", "w")) as frontend_file:
- frontend_file.write(self.frontend_user_provided)
- with closing(open("platform-mpi-user-provided", "w")) as mpi_file:
- mpi_file.write(self.mpi_user_provided)
-
- configure("--prefix=%s" % prefix,
- "--with-custom-compilers",
- "--enable-shared")
-
- make()
- make("install")
diff --git a/var/spack/packages/openmpi/package.py b/var/spack/packages/openmpi/package.py
deleted file mode 100644
index 7e84cbaf65..0000000000
--- a/var/spack/packages/openmpi/package.py
+++ /dev/null
@@ -1,41 +0,0 @@
-from spack import *
-
-class Openmpi(Package):
- """Open MPI is a project combining technologies and resources from
- several other projects (FT-MPI, LA-MPI, LAM/MPI, and PACX-MPI)
- in order to build the best MPI library available. A completely
- new MPI-2 compliant implementation, Open MPI offers advantages
- for system and software vendors, application developers and
- computer science researchers.
- """
-
- homepage = "http://www.open-mpi.org"
-
- version('1.8.2', 'ab538ed8e328079d566fc797792e016e',
- url='http://www.open-mpi.org/software/ompi/v1.8/downloads/openmpi-1.8.2.tar.gz')
- version('1.6.5', '03aed2a4aa4d0b27196962a2a65fc475',
- url = "http://www.open-mpi.org/software/ompi/v1.6/downloads/openmpi-1.6.5.tar.bz2")
-
- patch('ad_lustre_rwcontig_open_source.patch', when="@1.6.5")
- patch('llnl-platforms.patch', when="@1.6.5")
-
- provides('mpi@:2')
-
- def install(self, spec, prefix):
- config_args = ["--prefix=%s" % prefix]
-
- # TODO: use variants for this, e.g. +lanl, +llnl, etc.
- # use this for LANL builds, but for LLNL builds, we need:
- # "--with-platform=contrib/platform/llnl/optimized"
- if self.version == ver("1.6.5") and '+lanl' in spec:
- config_args.append("--with-platform=contrib/platform/lanl/tlcc2/optimized-nopanasas")
-
- # TODO: Spack should make it so that you can't actually find
- # these compilers if they're "disabled" for the current
- # compiler configuration.
- if not self.compiler.f77 and not self.compiler.fc:
- config_args.append("--enable-mpi-fortran=no")
-
- configure(*config_args)
- make()
- make("install")
diff --git a/var/spack/packages/openssl/package.py b/var/spack/packages/openssl/package.py
deleted file mode 100644
index c5a8aeb9dc..0000000000
--- a/var/spack/packages/openssl/package.py
+++ /dev/null
@@ -1,26 +0,0 @@
-from spack import *
-
-class Openssl(Package):
- """The OpenSSL Project is a collaborative effort to develop a
- robust, commercial-grade, full-featured, and Open Source
- toolkit implementing the Secure Sockets Layer (SSL v2/v3) and
- Transport Layer Security (TLS v1) protocols as well as a
- full-strength general purpose cryptography library."""
- homepage = "http://www.openssl.org"
- url = "http://www.openssl.org/source/openssl-1.0.1h.tar.gz"
-
- version('1.0.1h', '8d6d684a9430d5cc98a62a5d8fbda8cf')
-
- depends_on("zlib")
- parallel = False
-
- def install(self, spec, prefix):
- config = Executable("./config")
- config("--prefix=%s" % prefix,
- "--openssldir=%s/etc/openssl" % prefix,
- "zlib",
- "no-krb5",
- "shared")
-
- make()
- make("install")
diff --git a/var/spack/packages/otf2/package.py b/var/spack/packages/otf2/package.py
deleted file mode 100644
index fa0a5898b6..0000000000
--- a/var/spack/packages/otf2/package.py
+++ /dev/null
@@ -1,74 +0,0 @@
-# FIXME: Add copyright
-
-from spack import *
-from contextlib import closing
-import os
-
-class Otf2(Package):
-    """The Open Trace Format 2 is a highly scalable, memory-efficient event
-    trace data format plus support library."""
-
- homepage = "http://www.vi-hps.org/score-p"
- url = "http://www.vi-hps.org/upload/packages/otf2/otf2-1.4.tar.gz"
-
- version('1.4', 'a23c42e936eb9209c4e08b61c3cf5092',
- url="http://www.vi-hps.org/upload/packages/otf2/otf2-1.4.tar.gz")
- version('1.3.1', 'd0ffc4e858455ace4f596f910e68c9f2',
- url="http://www.vi-hps.org/upload/packages/otf2/otf2-1.3.1.tar.gz")
- version('1.2.1', '8fb3e11fb7489896596ae2c7c83d7fc8',
- url="http://www.vi-hps.org/upload/packages/otf2/otf2-1.2.1.tar.gz")
-
- backend_user_provided = """\
-CC=cc
-CXX=c++
-F77=f77
-FC=f90
-CFLAGS=-fPIC
-CXXFLAGS=-fPIC
-"""
- frontend_user_provided = """\
-CC_FOR_BUILD=cc
-CXX_FOR_BUILD=c++
-F77_FOR_BUILD=f77
-FC_FOR_BUILD=f90
-CFLAGS_FOR_BUILD=-fPIC
-CXXFLAGS_FOR_BUILD=-fPIC
-"""
- mpi_user_provided = """\
-MPICC=cc
-MPICXX=c++
-MPIF77=f77
-MPIFC=f90
-MPI_CFLAGS=-fPIC
-MPI_CXXFLAGS=-fPIC
-"""
-
- @when('@:1.2.1')
- def version_specific_args(self):
- return ["--with-platform=disabled", "CC=cc", "CXX=c++", "F77=f77", "F90=f90", "CFLAGS=-fPIC", "CXXFLAGS=-fPIC"]
-
- @when('@1.3:')
- def version_specific_args(self):
- # TODO: figure out what scorep's build does as of otf2 1.3
- return ["--with-custom-compilers"]
-
- def install(self, spec, prefix):
- # Use a custom compiler configuration, otherwise the score-p
- # build system messes with spack's compiler settings.
- # Create these three files in the build directory
- with closing(open("platform-backend-user-provided", "w")) as backend_file:
- backend_file.write(self.backend_user_provided)
- with closing(open("platform-frontend-user-provided", "w")) as frontend_file:
- frontend_file.write(self.frontend_user_provided)
- with closing(open("platform-mpi-user-provided", "w")) as mpi_file:
- mpi_file.write(self.mpi_user_provided)
-
- configure_args=["--prefix=%s" % prefix,
- "--enable-shared"]
-
- configure_args.extend(self.version_specific_args())
-
- configure(*configure_args)
-
- make()
- make("install")
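The paired version_specific_args definitions above rely on Spack's @when decorator, which selects whichever method body the concrete spec satisfies. A minimal sketch of the dispatch pattern with a hypothetical package (name, URL, checksum, and flags are placeholders):

    from spack import *

    class WhenDemo(Package):
        """Hypothetical package illustrating @when multimethod dispatch."""
        homepage = "http://www.example.com"
        url = "http://www.example.com/when-demo-1.0.tar.gz"
        version('1.0', '00000000000000000000000000000000')  # placeholder md5

        @when('@:1.2.1')
        def extra_args(self):
            # selected when the concrete spec satisfies when-demo@:1.2.1
            return ["--with-platform=disabled"]

        @when('@1.3:')
        def extra_args(self):
            # selected for when-demo@1.3 and newer
            return ["--with-custom-compilers"]

        def install(self, spec, prefix):
            configure('--prefix=%s' % prefix, *self.extra_args())
            make()
            make('install')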
diff --git a/var/spack/packages/parmetis/package.py b/var/spack/packages/parmetis/package.py
deleted file mode 100644
index d8cd337304..0000000000
--- a/var/spack/packages/parmetis/package.py
+++ /dev/null
@@ -1,25 +0,0 @@
-from spack import *
-
-class Parmetis(Package):
-    """ParMETIS is an MPI-based parallel library that implements a
-    variety of algorithms for partitioning unstructured graphs and
-    meshes, and for computing fill-reducing orderings of sparse
-    matrices."""
- homepage = "http://glaros.dtc.umn.edu/gkhome/metis/parmetis/overview"
- url = "http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis/parmetis-4.0.3.tar.gz"
-
- version('4.0.3', 'f69c479586bf6bb7aff6a9bc0c739628')
-
- depends_on('mpi')
-
- def install(self, spec, prefix):
- cmake(".",
- '-DGKLIB_PATH=%s/metis/GKlib' % pwd(),
- '-DMETIS_PATH=%s/metis' % pwd(),
- '-DSHARED=1',
- '-DCMAKE_C_COMPILER=mpicc',
- '-DCMAKE_CXX_COMPILER=mpicxx',
- *std_cmake_args)
-
- make()
- make("install")
diff --git a/var/spack/packages/py-numpy/package.py b/var/spack/packages/py-numpy/package.py
deleted file mode 100644
index efa109a3e9..0000000000
--- a/var/spack/packages/py-numpy/package.py
+++ /dev/null
@@ -1,28 +0,0 @@
-from spack import *
-
-class PyNumpy(Package):
-    """Array processing for numbers, strings, records, and objects."""
- homepage = "https://pypi.python.org/pypi/numpy"
- url = "https://pypi.python.org/packages/source/n/numpy/numpy-1.9.1.tar.gz"
-
-    version('1.9.2', 'a1ed53432dbcd256398898d35bc8e645')
-    version('1.9.1', '78842b73560ec378142665e712ae4ad9')
-
- extends('python')
- depends_on('py-nose')
- depends_on('netlib-blas+fpic')
- depends_on('netlib-lapack+shared')
-
- def patch(self):
- filter_file(
- "possible_executables = \['(gfortran|g77|ifort|efl)",
- "possible_executables = ['fc",
- "numpy/distutils/fcompiler/gnu.py",
- "numpy/distutils/fcompiler/intel.py")
-
- def install(self, spec, prefix):
- with open('site.cfg', 'w') as f:
- f.write('[DEFAULT]\n')
- f.write('libraries=lapack,blas\n')
- f.write('library_dirs=%s/lib:%s/lib\n' % (spec['blas'].prefix, spec['lapack'].prefix))
- python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/packages/scalasca/package.py b/var/spack/packages/scalasca/package.py
deleted file mode 100644
index cf7a40c1f5..0000000000
--- a/var/spack/packages/scalasca/package.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# FIXME: Add copyright
-
-from spack import *
-
-class Scalasca(Package):
- """Scalasca is a software tool that supports the performance optimization
- of parallel programs by measuring and analyzing their runtime behavior.
- The analysis identifies potential performance bottlenecks - in
- particular those concerning communication and synchronization - and
- offers guidance in exploring their causes."""
-
- homepage = "http://www.scalasca.org"
- url = "http://apps.fz-juelich.de/scalasca/releases/scalasca/2.1/dist/scalasca-2.1.tar.gz"
-
- version('2.1', 'bab9c2b021e51e2ba187feec442b96e6',
- url = 'http://apps.fz-juelich.de/scalasca/releases/scalasca/2.1/dist/scalasca-2.1.tar.gz' )
-
- depends_on("mpi")
- depends_on("otf2@1.4")
- depends_on("cube@4.2.3")
-
- backend_user_provided = """\
-CC=cc
-CXX=c++
-F77=f77
-FC=f90
-CFLAGS=-fPIC
-CXXFLAGS=-fPIC
-"""
- frontend_user_provided = """\
-CC_FOR_BUILD=cc
-CXX_FOR_BUILD=c++
-F77_FOR_BUILD=f77
-FC_FOR_BUILD=f90
-CFLAGS_FOR_BUILD=-fPIC
-CXXFLAGS_FOR_BUILD=-fPIC
-"""
- mpi_user_provided = """\
-MPICC=mpicc
-MPICXX=mpicxx
-MPIF77=mpif77
-MPIFC=mpif90
-MPI_CFLAGS=-fPIC
-MPI_CXXFLAGS=-fPIC
-"""
-
- def install(self, spec, prefix):
- configure_args = ["--prefix=%s" % prefix,
- "--with-custom-compilers",
- "--with-otf2=%s" % spec['otf2'].prefix.bin,
- "--with-cube=%s" % spec['cube'].prefix.bin,
- "--enable-shared"]
-
- configure(*configure_args)
-
- make()
- make("install")
diff --git a/var/spack/packages/scorep/package.py b/var/spack/packages/scorep/package.py
deleted file mode 100644
index f013bd1cbb..0000000000
--- a/var/spack/packages/scorep/package.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# FIXME: Add copyright statement
-
-from spack import *
-
-class Scorep(Package):
- """The Score-P measurement infrastructure is a highly scalable and
- easy-to-use tool suite for profiling, event tracing, and online
- analysis of HPC applications."""
-
- homepage = "http://www.vi-hps.org/projects/score-p"
- url = "http://www.vi-hps.org/upload/packages/scorep/scorep-1.2.3.tar.gz"
-
- version('1.3', '9db6f957b7f51fa01377a9537867a55c',
- url = 'http://www.vi-hps.org/upload/packages/scorep/scorep-1.3.tar.gz')
-
- version('1.2.3', '4978084e7cbd05b94517aa8beaea0817')
-
- depends_on("mpi")
- depends_on("papi")
- # depends_on("otf2@1.2:1.2.1") # only Score-P 1.2.x
- depends_on("otf2")
- depends_on("opari2")
- depends_on("cube@4.2:4.2.3")
-
- backend_user_provided = """\
-CC=cc
-CXX=c++
-F77=f77
-FC=f90
-CFLAGS=-fPIC
-CXXFLAGS=-fPIC
-"""
- frontend_user_provided = """\
-CC_FOR_BUILD=cc
-CXX_FOR_BUILD=c++
-F77_FOR_BUILD=f77
-FC_FOR_BUILD=f90
-CFLAGS_FOR_BUILD=-fPIC
-CXXFLAGS_FOR_BUILD=-fPIC
-"""
- mpi_user_provided = """\
-MPICC=mpicc
-MPICXX=mpicxx
-MPIF77=mpif77
-MPIFC=mpif90
-MPI_CFLAGS=-fPIC
-MPI_CXXFLAGS=-fPIC
-"""
-
- def install(self, spec, prefix):
- # Use a custom compiler configuration, otherwise the score-p
- # build system messes with spack's compiler settings.
- # Create these three files in the build directory
- with open("platform-backend-user-provided", "w") as backend_file:
- backend_file.write(self.backend_user_provided)
- with open("platform-frontend-user-provided", "w") as frontend_file:
- frontend_file.write(self.frontend_user_provided)
- with open("platform-mpi-user-provided", "w") as mpi_file:
- mpi_file.write(self.mpi_user_provided)
-
- configure_args = ["--prefix=%s" % prefix,
- "--with-custom-compilers",
- "--with-otf2=%s" % spec['otf2'].prefix.bin,
- "--with-opari2=%s" % spec['opari2'].prefix.bin,
- "--with-cube=%s" % spec['cube'].prefix.bin,
- "--with-papi-header=%s" % spec['papi'].prefix.include,
- "--with-papi-lib=%s" % spec['papi'].prefix.lib,
- "--enable-shared"]
-
- configure(*configure_args)
-
- make()
- make("install")
diff --git a/var/spack/packages/scotch/package.py b/var/spack/packages/scotch/package.py
deleted file mode 100644
index 79289ff2ad..0000000000
--- a/var/spack/packages/scotch/package.py
+++ /dev/null
@@ -1,40 +0,0 @@
-from spack import *
-import glob
-import os
-
-class Scotch(Package):
- """Scotch is a software package for graph and mesh/hypergraph
- partitioning, graph clustering, and sparse matrix ordering."""
- homepage = "http://www.labri.fr/perso/pelegrin/scotch/"
- url = "http://gforge.inria.fr/frs/download.php/file/34099/scotch_6.0.3.tar.gz"
- list_url = "http://gforge.inria.fr/frs/?group_id=248"
-
- version('6.0.3', '10b0cc0f184de2de99859eafaca83cfc')
-
- depends_on('mpi')
-
-
- def patch(self):
- with working_dir('src/Make.inc'):
- makefiles = glob.glob('Makefile.inc.x86-64_pc_linux2*')
- filter_file(r'^CCS\s*=.*$', 'CCS = cc', *makefiles)
- filter_file(r'^CCD\s*=.*$', 'CCD = cc', *makefiles)
-
-
- def install(self, spec, prefix):
- # Currently support gcc and icc on x86_64 (maybe others with
- # vanilla makefile)
- makefile = 'Make.inc/Makefile.inc.x86-64_pc_linux2'
- if spec.satisfies('%icc'):
- makefile += '.icc'
-
- with working_dir('src'):
- force_symlink(makefile, 'Makefile.inc')
- for app in ('scotch', 'ptscotch'):
- make(app)
-
- install_tree('bin', prefix.bin)
- install_tree('lib', prefix.lib)
- install_tree('include', prefix.include)
- install_tree('man/man1', prefix.share_man1)
-
diff --git a/var/spack/packages/tau/package.py b/var/spack/packages/tau/package.py
deleted file mode 100644
index 048fac80aa..0000000000
--- a/var/spack/packages/tau/package.py
+++ /dev/null
@@ -1,36 +0,0 @@
-from spack import *
-
-import os
-from llnl.util.filesystem import join_path
-
-class Tau(Package):
- """A portable profiling and tracing toolkit for performance
- analysis of parallel programs written in Fortran, C, C++, UPC,
-    Java, and Python."""
- homepage = "http://www.cs.uoregon.edu/research/tau"
- url = "http://www.cs.uoregon.edu/research/paracomp/tau/tauprofile/dist/tau-2.23.1.tar.gz"
-
- version('2.23.1', '6593b47ae1e7a838e632652f0426fe72')
-
- def install(self, spec, prefix):
- # TAU isn't happy with directories that have '@' in the path. Sigh.
- change_sed_delimiter('@', ';', 'configure')
- change_sed_delimiter('@', ';', 'utils/FixMakefile')
- change_sed_delimiter('@', ';', 'utils/FixMakefile.sed.default')
-
- # After that, it's relatively standard.
- configure("-prefix=%s" % prefix)
- make("install")
-
- # Link arch-specific directories into prefix since there is
- # only one arch per prefix the way spack installs.
- self.link_tau_arch_dirs()
-
-
- def link_tau_arch_dirs(self):
- for subdir in os.listdir(self.prefix):
- for d in ('bin', 'lib'):
- src = join_path(self.prefix, subdir, d)
- dest = join_path(self.prefix, d)
- if os.path.isdir(src) and not os.path.exists(dest):
- os.symlink(join_path(subdir, d), dest)
diff --git a/var/spack/packages/zsh/package.py b/var/spack/packages/zsh/package.py
deleted file mode 100644
index 99ef9de2e5..0000000000
--- a/var/spack/packages/zsh/package.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from spack import *
-
-class Zsh(Package):
-    """The Z shell (zsh)."""
- homepage = "http://www.zsh.org"
- url = "http://www.zsh.org/pub/zsh-5.0.8.tar.bz2"
-
- version('5.0.8', 'e6759e8dd7b714d624feffd0a73ba0fe')
-
- depends_on("pcre")
-
- def install(self, spec, prefix):
- configure('--prefix=%s' % prefix)
-
- make()
- make("install")
diff --git a/var/spack/mock_packages/a/package.py b/var/spack/repos/builtin.mock/packages/a/package.py
index fa63c08df0..fa63c08df0 100644
--- a/var/spack/mock_packages/a/package.py
+++ b/var/spack/repos/builtin.mock/packages/a/package.py
diff --git a/var/spack/mock_packages/b/package.py b/var/spack/repos/builtin.mock/packages/b/package.py
index cb88aa2157..cb88aa2157 100644
--- a/var/spack/mock_packages/b/package.py
+++ b/var/spack/repos/builtin.mock/packages/b/package.py
diff --git a/var/spack/mock_packages/c/package.py b/var/spack/repos/builtin.mock/packages/c/package.py
index f51b913fa9..f51b913fa9 100644
--- a/var/spack/mock_packages/c/package.py
+++ b/var/spack/repos/builtin.mock/packages/c/package.py
diff --git a/var/spack/mock_packages/callpath/package.py b/var/spack/repos/builtin.mock/packages/callpath/package.py
index 5b6b70ba2a..abc576f78f 100644
--- a/var/spack/mock_packages/callpath/package.py
+++ b/var/spack/repos/builtin.mock/packages/callpath/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/var/spack/mock_packages/direct_mpich/package.py b/var/spack/repos/builtin.mock/packages/direct_mpich/package.py
index 2ced82521b..efe7fc2afc 100644
--- a/var/spack/mock_packages/direct_mpich/package.py
+++ b/var/spack/repos/builtin.mock/packages/direct_mpich/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/var/spack/mock_packages/dyninst/package.py b/var/spack/repos/builtin.mock/packages/dyninst/package.py
index 7998578da1..ea57950865 100644
--- a/var/spack/mock_packages/dyninst/package.py
+++ b/var/spack/repos/builtin.mock/packages/dyninst/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/var/spack/mock_packages/e/package.py b/var/spack/repos/builtin.mock/packages/e/package.py
index 76c6b64c7f..76c6b64c7f 100644
--- a/var/spack/mock_packages/e/package.py
+++ b/var/spack/repos/builtin.mock/packages/e/package.py
diff --git a/var/spack/mock_packages/externalprereq/package.py b/var/spack/repos/builtin.mock/packages/externalprereq/package.py
index 7d63925693..7d63925693 100644
--- a/var/spack/mock_packages/externalprereq/package.py
+++ b/var/spack/repos/builtin.mock/packages/externalprereq/package.py
diff --git a/var/spack/mock_packages/externaltest/package.py b/var/spack/repos/builtin.mock/packages/externaltest/package.py
index c546922f87..c546922f87 100644
--- a/var/spack/mock_packages/externaltest/package.py
+++ b/var/spack/repos/builtin.mock/packages/externaltest/package.py
diff --git a/var/spack/mock_packages/externaltool/package.py b/var/spack/repos/builtin.mock/packages/externaltool/package.py
index af902bd70e..af902bd70e 100644
--- a/var/spack/mock_packages/externaltool/package.py
+++ b/var/spack/repos/builtin.mock/packages/externaltool/package.py
diff --git a/var/spack/mock_packages/externalvirtual/package.py b/var/spack/repos/builtin.mock/packages/externalvirtual/package.py
index 722c1e1c53..722c1e1c53 100644
--- a/var/spack/mock_packages/externalvirtual/package.py
+++ b/var/spack/repos/builtin.mock/packages/externalvirtual/package.py
diff --git a/var/spack/mock_packages/fake/package.py b/var/spack/repos/builtin.mock/packages/fake/package.py
index fb3c2bdd2e..5f81ef20fc 100644
--- a/var/spack/mock_packages/fake/package.py
+++ b/var/spack/repos/builtin.mock/packages/fake/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/var/spack/mock_packages/git-test/package.py b/var/spack/repos/builtin.mock/packages/git-test/package.py
index 689185463c..689185463c 100644
--- a/var/spack/mock_packages/git-test/package.py
+++ b/var/spack/repos/builtin.mock/packages/git-test/package.py
diff --git a/var/spack/mock_packages/hg-test/package.py b/var/spack/repos/builtin.mock/packages/hg-test/package.py
index 462f1e4c3a..462f1e4c3a 100644
--- a/var/spack/mock_packages/hg-test/package.py
+++ b/var/spack/repos/builtin.mock/packages/hg-test/package.py
diff --git a/var/spack/mock_packages/indirect_mpich/package.py b/var/spack/repos/builtin.mock/packages/indirect_mpich/package.py
index daf8b4b166..0b1773a27b 100644
--- a/var/spack/mock_packages/indirect_mpich/package.py
+++ b/var/spack/repos/builtin.mock/packages/indirect_mpich/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/var/spack/mock_packages/libdwarf/package.py b/var/spack/repos/builtin.mock/packages/libdwarf/package.py
index 0b8df04cfb..e486a5de03 100644
--- a/var/spack/mock_packages/libdwarf/package.py
+++ b/var/spack/repos/builtin.mock/packages/libdwarf/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/var/spack/mock_packages/libelf/package.py b/var/spack/repos/builtin.mock/packages/libelf/package.py
index 94c8f942cd..5e5b0b7143 100644
--- a/var/spack/mock_packages/libelf/package.py
+++ b/var/spack/repos/builtin.mock/packages/libelf/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/var/spack/mock_packages/mpich/package.py b/var/spack/repos/builtin.mock/packages/mpich/package.py
index e4110ad530..836d9c4a9f 100644
--- a/var/spack/mock_packages/mpich/package.py
+++ b/var/spack/repos/builtin.mock/packages/mpich/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/var/spack/mock_packages/mpich2/package.py b/var/spack/repos/builtin.mock/packages/mpich2/package.py
index 827b94c8a4..90f969b898 100644
--- a/var/spack/mock_packages/mpich2/package.py
+++ b/var/spack/repos/builtin.mock/packages/mpich2/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/var/spack/mock_packages/mpileaks/package.py b/var/spack/repos/builtin.mock/packages/mpileaks/package.py
index 3989f1b452..9a18c5e1f2 100644
--- a/var/spack/mock_packages/mpileaks/package.py
+++ b/var/spack/repos/builtin.mock/packages/mpileaks/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/var/spack/mock_packages/multimethod/package.py b/var/spack/repos/builtin.mock/packages/multimethod/package.py
index 75b1606ffc..ea103fe175 100644
--- a/var/spack/mock_packages/multimethod/package.py
+++ b/var/spack/repos/builtin.mock/packages/multimethod/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/var/spack/repos/builtin.mock/packages/netlib-blas/package.py b/var/spack/repos/builtin.mock/packages/netlib-blas/package.py
new file mode 100644
index 0000000000..39f2c92ae5
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/netlib-blas/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+class NetlibBlas(Package):
+ homepage = "http://www.netlib.org/lapack/"
+ url = "http://www.netlib.org/lapack/lapack-3.5.0.tgz"
+
+ version('3.5.0', 'b1d3e3e425b2e44a06760ff173104bdf')
+
+ provides('blas')
+
+ def install(self, spec, prefix):
+ pass
diff --git a/var/spack/repos/builtin.mock/packages/netlib-lapack/package.py b/var/spack/repos/builtin.mock/packages/netlib-lapack/package.py
new file mode 100644
index 0000000000..331844e544
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/netlib-lapack/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+class NetlibLapack(Package):
+ homepage = "http://www.netlib.org/lapack/"
+ url = "http://www.netlib.org/lapack/lapack-3.5.0.tgz"
+
+ version('3.5.0', 'b1d3e3e425b2e44a06760ff173104bdf')
+
+ provides('lapack')
+ depends_on('blas')
+
+ def install(self, spec, prefix):
+ pass
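netlib-blas above and openblas below each declare provides('blas'), and netlib-lapack provides('lapack'); that is what lets a dependent name the virtual package instead of a concrete provider. A hedged sketch of a consumer (package details are placeholders):

    from spack import *

    class BlasClient(Package):
        """Hypothetical consumer of the virtual blas/lapack packages."""
        homepage = "http://www.example.com"
        url = "http://www.example.com/blas-client-1.0.tar.gz"
        version('1.0', '00000000000000000000000000000000')  # placeholder md5

        # Concretization substitutes any provider, e.g. the netlib-blas or
        # openblas mocks added in this change.
        depends_on('blas')
        depends_on('lapack')

        def install(self, spec, prefix):
            # spec['blas'] resolves to whichever provider was chosen.
            configure('--prefix=%s' % prefix,
                      '--with-blas=%s' % spec['blas'].prefix.lib)
            make()
            make('install')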
diff --git a/var/spack/repos/builtin.mock/packages/openblas/package.py b/var/spack/repos/builtin.mock/packages/openblas/package.py
new file mode 100644
index 0000000000..c7771b92a3
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/openblas/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+class Openblas(Package):
+ """OpenBLAS: An optimized BLAS library"""
+ homepage = "http://www.openblas.net"
+ url = "http://github.com/xianyi/OpenBLAS/archive/v0.2.15.tar.gz"
+
+ version('0.2.15', 'b1190f3d3471685f17cfd1ec1d252ac9')
+
+ provides('blas')
+
+ def install(self, spec, prefix):
+ pass
diff --git a/var/spack/mock_packages/optional-dep-test-2/package.py b/var/spack/repos/builtin.mock/packages/optional-dep-test-2/package.py
index ef0587588e..ef0587588e 100644
--- a/var/spack/mock_packages/optional-dep-test-2/package.py
+++ b/var/spack/repos/builtin.mock/packages/optional-dep-test-2/package.py
diff --git a/var/spack/mock_packages/optional-dep-test-3/package.py b/var/spack/repos/builtin.mock/packages/optional-dep-test-3/package.py
index e6cb3bd6e7..e6cb3bd6e7 100644
--- a/var/spack/mock_packages/optional-dep-test-3/package.py
+++ b/var/spack/repos/builtin.mock/packages/optional-dep-test-3/package.py
diff --git a/var/spack/mock_packages/optional-dep-test/package.py b/var/spack/repos/builtin.mock/packages/optional-dep-test/package.py
index bb57576ca9..bb57576ca9 100644
--- a/var/spack/mock_packages/optional-dep-test/package.py
+++ b/var/spack/repos/builtin.mock/packages/optional-dep-test/package.py
diff --git a/var/spack/mock_packages/svn-test/package.py b/var/spack/repos/builtin.mock/packages/svn-test/package.py
index ba4d5522b4..ba4d5522b4 100644
--- a/var/spack/mock_packages/svn-test/package.py
+++ b/var/spack/repos/builtin.mock/packages/svn-test/package.py
diff --git a/var/spack/mock_packages/trivial_install_test_package/package.py b/var/spack/repos/builtin.mock/packages/trivial_install_test_package/package.py
index c4db9f5f07..fec5849e67 100644
--- a/var/spack/mock_packages/trivial_install_test_package/package.py
+++ b/var/spack/repos/builtin.mock/packages/trivial_install_test_package/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/var/spack/mock_packages/zmpi/package.py b/var/spack/repos/builtin.mock/packages/zmpi/package.py
index 8c6ceda6d3..201fac2fbf 100644
--- a/var/spack/mock_packages/zmpi/package.py
+++ b/var/spack/repos/builtin.mock/packages/zmpi/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/var/spack/repos/builtin.mock/repo.yaml b/var/spack/repos/builtin.mock/repo.yaml
new file mode 100644
index 0000000000..30b068da13
--- /dev/null
+++ b/var/spack/repos/builtin.mock/repo.yaml
@@ -0,0 +1,2 @@
+repo:
+ namespace: builtin.mock
diff --git a/var/spack/packages/ImageMagick/package.py b/var/spack/repos/builtin/packages/ImageMagick/package.py
index 753ea80ca6..753ea80ca6 100644
--- a/var/spack/packages/ImageMagick/package.py
+++ b/var/spack/repos/builtin/packages/ImageMagick/package.py
diff --git a/var/spack/packages/Mitos/package.py b/var/spack/repos/builtin/packages/Mitos/package.py
index e312da3ffc..ea131872dd 100644
--- a/var/spack/packages/Mitos/package.py
+++ b/var/spack/repos/builtin/packages/Mitos/package.py
@@ -4,13 +4,20 @@ class Mitos(Package):
"""Mitos is a library and a tool for collecting sampled memory
performance data to view with MemAxes"""
- homepage = "https://github.com/scalability-llnl/Mitos"
- url = "https://github.com/scalability-llnl/Mitos"
+ homepage = "https://github.com/llnl/Mitos"
+ url = "https://github.com/llnl/Mitos"
- version('0.9.1', 'c6cb57f3cae54f5157affd97ef7ef79e', git='https://github.com/scalability-llnl/Mitos.git', tag='v0.9.1')
+ version('0.9.2',
+ git='https://github.com/llnl/Mitos.git',
+ commit='8cb143a2e8c00353ff531a781a9ca0992b0aaa3d')
+
+ version('0.9.1',
+ git='https://github.com/llnl/Mitos.git',
+ tag='v0.9.1')
depends_on('dyninst@8.2.1:')
depends_on('hwloc')
+ depends_on('mpi')
def install(self, spec, prefix):
with working_dir('spack-build', create=True):
diff --git a/var/spack/repos/builtin/packages/R/package.py b/var/spack/repos/builtin/packages/R/package.py
new file mode 100644
index 0000000000..2471dff09b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/R/package.py
@@ -0,0 +1,49 @@
+from spack import *
+
+
+class R(Package):
+ """
+ R is 'GNU S', a freely available language and environment for statistical computing and graphics which provides a
+ wide variety of statistical and graphical techniques: linear and nonlinear modelling, statistical tests, time series
+ analysis, classification, clustering, etc. Please consult the R project homepage for further information.
+ """
+ homepage = "https://www.r-project.org"
+ url = "http://cran.cnr.berkeley.edu/src/base/R-3/R-3.1.2.tar.gz"
+
+ version('3.2.3', '1ba3dac113efab69e706902810cc2970')
+ version('3.2.2', '57cef5c2e210a5454da1979562a10e5b')
+ version('3.2.1', 'c2aac8b40f84e08e7f8c9068de9239a3')
+ version('3.2.0', '66fa17ad457d7e618191aa0f52fc402e')
+ version('3.1.3', '53a85b884925aa6b5811dfc361d73fc4')
+ version('3.1.2', '3af29ec06704cbd08d4ba8d69250ae74')
+
+ variant('external-lapack', default=False, description='Links to externally installed BLAS/LAPACK')
+
+ # Virtual dependencies
+ depends_on('blas', when='+external-lapack')
+ depends_on('lapack', when='+external-lapack')
+
+ # Concrete dependencies
+ depends_on('readline')
+ depends_on('ncurses')
+ depends_on('icu')
+ depends_on('glib')
+ depends_on('zlib')
+ depends_on('libtiff')
+ depends_on('jpeg')
+ depends_on('cairo')
+ depends_on('pango')
+ depends_on('freetype')
+ depends_on('tcl')
+ depends_on('tk')
+
+ def install(self, spec, prefix):
+ options = ['--prefix=%s' % prefix,
+ '--enable-R-shlib',
+ '--enable-BLAS-shlib']
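+ # With +external-lapack, --with-blas/--with-lapack make R link the spack-built
+ # BLAS/LAPACK instead of the copy bundled with the R sources.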
+ if '+external-lapack' in spec:
+ options.extend(['--with-blas', '--with-lapack'])
+
+ configure(*options)
+ make()
+ make('install')
diff --git a/var/spack/packages/SAMRAI/no-tool-build.patch b/var/spack/repos/builtin/packages/SAMRAI/no-tool-build.patch
index 1adf0cf721..1adf0cf721 100644
--- a/var/spack/packages/SAMRAI/no-tool-build.patch
+++ b/var/spack/repos/builtin/packages/SAMRAI/no-tool-build.patch
diff --git a/var/spack/packages/SAMRAI/package.py b/var/spack/repos/builtin/packages/SAMRAI/package.py
index eef041f0d5..2c3b9180af 100644
--- a/var/spack/packages/SAMRAI/package.py
+++ b/var/spack/repos/builtin/packages/SAMRAI/package.py
@@ -12,6 +12,7 @@ class Samrai(Package):
list_url = homepage
version('3.9.1', '232d04d0c995f5abf20d94350befd0b2')
+ version('3.8.0', 'c18fcffa706346bfa5828b36787ce5fe')
version('3.7.3', '12d574eacadf8c9a70f1bb4cd1a69df6')
version('3.7.2', 'f6a716f171c9fdbf3cb12f71fa6e2737')
version('3.6.3-beta', 'ef0510bf2893042daedaca434e5ec6ce')
@@ -24,7 +25,7 @@ class Samrai(Package):
depends_on("mpi")
depends_on("zlib")
- depends_on("hdf5")
+ depends_on("hdf5+mpi")
depends_on("boost")
# don't build tools with gcc
@@ -32,13 +33,10 @@ class Samrai(Package):
# TODO: currently hard-coded to use openmpi - be careful!
def install(self, spec, prefix):
- mpi = next(m for m in ('openmpi', 'mpich', 'mvapich')
- if m in spec)
-
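+ # spec['mpi'] resolves the virtual MPI dependency to whichever implementation
+ # was concretized, so the wrapper compilers work for any MPI, not just the three above.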
configure(
"--prefix=%s" % prefix,
- "--with-CXX=%s" % spec[mpi].prefix.bin + "/mpic++",
- "--with-CC=%s" % spec[mpi].prefix.bin + "/mpicc",
+ "--with-CXX=%s" % spec['mpi'].prefix.bin + "/mpic++",
+ "--with-CC=%s" % spec['mpi'].prefix.bin + "/mpicc",
"--with-hdf5=%s" % spec['hdf5'].prefix,
"--with-boost=%s" % spec['boost'].prefix,
"--with-zlib=%s" % spec['zlib'].prefix,
diff --git a/var/spack/repos/builtin/packages/activeharmony/package.py b/var/spack/repos/builtin/packages/activeharmony/package.py
new file mode 100644
index 0000000000..45dcc7c0e8
--- /dev/null
+++ b/var/spack/repos/builtin/packages/activeharmony/package.py
@@ -0,0 +1,12 @@
+from spack import *
+
+class Activeharmony(Package):
+ """Active Harmony: a framework for auto-tuning (the automated search for values to improve the performance of a target application)."""
+ homepage = "http://www.dyninst.org/harmony"
+ url = "http://www.dyninst.org/sites/default/files/downloads/harmony/ah-4.5.tar.gz"
+
+ version('4.5', 'caee5b864716d376e2c25d739251b2a9')
+
+ def install(self, spec, prefix):
+ make("CFLAGS=-O3")
+ make("install", 'PREFIX=%s' % prefix)
diff --git a/var/spack/packages/adept-utils/package.py b/var/spack/repos/builtin/packages/adept-utils/package.py
index e4a2e1523f..fb59576c21 100644
--- a/var/spack/packages/adept-utils/package.py
+++ b/var/spack/repos/builtin/packages/adept-utils/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -27,8 +27,8 @@ from spack import *
class AdeptUtils(Package):
"""Utility libraries for LLNL performance tools."""
- homepage = "https://github.com/scalability-llnl/adept-utils"
- url = "https://github.com/scalability-llnl/adept-utils/archive/v1.0.tar.gz"
+ homepage = "https://github.com/llnl/adept-utils"
+ url = "https://github.com/llnl/adept-utils/archive/v1.0.tar.gz"
version('1.0.1', '731a310717adcb004d9d195130efee7d')
version('1.0', '5c6cd9badce56c945ac8551e34804397')
diff --git a/var/spack/repos/builtin/packages/apex/package.py b/var/spack/repos/builtin/packages/apex/package.py
new file mode 100644
index 0000000000..8769d97056
--- /dev/null
+++ b/var/spack/repos/builtin/packages/apex/package.py
@@ -0,0 +1,31 @@
+from spack import *
+from spack.util.environment import *
+
+class Apex(Package):
+ homepage = "http://github.com/khuck/xpress-apex"
+ url = "http://github.com/khuck/xpress-apex/archive/v0.1.tar.gz"
+
+ version('0.1', '8b95f0c0313da1575960d3ad69f18e75')
+
+ depends_on("binutils+libiberty")
+ depends_on("boost@1.54:")
+ depends_on("cmake@2.8.12:")
+ depends_on("activeharmony@4.5:")
+ depends_on("ompt-openmp")
+
+ def install(self, spec, prefix):
+
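+ # Remove the spack binutils bin directory from PATH for this build; presumably
+ # its tools would otherwise shadow the system assembler/linker during compilation.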
+ path = get_path("PATH")
+ path.remove(spec["binutils"].prefix.bin)
+ path_set("PATH", path)
+ with working_dir("build", create=True):
+ cmake('-DBOOST_ROOT=%s' % spec['boost'].prefix,
+ '-DUSE_BFD=TRUE',
+ '-DBFD_ROOT=%s' % spec['binutils'].prefix,
+ '-DUSE_ACTIVEHARMONY=TRUE',
+ '-DACTIVEHARMONY_ROOT=%s' % spec['activeharmony'].prefix,
+ '-DUSE_OMPT=TRUE',
+ '-DOMPT_ROOT=%s' % spec['ompt-openmp'].prefix,
+ '..', *std_cmake_args)
+ make()
+ make("install")
diff --git a/var/spack/packages/arpack/package.py b/var/spack/repos/builtin/packages/arpack/package.py
index 8c67c536f3..8c67c536f3 100644
--- a/var/spack/packages/arpack/package.py
+++ b/var/spack/repos/builtin/packages/arpack/package.py
diff --git a/var/spack/packages/asciidoc/package.py b/var/spack/repos/builtin/packages/asciidoc/package.py
index 828f3b3f4f..828f3b3f4f 100644
--- a/var/spack/packages/asciidoc/package.py
+++ b/var/spack/repos/builtin/packages/asciidoc/package.py
diff --git a/var/spack/packages/atk/package.py b/var/spack/repos/builtin/packages/atk/package.py
index 769805b227..769805b227 100644
--- a/var/spack/packages/atk/package.py
+++ b/var/spack/repos/builtin/packages/atk/package.py
diff --git a/var/spack/packages/atlas/package.py b/var/spack/repos/builtin/packages/atlas/package.py
index fc683363a7..fc683363a7 100644
--- a/var/spack/packages/atlas/package.py
+++ b/var/spack/repos/builtin/packages/atlas/package.py
diff --git a/var/spack/repos/builtin/packages/atop/package.py b/var/spack/repos/builtin/packages/atop/package.py
new file mode 100644
index 0000000000..346ab0763c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/atop/package.py
@@ -0,0 +1,16 @@
+from spack import *
+
+class Atop(Package):
+ """Atop is an ASCII full-screen performance monitor for Linux"""
+ homepage = "http://www.atoptool.nl/index.php"
+ url = "http://www.atoptool.nl/download/atop-2.2-3.tar.gz"
+
+ version('2.2-3', '034dc1544f2ec4e4d2c739d320dc326d')
+
+ def install(self, spec, prefix):
+ make()
+ mkdirp(prefix.bin)
+ install("atop", join_path(prefix.bin, "atop"))
+ mkdirp(join_path(prefix.man, "man1"))
+ install(join_path("man", "atop.1"),
+ join_path(prefix.man, "man1", "atop.1"))
diff --git a/var/spack/packages/autoconf/package.py b/var/spack/repos/builtin/packages/autoconf/package.py
index 5189faf054..5189faf054 100644
--- a/var/spack/packages/autoconf/package.py
+++ b/var/spack/repos/builtin/packages/autoconf/package.py
diff --git a/var/spack/packages/automaded/package.py b/var/spack/repos/builtin/packages/automaded/package.py
index 9fbd93e3b3..e0bc7efb8b 100644
--- a/var/spack/packages/automaded/package.py
+++ b/var/spack/repos/builtin/packages/automaded/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -36,8 +36,8 @@ class Automaded(Package):
finding the process (or group of processes) that caused the hang.
"""
- homepage = "https://github.com/scalability-llnl/AutomaDeD"
- url = "https://github.com/scalability-llnl/AutomaDeD/archive/v1.0.tar.gz"
+ homepage = "https://github.com/llnl/AutomaDeD"
+ url = "https://github.com/llnl/AutomaDeD/archive/v1.0.tar.gz"
version('1.0', '16a3d4def2c4c77d0bc4b21de8b3ab03')
diff --git a/var/spack/packages/automake/package.py b/var/spack/repos/builtin/packages/automake/package.py
index 9115822730..9115822730 100644
--- a/var/spack/packages/automake/package.py
+++ b/var/spack/repos/builtin/packages/automake/package.py
diff --git a/var/spack/packages/bear/package.py b/var/spack/repos/builtin/packages/bear/package.py
index 0d4436fccc..0d4436fccc 100644
--- a/var/spack/packages/bear/package.py
+++ b/var/spack/repos/builtin/packages/bear/package.py
diff --git a/var/spack/packages/bib2xhtml/package.py b/var/spack/repos/builtin/packages/bib2xhtml/package.py
index 7f8e0cfe5a..7f8e0cfe5a 100644
--- a/var/spack/packages/bib2xhtml/package.py
+++ b/var/spack/repos/builtin/packages/bib2xhtml/package.py
diff --git a/var/spack/repos/builtin/packages/binutils/binutilskrell-2.24.patch b/var/spack/repos/builtin/packages/binutils/binutilskrell-2.24.patch
new file mode 100644
index 0000000000..f48291a6c9
--- /dev/null
+++ b/var/spack/repos/builtin/packages/binutils/binutilskrell-2.24.patch
@@ -0,0 +1,52 @@
+--- binutils-2.24/libiberty/Makefile.in 2013-11-04 10:33:40.000000000 -0500
++++ binutils-2.24-fixes/libiberty/Makefile.in 2014-10-17 16:22:31.413655000 -0400
+@@ -66,6 +66,7 @@
+ MAKEOVERRIDES =
+
+ TARGETLIB = ./libiberty.a
++TARGETLIBPIC = ./libiberty_pic.a
+ TESTLIB = ./testlib.a
+
+ LIBOBJS = @LIBOBJS@
+@@ -355,27 +356,27 @@
+ # since it will be passed the multilib flags.
+ MULTIOSDIR = `$(CC) $(CFLAGS) -print-multi-os-directory`
+ install_to_libdir: all
+- if test -n "${target_header_dir}"; then \
+- ${mkinstalldirs} $(DESTDIR)$(libdir)/$(MULTIOSDIR); \
+- $(INSTALL_DATA) $(TARGETLIB) $(DESTDIR)$(libdir)/$(MULTIOSDIR)/$(TARGETLIB)n; \
+- ( cd $(DESTDIR)$(libdir)/$(MULTIOSDIR) ; chmod 644 $(TARGETLIB)n ;$(RANLIB) $(TARGETLIB)n ); \
+- mv -f $(DESTDIR)$(libdir)/$(MULTIOSDIR)/$(TARGETLIB)n $(DESTDIR)$(libdir)/$(MULTIOSDIR)/$(TARGETLIB); \
+- case "${target_header_dir}" in \
+- /*) thd=${target_header_dir};; \
+- *) thd=${includedir}/${target_header_dir};; \
+- esac; \
+- ${mkinstalldirs} $(DESTDIR)$${thd}; \
+- for h in ${INSTALLED_HEADERS}; do \
+- ${INSTALL_DATA} $$h $(DESTDIR)$${thd}; \
+- done; \
+- fi
++ ${mkinstalldirs} $(DESTDIR)$(libdir)/$(MULTIOSDIR); \
++ $(INSTALL_DATA) $(TARGETLIB) $(DESTDIR)$(libdir)/$(MULTIOSDIR)/$(TARGETLIB)n; \
++ $(INSTALL_DATA) pic/$(TARGETLIB) $(DESTDIR)$(libdir)/$(MULTIOSDIR)/$(TARGETLIBPIC)n; \
++ ( cd $(DESTDIR)$(libdir)/$(MULTIOSDIR) ; chmod 644 $(TARGETLIB)n ;$(RANLIB) $(TARGETLIB)n ); \
++ ( cd $(DESTDIR)$(libdir)/$(MULTIOSDIR) ; chmod 644 $(TARGETLIBPIC)n ;$(RANLIB) $(TARGETLIBPIC)n ); \
++ mv -f $(DESTDIR)$(libdir)/$(MULTIOSDIR)/$(TARGETLIB)n $(DESTDIR)$(libdir)/$(MULTIOSDIR)/$(TARGETLIB); \
++ mv -f $(DESTDIR)$(libdir)/$(MULTIOSDIR)/$(TARGETLIBPIC)n $(DESTDIR)$(libdir)/$(MULTIOSDIR)/$(TARGETLIBPIC); \
++ ${mkinstalldirs} $(DESTDIR)$${includedir}; \
++ for h in ${INSTALLED_HEADERS}; do \
++ ${INSTALL_DATA} $$h $(DESTDIR)$${includedir}; \
++ done;
+ @$(MULTIDO) $(FLAGS_TO_PASS) multi-do DO=install
+
+ install_to_tooldir: all
+ ${mkinstalldirs} $(DESTDIR)$(tooldir)/lib/$(MULTIOSDIR)
+ $(INSTALL_DATA) $(TARGETLIB) $(DESTDIR)$(tooldir)/lib/$(MULTIOSDIR)/$(TARGETLIB)n
++ $(INSTALL_DATA) pic/$(TARGETLIB) $(DESTDIR)$(tooldir)/lib/$(MULTIOSDIR)/$(TARGETLIBPIC)n
+ ( cd $(DESTDIR)$(tooldir)/lib/$(MULTIOSDIR) ; chmod 644 $(TARGETLIB)n; $(RANLIB) $(TARGETLIB)n )
++ ( cd $(DESTDIR)$(tooldir)/lib/$(MULTIOSDIR) ; chmod 644 $(TARGETLIBPIC)n; $(RANLIB) $(TARGETLIBPIC)n )
+ mv -f $(DESTDIR)$(tooldir)/lib/$(MULTIOSDIR)/$(TARGETLIB)n $(DESTDIR)$(tooldir)/lib/$(MULTIOSDIR)/$(TARGETLIB)
++ mv -f $(DESTDIR)$(tooldir)/lib/$(MULTIOSDIR)/$(TARGETLIBPIC)n $(DESTDIR)$(tooldir)/lib/$(MULTIOSDIR)/$(TARGETLIBPIC)
+ @$(MULTIDO) $(FLAGS_TO_PASS) multi-do DO=install
+
+ # required-list was used when building a shared bfd/opcodes/libiberty
diff --git a/var/spack/repos/builtin/packages/binutils/cr16.patch b/var/spack/repos/builtin/packages/binutils/cr16.patch
new file mode 100644
index 0000000000..2727c70b23
--- /dev/null
+++ b/var/spack/repos/builtin/packages/binutils/cr16.patch
@@ -0,0 +1,26 @@
+--- old/opcodes/cr16-dis.c 2014-10-14 03:32:04.000000000 -0400
++++ new/opcodes/cr16-dis.c 2016-01-14 21:54:26.000000000 -0500
+@@ -78,7 +78,7 @@
+ REG_ARG_TYPE;
+
+ /* Current opcode table entry we're disassembling. */
+-const inst *instruction;
++extern const inst *instruction;
+ /* Current instruction we're disassembling. */
+ ins cr16_currInsn;
+ /* The current instruction is read into 3 consecutive words. */
+@@ -86,12 +86,12 @@
+ /* Contains all words in appropriate order. */
+ ULONGLONG cr16_allWords;
+ /* Holds the current processed argument number. */
+-int processing_argument_number;
++extern int processing_argument_number;
+ /* Nonzero means a IMM4 instruction. */
+ int imm4flag;
+ /* Nonzero means the instruction's original size is
+ incremented (escape sequence is used). */
+-int size_changed;
++extern int size_changed;
+
+
+ /* Print the constant expression length. */
diff --git a/var/spack/repos/builtin/packages/binutils/package.py b/var/spack/repos/builtin/packages/binutils/package.py
new file mode 100644
index 0000000000..de04221e33
--- /dev/null
+++ b/var/spack/repos/builtin/packages/binutils/package.py
@@ -0,0 +1,40 @@
+from spack import *
+
+class Binutils(Package):
+ """GNU binutils, which contain the linker, assembler, objdump and others"""
+ homepage = "http://www.gnu.org/software/binutils/"
+
+ version('2.25', 'd9f3303f802a5b6b0bb73a335ab89d66', url="ftp://ftp.gnu.org/gnu/binutils/binutils-2.25.tar.bz2")
+ version('2.24', 'e0f71a7b2ddab0f8612336ac81d9636b', url="ftp://ftp.gnu.org/gnu/binutils/binutils-2.24.tar.bz2")
+ version('2.23.2', '4f8fa651e35ef262edc01d60fb45702e', url="ftp://ftp.gnu.org/gnu/binutils/binutils-2.23.2.tar.bz2")
+ version('2.20.1', '2b9dc8f2b7dbd5ec5992c6e29de0b764', url="ftp://ftp.gnu.org/gnu/binutils/binutils-2.20.1.tar.bz2")
+
+ # Add a patch that builds libiberty_pic.a, which OpenSpeedShop and cbtf-krell prefer
+ variant('krellpatch', default=False, description="Build with the OpenSpeedShop-based patch.")
+ variant('gold', default=True, description="Build the gold linker.")
+ patch('binutilskrell-2.24.patch', when='@2.24+krellpatch')
+
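+ # cr16.patch (above) declares three globals in opcodes/cr16-dis.c 'extern'
+ # to avoid multiple-definition errors at link time.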
+ patch('cr16.patch')
+
+ variant('libiberty', default=False, description='Also install libiberty.')
+
+ def install(self, spec, prefix):
+ configure_args = [
+ '--prefix=%s' % prefix,
+ '--disable-dependency-tracking',
+ '--enable-interwork',
+ '--enable-multilib',
+ '--enable-shared',
+ '--enable-64-bit-bfd',
+ '--enable-targets=all',
+ '--with-sysroot=/']
+
+ if '+gold' in spec:
+ configure_args.append('--enable-gold')
+
+ if '+libiberty' in spec:
+ configure_args.append('--enable-install-libiberty')
+
+ configure(*configure_args)
+ make()
+ make("install")
diff --git a/var/spack/packages/bison/package.py b/var/spack/repos/builtin/packages/bison/package.py
index 7c526fb958..7c526fb958 100644
--- a/var/spack/packages/bison/package.py
+++ b/var/spack/repos/builtin/packages/bison/package.py
diff --git a/var/spack/repos/builtin/packages/boost/package.py b/var/spack/repos/builtin/packages/boost/package.py
new file mode 100644
index 0000000000..3427b74ad6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/boost/package.py
@@ -0,0 +1,148 @@
+from spack import *
+
+class Boost(Package):
+ """Boost provides free peer-reviewed portable C++ source
+ libraries, emphasizing libraries that work well with the C++
+ Standard Library.
+
+ Boost libraries are intended to be widely useful, and usable
+ across a broad spectrum of applications. The Boost license
+ encourages both commercial and non-commercial use.
+ """
+ homepage = "http://www.boost.org"
+ url = "http://downloads.sourceforge.net/project/boost/boost/1.55.0/boost_1_55_0.tar.bz2"
+ list_url = "http://sourceforge.net/projects/boost/files/boost/"
+ list_depth = 2
+
+ version('1.60.0', '65a840e1a0b13a558ff19eeb2c4f0cbe')
+ version('1.59.0', '6aa9a5c6a4ca1016edd0ed1178e3cb87')
+ version('1.58.0', 'b8839650e61e9c1c0a89f371dd475546')
+ version('1.57.0', '1be49befbdd9a5ce9def2983ba3e7b76')
+ version('1.56.0', 'a744cf167b05d72335f27c88115f211d')
+ version('1.55.0', 'd6eef4b4cacb2183f2bf265a5a03a354')
+ version('1.54.0', '15cb8c0803064faef0c4ddf5bc5ca279')
+ version('1.53.0', 'a00d22605d5dbcfb4c9936a9b35bc4c2')
+ version('1.52.0', '3a855e0f919107e0ca4de4d84ad3f750')
+ version('1.51.0', '4b6bd483b692fd138aef84ed2c8eb679')
+ version('1.50.0', '52dd00be775e689f55a987baebccc462')
+ version('1.49.0', '0d202cb811f934282dea64856a175698')
+ version('1.48.0', 'd1e9a7a7f532bb031a3c175d86688d95')
+ version('1.47.0', 'a2dc343f7bc7f83f8941e47ed4a18200')
+ version('1.46.1', '7375679575f4c8db605d426fc721d506')
+ version('1.46.0', '37b12f1702319b73876b0097982087e0')
+ version('1.45.0', 'd405c606354789d0426bc07bea617e58')
+ version('1.44.0', 'f02578f5218f217a9f20e9c30e119c6a')
+ version('1.43.0', 'dd49767bfb726b0c774f7db0cef91ed1')
+ version('1.42.0', '7bf3b4eb841b62ffb0ade2b82218ebe6')
+ version('1.41.0', '8bb65e133907db727a2a825c5400d0a6')
+ version('1.40.0', 'ec3875caeac8c52c7c129802a8483bd7')
+ version('1.39.0', 'a17281fd88c48e0d866e1a12deecbcc0')
+ version('1.38.0', '5eca2116d39d61382b8f8235915cb267')
+ version('1.37.0', '8d9f990bfb7e83769fa5f1d6f065bc92')
+ version('1.36.0', '328bfec66c312150e4c2a78dcecb504b')
+ version('1.35.0', 'dce952a7214e72d6597516bcac84048b')
+ version('1.34.1', '2d938467e8a448a2c9763e0a9f8ca7e5')
+ version('1.34.0', 'ed5b9291ffad776f8757a916e1726ad0')
+
+ variant('debug', default=False, description='Switch to the debug version of Boost')
+ variant('python', default=False, description='Activate the component Boost.Python')
+ variant('mpi', default=False, description='Activate the component Boost.MPI')
+ variant('compression', default=True, description='Activate the compression Boost.iostreams')
+
+ depends_on('python', when='+python')
+ depends_on('mpi', when='+mpi')
+ depends_on('bzip2', when='+compression')
+ depends_on('zlib', when='+compression')
+
+ def url_for_version(self, version):
+ """Handle Boost's weird URLs, which write the version two different ways."""
+ parts = [str(p) for p in Version(version)]
+ dots = ".".join(parts)
+ underscores = "_".join(parts)
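+ # e.g. 1.55.0 -> "1.55.0" and "1_55_0", giving .../boost/1.55.0/boost_1_55_0.tar.bz2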
+ return "http://downloads.sourceforge.net/project/boost/boost/%s/boost_%s.tar.bz2" % (
+ dots, underscores)
+
+ def determine_toolset(self, spec):
+ if spec.satisfies("=darwin-x86_64"):
+ return 'darwin'
+
+ toolsets = {'g++': 'gcc',
+ 'icpc': 'intel',
+ 'clang++': 'clang'}
+
+ for cc, toolset in toolsets.iteritems():
+ if cc in self.compiler.cxx_names:
+ return toolset
+
+ # fallback to gcc if no toolset found
+ return 'gcc'
+
+ def determine_bootstrap_options(self, spec, options):
+ options.append('--with-toolset=%s' % self.determine_toolset(spec))
+
+ without_libs = []
+ if '~mpi' in spec:
+ without_libs.append('mpi')
+ if '~python' in spec:
+ without_libs.append('python')
+ else:
+ options.append('--with-python=%s' %
+ join_path(spec['python'].prefix.bin, 'python'))
+
+ if without_libs:
+ options.append('--without-libraries=%s' % ','.join(without_libs))
+
+ with open('user-config.jam', 'w') as f:
+ if '+mpi' in spec:
+ f.write('using mpi : %s ;\n' %
+ join_path(spec['mpi'].prefix.bin, 'mpicxx'))
+ if '+python' in spec:
+ f.write('using python : %s : %s ;\n' %
+ (spec['python'].version,
+ join_path(spec['python'].prefix.bin, 'python')))
+
+ def determine_b2_options(self, spec, options):
+ if '+debug' in spec:
+ options.append('variant=debug')
+ else:
+ options.append('variant=release')
+
+ if '~compression' in spec:
+ options.extend([
+ '-s', 'NO_BZIP2=1',
+ '-s', 'NO_ZLIB=1'])
+
+ if '+compression' in spec:
+ options.extend([
+ '-s', 'BZIP2_INCLUDE=%s' % spec['bzip2'].prefix.include,
+ '-s', 'BZIP2_LIBPATH=%s' % spec['bzip2'].prefix.lib,
+ '-s', 'ZLIB_INCLUDE=%s' % spec['zlib'].prefix.include,
+ '-s', 'ZLIB_LIBPATH=%s' % spec['zlib'].prefix.lib,
+ ])
+
+ options.extend([
+ 'toolset=%s' % self.determine_toolset(spec),
+ 'link=static,shared',
+ 'threading=single,multi',
+ '--layout=tagged'])
+
+ def install(self, spec, prefix):
+ # to make Boost find the user-config.jam
+ env['BOOST_BUILD_PATH'] = './'
+
+ bootstrap = Executable('./bootstrap.sh')
+
+ bootstrap_options = ['--prefix=%s' % prefix]
+ self.determine_bootstrap_options(spec, bootstrap_options)
+
+ bootstrap(*bootstrap_options)
+
+ # b2 used to be called bjam, before 1.47 (sigh)
+ b2name = './b2' if spec.satisfies('@1.47:') else './bjam'
+
+ b2 = Executable(b2name)
+ b2_options = ['-j', '%s' % make_jobs]
+
+ self.determine_b2_options(spec, b2_options)
+
+ b2('install', *b2_options)
diff --git a/var/spack/packages/bowtie2/bowtie2-2.5.patch b/var/spack/repos/builtin/packages/bowtie2/bowtie2-2.5.patch
index 290be39c73..290be39c73 100644
--- a/var/spack/packages/bowtie2/bowtie2-2.5.patch
+++ b/var/spack/repos/builtin/packages/bowtie2/bowtie2-2.5.patch
diff --git a/var/spack/packages/bowtie2/package.py b/var/spack/repos/builtin/packages/bowtie2/package.py
index 339aab6598..339aab6598 100644
--- a/var/spack/packages/bowtie2/package.py
+++ b/var/spack/repos/builtin/packages/bowtie2/package.py
diff --git a/var/spack/packages/boxlib/package.py b/var/spack/repos/builtin/packages/boxlib/package.py
index 4f1b71132f..4f1b71132f 100644
--- a/var/spack/packages/boxlib/package.py
+++ b/var/spack/repos/builtin/packages/boxlib/package.py
diff --git a/var/spack/repos/builtin/packages/bzip2/package.py b/var/spack/repos/builtin/packages/bzip2/package.py
new file mode 100644
index 0000000000..638ba1fa4d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/bzip2/package.py
@@ -0,0 +1,60 @@
+from spack import *
+
+class Bzip2(Package):
+ """bzip2 is a freely available, patent free high-quality data
+ compressor. It typically compresses files to within 10% to 15%
+ of the best available techniques (the PPM family of statistical
+ compressors), whilst being around twice as fast at compression
+ and six times faster at decompression.
+
+ """
+ homepage = "http://www.bzip.org"
+ url = "http://www.bzip.org/1.0.6/bzip2-1.0.6.tar.gz"
+
+ version('1.0.6', '00b516f4704d4a7cb50a1d97e6e8e15b')
+
+
+ def patch(self):
+ mf = FileFilter('Makefile-libbz2_so')
+ mf.filter(r'^CC=gcc', 'CC=cc')
+
+ # Below stuff patches the link line to use RPATHs on Mac OS X.
+ if 'darwin' in self.spec.architecture:
+ v = self.spec.version
+ v1, v2, v3 = (v.up_to(i) for i in (1,2,3))
+
+ mf.filter('$(CC) -shared -Wl,-soname -Wl,libbz2.so.{0} -o libbz2.so.{1} $(OBJS)'.format(v2, v3),
+ '$(CC) -dynamiclib -Wl,-install_name -Wl,@rpath/libbz2.{0}.dylib -current_version {1} -compatibility_version {2} -o libbz2.{3}.dylib $(OBJS)'.format(v1, v2, v3, v3), string=True)
+
+ mf.filter('$(CC) $(CFLAGS) -o bzip2-shared bzip2.c libbz2.so.{0}'.format(v3),
+ '$(CC) $(CFLAGS) -o bzip2-shared bzip2.c libbz2.{0}.dylib'.format(v3), string=True)
+ mf.filter('rm -f libbz2.so.{0}'.format(v2),
+ 'rm -f libbz2.{0}.dylib'.format(v2), string=True)
+ mf.filter('ln -s libbz2.so.{0} libbz2.so.{1}'.format(v3, v2),
+ 'ln -s libbz2.{0}.dylib libbz2.{1}.dylib'.format(v3, v2), string=True)
+
+
+ def install(self, spec, prefix):
+ make('-f', 'Makefile-libbz2_so')
+ make('clean')
+ make("install", "PREFIX=%s" % prefix)
+
+ install('bzip2-shared', join_path(prefix.bin, 'bzip2'))
+
+ v1, v2, v3 = (self.spec.version.up_to(i) for i in (1,2,3))
+ if 'darwin' in self.spec.architecture:
+ lib = 'libbz2.dylib'
+ lib1, lib2, lib3 = ('libbz2.{0}.dylib'.format(v) for v in (v1, v2, v3))
+ else:
+ lib = 'libbz2.so'
+ lib1, lib2, lib3 = ('libbz2.so.{0}'.format(v) for v in (v1, v2, v3))
+
+ install(lib3, join_path(prefix.lib, lib3))
+ with working_dir(prefix.lib):
+ for l in (lib, lib1, lib2):
+ symlink(lib3, l)
+
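+ # 'make install' puts bunzip2 and bzcat in place as separate copies;
+ # replace them with symlinks to the shared bzip2.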
+ with working_dir(prefix.bin):
+ force_remove('bunzip2', 'bzcat')
+ symlink('bzip2', 'bunzip2')
+ symlink('bzip2', 'bzcat')
diff --git a/var/spack/packages/cairo/package.py b/var/spack/repos/builtin/packages/cairo/package.py
index e1ac8aaa7d..8255e869be 100644
--- a/var/spack/packages/cairo/package.py
+++ b/var/spack/repos/builtin/packages/cairo/package.py
@@ -14,6 +14,7 @@ class Cairo(Package):
def install(self, spec, prefix):
configure("--prefix=%s" % prefix,
+ "--disable-trace", # can cause problems with libiberty
"--enable-tee")
make()
make("install")
diff --git a/var/spack/packages/callpath/package.py b/var/spack/repos/builtin/packages/callpath/package.py
index f8a1eab9f7..3d2d96249e 100644
--- a/var/spack/packages/callpath/package.py
+++ b/var/spack/repos/builtin/packages/callpath/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -28,8 +28,8 @@ class Callpath(Package):
"""Library for representing callpaths consistently in
distributed-memory performance tools."""
- homepage = "https://github.com/scalability-llnl/callpath"
- url = "https://github.com/scalability-llnl/callpath/archive/v1.0.1.tar.gz"
+ homepage = "https://github.com/llnl/callpath"
+ url = "https://github.com/llnl/callpath/archive/v1.0.1.tar.gz"
version('1.0.2', 'b1994d5ee7c7db9d27586fc2dcf8f373')
version('1.0.1', '0047983d2a52c5c335f8ba7f5bab2325')
diff --git a/var/spack/packages/cblas/package.py b/var/spack/repos/builtin/packages/cblas/package.py
index 3cfe5ee588..3cfe5ee588 100644
--- a/var/spack/packages/cblas/package.py
+++ b/var/spack/repos/builtin/packages/cblas/package.py
diff --git a/var/spack/repos/builtin/packages/cbtf-argonavis/package.py b/var/spack/repos/builtin/packages/cbtf-argonavis/package.py
new file mode 100644
index 0000000000..7b07933911
--- /dev/null
+++ b/var/spack/repos/builtin/packages/cbtf-argonavis/package.py
@@ -0,0 +1,66 @@
+################################################################################
+# Copyright (c) 2015 Krell Institute. All Rights Reserved.
+#
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
+# details.
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc., 59 Temple
+# Place, Suite 330, Boston, MA 02111-1307 USA
+################################################################################
+
+from spack import *
+
+class CbtfArgonavis(Package):
+ """CBTF Argo Navis project contains the CUDA collector and supporting
+ libraries that was done as a result of a DOE SBIR grant."""
+ homepage = "http://sourceforge.net/p/cbtf/wiki/Home/"
+
+ # Mirror access template example
+ #url = "file:/g/g24/jeg/cbtf-argonavis-1.5.tar.gz"
+ #version('1.5', '1f7f6512f55409ed2135cfceabe26b82')
+
+ version('1.6', branch='master', git='http://git.code.sf.net/p/cbtf-argonavis/cbtf-argonavis')
+
+ depends_on("cmake@3.0.2:")
+ depends_on("papi")
+ depends_on("cbtf")
+ depends_on("cbtf-krell")
+ depends_on("cuda")
+
+ parallel = False
+
+ def install(self, spec, prefix):
+
+ # Look for package installation information in the cbtf and cbtf-krell prefixes
+ cmake_prefix_path = join_path(spec['cbtf'].prefix) + ':' + join_path(spec['cbtf-krell'].prefix)
+
+ # FIXME: hard-coded for testing purposes; we will update this when the external package feature is available
+ cuda_prefix_path = "/usr/local/cudatoolkit-6.0"
+ cupti_prefix_path = "/usr/local/cudatoolkit-6.0/extras/CUPTI"
+
+
+ with working_dir('CUDA'):
+ with working_dir('build', create=True):
+ cmake('..',
+ '-DCMAKE_INSTALL_PREFIX=%s' % prefix,
+ '-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64,
+ '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path,
+ '-DCUDA_INSTALL_PATH=%s' % cuda_prefix_path,
+ '-DCUDA_ROOT=%s' % cuda_prefix_path,
+ '-DCUPTI_ROOT=%s' % cupti_prefix_path,
+ '-DCUDA_DIR=%s' % cuda_prefix_path,
+ '-DPAPI_ROOT=%s' % spec['papi'].prefix,
+ '-DCBTF_PREFIX=%s' % spec['cbtf'].prefix,
+ *std_cmake_args)
+ make("clean")
+ make()
+ make("install")
+
diff --git a/var/spack/repos/builtin/packages/cbtf-krell/package.py b/var/spack/repos/builtin/packages/cbtf-krell/package.py
new file mode 100644
index 0000000000..9458ac113c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/cbtf-krell/package.py
@@ -0,0 +1,116 @@
+################################################################################
+# Copyright (c) 2015 Krell Institute. All Rights Reserved.
+#
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
+# details.
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc., 59 Temple
+# Place, Suite 330, Boston, MA 02111-1307 USA
+################################################################################
+
+from spack import *
+
+class CbtfKrell(Package):
+ """CBTF Krell project contains the Krell Institute contributions to the CBTF project.
+ These contributions include many performance data collectors and support
+ libraries as well as some example tools that drive the data collection at
+ HPC levels of scale."""
+ homepage = "http://sourceforge.net/p/cbtf/wiki/Home/"
+
+ # optional mirror access template
+ #url = "file:/g/g24/jeg/cbtf-krell-1.5.tar.gz"
+ #version('1.5', 'b13f6df6a93c44149d977773dd776d2f')
+
+ version('1.6', branch='master', git='http://git.code.sf.net/p/cbtf-krell/cbtf-krell')
+
+
+ # Dependencies for cbtf-krell
+
+ # For binutils service
+ depends_on("binutils@2.24+krellpatch")
+
+ # collectionTool
+ depends_on("boost@1.50.0")
+ depends_on("dyninst@8.2.1")
+ depends_on("mrnet@4.1.0:+lwthreads")
+ depends_on("xerces-c@3.1.1:")
+ depends_on("cbtf")
+
+ # for services and collectors
+ depends_on("libmonitor+krellpatch")
+ depends_on("libunwind")
+ depends_on("papi")
+
+ # MPI Installations
+ # These have not worked either for build or execution, commenting out for now
+ #depends_on("openmpi")
+ #depends_on("mvapich2@2.0")
+ #depends_on("mpich")
+
+ parallel = False
+
+ def install(self, spec, prefix):
+
+ # Add in paths for finding package config files that tell us where to find these packages
+ cmake_prefix_path = join_path(spec['cbtf'].prefix) + ':' + join_path(spec['dyninst'].prefix)
+
+ # FIXME - hard code path until external package support is available
+ # Need to change this path and/or add additional paths for MPI experiment support on different platforms
+ #openmpi_prefix_path = "/opt/openmpi-1.8.2"
+ #mvapich_prefix_path = "/usr/local/tools/mvapich-gnu"
+
+ # Other possibilities; each needs a -DMVAPICH_DIR=, etc., clause in the cmake command to be recognized
+ # mvapich_prefix_path = "<mvapich install path>"
+ # mvapich2_prefix_path = "<mvapich2 install path>"
+ # mpich2_prefix_path = "<mpich2 install path>"
+ # mpich_prefix_path = "<mpich install path>"
+ # mpt_prefix_path = "<mpt install path>"
+
+ # Add in paths for cuda if requested via the cuda variant
+ # FIXME - hard code path until external package support is available
+ #if '+cuda' in spec:
+ # cuda_prefix_path = "/usr/local/cuda-6.0"
+ # cupti_prefix_path = "/usr/local/cuda-6.0/extras/CUPTI"
+ #else:
+ # cuda_prefix_path = ""
+ # cupti_prefix_path = ""
+
+ #'-DMVAPICH2_DIR=%s' % spec['mvapich2'].prefix,
+ #'-DOPENMPI_DIR=%s' % spec['openmpi'].prefix,
+ #'-DMPICH_DIR=%s' % spec['mpich'].prefix,
+ #'-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64,
+ #'-DOPENMPI_DIR=%s' % openmpi_prefix_path,
+ #'-DMVAPICH_DIR=%s' % mvapich_prefix_path,
+ #'-DLIB_SUFFIX=64',
+ #'-DCUDA_DIR=%s' % cuda_prefix_path,
+ #'-DCUPTI_DIR=%s' % cupti_prefix_path,
+
+ # Build cbtf-krell with cmake
+ with working_dir('build_cbtf_krell', create=True):
+ cmake('..',
+ '-DCMAKE_BUILD_TYPE=Debug',
+ '-DCMAKE_INSTALL_PREFIX=%s' % prefix,
+ '-DCBTF_DIR=%s' % spec['cbtf'].prefix,
+ '-DBINUTILS_DIR=%s' % spec['binutils'].prefix,
+ '-DLIBMONITOR_DIR=%s' % spec['libmonitor'].prefix,
+ '-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix,
+ '-DPAPI_DIR=%s' % spec['papi'].prefix,
+ '-DBOOST_DIR=%s' % spec['boost'].prefix,
+ '-DMRNET_DIR=%s' % spec['mrnet'].prefix,
+ '-DDYNINST_DIR=%s' % spec['dyninst'].prefix,
+ '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix,
+ '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path,
+ *std_cmake_args)
+
+ make("clean")
+ make()
+ make("install")
+
diff --git a/var/spack/repos/builtin/packages/cbtf-lanl/package.py b/var/spack/repos/builtin/packages/cbtf-lanl/package.py
new file mode 100644
index 0000000000..2da9e8a1f7
--- /dev/null
+++ b/var/spack/repos/builtin/packages/cbtf-lanl/package.py
@@ -0,0 +1,60 @@
+################################################################################
+# Copyright (c) 2015 Krell Institute. All Rights Reserved.
+#
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
+# details.
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc., 59 Temple
+# Place, Suite 330, Boston, MA 02111-1307 USA
+################################################################################
+
+from spack import *
+
+class CbtfLanl(Package):
+ """CBTF LANL project contains a memory tool and data center type system command monitoring tool."""
+ homepage = "http://sourceforge.net/p/cbtf/wiki/Home/"
+
+
+ # Mirror access template example
+ #url = "file:/g/g24/jeg/cbtf-lanl-1.5.tar.gz"
+ #version('1.5', 'c3f78f967b0a42c6734ce4be0e602426')
+
+ version('1.6', branch='master', git='http://git.code.sf.net/p/cbtf-lanl/cbtf-lanl')
+
+
+ # Dependencies for cbtf-krell
+ depends_on("boost@1.50")
+ depends_on("mrnet@4.1.0:+lwthreads")
+ depends_on("xerces-c@3.1.1:")
+ depends_on("cbtf")
+ depends_on("cbtf-krell")
+
+ parallel = False
+
+ def install(self, spec, prefix):
+
+ # Add in paths for finding package config files that tell us where to find these packages
+ cmake_prefix_path = join_path(spec['cbtf'].prefix) + ':' + join_path(spec['cbtf-krell'].prefix)
+
+ with working_dir('build', create=True):
+ cmake('..',
+ '-DCBTF_DIR=%s' % spec['cbtf'].prefix,
+ '-DCBTF_KRELL_DIR=%s' % spec['cbtf-krell'].prefix,
+ '-DMRNET_DIR=%s' % spec['mrnet'].prefix,
+ '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix,
+ '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path,
+ '-DCMAKE_MODULE_PATH=%s' % join_path(prefix.share,'KrellInstitute','cmake'),
+ *std_cmake_args)
+
+ make("clean")
+ make()
+ make("install")
+
diff --git a/var/spack/repos/builtin/packages/cbtf/package.py b/var/spack/repos/builtin/packages/cbtf/package.py
new file mode 100644
index 0000000000..52e6a07020
--- /dev/null
+++ b/var/spack/repos/builtin/packages/cbtf/package.py
@@ -0,0 +1,62 @@
+################################################################################
+# Copyright (c) 2015 Krell Institute. All Rights Reserved.
+#
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
+# details.
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc., 59 Temple
+# Place, Suite 330, Boston, MA 02111-1307 USA
+################################################################################
+
+from spack import *
+
+class Cbtf(Package):
+ """CBTF project contains the base code for CBTF that supports creating components,
+ component networks and the support to connect these components and component
+ networks into sequential and distributed network tools."""
+ homepage = "http://sourceforge.net/p/cbtf/wiki/Home"
+
+ # Mirror access template example
+ #url = "file:/g/g24/jeg/cbtf-1.5.tar.gz"
+ #version('1.6', '1ca88a8834759c4c74452cb97fe7b70a')
+
+ # Use when the git repository is available
+ version('1.6', branch='master', git='http://git.code.sf.net/p/cbtf/cbtf')
+
+ depends_on("cmake")
+ #depends_on("boost@1.42.0:")
+ depends_on("boost@1.50.0")
+ depends_on("mrnet@4.1.0+lwthreads")
+ depends_on("xerces-c@3.1.1:")
+ depends_on("libxml2")
+
+ parallel = False
+
+ def install(self, spec, prefix):
+ with working_dir('build', create=True):
+
+ # Boost_NO_SYSTEM_PATHS Set to TRUE to suppress searching
+ # in system paths (or other locations outside of BOOST_ROOT
+ # or BOOST_INCLUDEDIR). Useful when specifying BOOST_ROOT.
+ # Defaults to OFF.
+
+ cmake('..',
+ '--debug-output',
+ '-DBoost_NO_SYSTEM_PATHS=TRUE',
+ '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix,
+ '-DBOOST_ROOT=%s' % spec['boost'].prefix,
+ '-DMRNET_DIR=%s' % spec['mrnet'].prefix,
+ '-DCMAKE_MODULE_PATH=%s' % join_path(prefix.share,'KrellInstitute','cmake'),
+ *std_cmake_args)
+
+ make("clean")
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/cereal/Werror.patch b/var/spack/repos/builtin/packages/cereal/Werror.patch
new file mode 100644
index 0000000000..d39eaaffdb
--- /dev/null
+++ b/var/spack/repos/builtin/packages/cereal/Werror.patch
@@ -0,0 +1,33 @@
+--- old/sandbox/CMakeLists.txt
++++ new/sandbox/CMakeLists.txt
+@@ -4,9 +4,11 @@
+ add_executable(sandbox_json sandbox_json.cpp)
+ add_executable(sandbox_rtti sandbox_rtti.cpp)
+
++if(Boost_FOUND)
+ add_executable(sandbox_vs sandbox_vs.cpp)
+ target_link_libraries(sandbox_vs sandbox_vs_dll)
+ include_directories(sandbox_shared_lib)
++endif(Boost_FOUND)
+
+ if(Boost_FOUND)
+ add_executable(performance performance.cpp)
+--- old/include/cereal/types/common.hpp
++++ new/include/cereal/types/common.hpp
+@@ -106,14 +106,16 @@
+ t = reinterpret_cast<typename common_detail::is_enum<T>::type const &>( value );
+ }
+
++#ifndef CEREAL_ENABLE_RAW_POINTER_SERIALIZATION
+ //! Serialization for raw pointers
+ /*! This exists only to throw a static_assert to let users know we don't support raw pointers. */
+ template <class Archive, class T> inline
+ void CEREAL_SERIALIZE_FUNCTION_NAME( Archive &, T * & )
+ {
+ static_assert(cereal::traits::detail::delay_static_assert<T>::value,
+ "Cereal does not support serializing raw pointers - please use a smart pointer");
+ }
++#endif
+
+ //! Serialization for C style arrays
+ template <class Archive, class T> inline
diff --git a/var/spack/repos/builtin/packages/cereal/package.py b/var/spack/repos/builtin/packages/cereal/package.py
new file mode 100644
index 0000000000..a83927456f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/cereal/package.py
@@ -0,0 +1,34 @@
+from spack import *
+import shutil
+
+class Cereal(Package):
+ """cereal is a header-only C++11 serialization library. cereal takes arbitrary data types and reversibly turns them into different representations, such as compact binary encodings, XML, or JSON. cereal was designed to be fast, light-weight, and easy to extend - it has no external dependencies and can be easily bundled with other code or used standalone."""
+ homepage = "http://uscilab.github.io/cereal/"
+ url = "https://github.com/USCiLab/cereal/archive/v1.1.2.tar.gz"
+
+ version('1.1.2', '34d4ad174acbff005c36d4d10e48cbb9')
+ version('1.1.1', '0ceff308c38f37d5b5f6df3927451c27')
+ version('1.1.0', '9f2d5f72e935c54f4c6d23e954ce699f')
+ version('1.0.0', 'd1bacca70a95cec0ddbff68b0871296b')
+ version('0.9.1', '8872d4444ff274ce6cd1ed364d0fc0ad')
+
+ patch("Werror.patch")
+
+ depends_on("cmake @2.6.2:")
+
+ def install(self, spec, prefix):
+ # Don't use -Werror
+ filter_file(r'-Werror', '', 'CMakeLists.txt')
+
+ # configure
+ # Boost is only used for self-tests, which we are not running (yet?)
+ cmake('.', '-DCMAKE_DISABLE_FIND_PACKAGE_Boost=TRUE', *std_cmake_args)
+
+ # Build
+ make()
+
+ # Install
+ shutil.rmtree(join_path(prefix, 'doc'), ignore_errors=True)
+ shutil.rmtree(join_path(prefix, 'include'), ignore_errors=True)
+ shutil.copytree('doc', join_path(prefix, 'doc'), symlinks=True)
+ shutil.copytree('include', join_path(prefix, 'include'), symlinks=True)
diff --git a/var/spack/repos/builtin/packages/cfitsio/package.py b/var/spack/repos/builtin/packages/cfitsio/package.py
new file mode 100644
index 0000000000..ff450cb5f3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/cfitsio/package.py
@@ -0,0 +1,18 @@
+from spack import *
+
+class Cfitsio(Package):
+ """
+ CFITSIO is a library of C and Fortran subroutines for reading and writing
+ data files in FITS (Flexible Image Transport System) data format.
+ """
+ homepage = 'http://heasarc.gsfc.nasa.gov/fitsio/'
+ version('3.370', 'abebd2d02ba5b0503c633581e3bfa116')
+
+ def url_for_version(self, v):
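+ # cfitsio tarballs drop the dot from the version: 3.370 -> cfitsio3370.tar.gz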
+ url = 'ftp://heasarc.gsfc.nasa.gov/software/fitsio/c/cfitsio{0}.tar.gz'
+ return url.format(str(v).replace('.', ''))
+
+ def install(self, spec, prefix):
+ configure('--prefix=' + prefix)
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/cgal/package.py b/var/spack/repos/builtin/packages/cgal/package.py
new file mode 100644
index 0000000000..97356433be
--- /dev/null
+++ b/var/spack/repos/builtin/packages/cgal/package.py
@@ -0,0 +1,73 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Cgal(Package):
+ """
+ CGAL is a software project that provides easy access to efficient and reliable geometric algorithms in the form of
+ a C++ library. CGAL is used in various areas needing geometric computation, such as geographic information systems,
+ computer aided design, molecular biology, medical imaging, computer graphics, and robotics.
+ """
+ homepage = 'http://www.cgal.org/'
+ url = 'https://github.com/CGAL/cgal/archive/releases/CGAL-4.7.tar.gz'
+
+ version('4.7', '4826714810f3b4c65cac96b90fb03b67')
+ version('4.6.3', 'e8ee2ecc8d2b09b94a121c09257b576d')
+
+ # Installation instructions : http://doc.cgal.org/latest/Manual/installation.html
+ variant('shared', default=True, description='Enables the build of shared libraries')
+ variant('debug', default=False, description='Builds a debug version of the libraries')
+
+ depends_on('boost')
+ depends_on('mpfr')
+ depends_on('gmp')
+ depends_on('zlib')
+
+ # FIXME : Qt5 dependency missing (needs Qt5 and OpenGL)
+ # FIXME : Optional third party libraries missing
+
+ def install(self, spec, prefix):
+
+ options = []
+ options.extend(std_cmake_args)
+ # CGAL supports only Release and Debug build type. Any other build type will raise an error at configure time
+ if '+debug' in spec:
+ options.append('-DCMAKE_BUILD_TYPE:STRING=Debug')
+ else:
+ options.append('-DCMAKE_BUILD_TYPE:STRING=Release')
+
+ if '+shared' in spec:
+ options.append('-DBUILD_SHARED_LIBS:BOOL=ON')
+ else:
+ options.append('-DBUILD_SHARED_LIBS:BOOL=OFF')
+
+ build_directory = join_path(self.stage.path, 'spack-build')
+ source_directory = self.stage.source_path
+ with working_dir(build_directory, create=True):
+ cmake(source_directory, *options)
+ make()
+ make("install")
diff --git a/var/spack/packages/cgm/package.py b/var/spack/repos/builtin/packages/cgm/package.py
index 05d6395c5a..05d6395c5a 100644
--- a/var/spack/packages/cgm/package.py
+++ b/var/spack/repos/builtin/packages/cgm/package.py
diff --git a/var/spack/repos/builtin/packages/cityhash/package.py b/var/spack/repos/builtin/packages/cityhash/package.py
new file mode 100644
index 0000000000..1643cc3b42
--- /dev/null
+++ b/var/spack/repos/builtin/packages/cityhash/package.py
@@ -0,0 +1,16 @@
+from spack import *
+from spack.util.environment import *
+
+class Cityhash(Package):
+ homepage = "https://github.com/google/cityhash"
+ url = "https://github.com/google/cityhash"
+
+ version('2013-07-31', git='https://github.com/google/cityhash.git', commit='8af9b8c2b889d80c22d6bc26ba0df1afb79a30db')
+ version('master', branch='master', git='https://github.com/google/cityhash.git')
+
+ def install(self, spec, prefix):
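+ # --enable-sse4.2 builds the CRC32-based CityHashCrc variants, which
+ # require a CPU with SSE4.2 support.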
+ configure('--enable-sse4.2', '--prefix=%s' % prefix)
+
+ make()
+ make("install")
+
diff --git a/var/spack/repos/builtin/packages/cleverleaf/package.py b/var/spack/repos/builtin/packages/cleverleaf/package.py
new file mode 100644
index 0000000000..fb400b25c3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/cleverleaf/package.py
@@ -0,0 +1,23 @@
+from spack import *
+
+class Cleverleaf(Package):
+ """
+ CleverLeaf is a hydrodynamics mini-app that extends CloverLeaf with Adaptive
+ Mesh Refinement using the SAMRAI toolkit from Lawrence Livermore National
+ Laboratory. The primary goal of CleverLeaf is to evaluate the application of
+ AMR to the Lagrangian-Eulerian hydrodynamics scheme used by CloverLeaf.
+ """
+
+ homepage = "http://uk-mac.github.io/CleverLeaf/"
+ url = "https://github.com/UK-MAC/CleverLeaf/tarball/master"
+
+ version('develop', git='https://github.com/UK-MAC/CleverLeaf_ref.git', branch='develop')
+
+ depends_on("SAMRAI@3.8.0:")
+ depends_on("hdf5+mpi")
+ depends_on("boost")
+
+ def install(self, spec, prefix):
+ cmake(*std_cmake_args)
+ make()
+ make("install")
diff --git a/var/spack/packages/cloog/package.py b/var/spack/repos/builtin/packages/cloog/package.py
index 814a33c76c..814a33c76c 100644
--- a/var/spack/packages/cloog/package.py
+++ b/var/spack/repos/builtin/packages/cloog/package.py
diff --git a/var/spack/packages/cmake/package.py b/var/spack/repos/builtin/packages/cmake/package.py
index 9efa370c8b..f67ae21ebd 100644
--- a/var/spack/packages/cmake/package.py
+++ b/var/spack/repos/builtin/packages/cmake/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -31,15 +31,20 @@ class Cmake(Package):
version('2.8.10.2', '097278785da7182ec0aea8769d06860c',
url = 'http://www.cmake.org/files/v2.8/cmake-2.8.10.2.tar.gz')
-
+
version('3.0.2', 'db4c687a31444a929d2fdc36c4dfb95f',
url = 'http://www.cmake.org/files/v3.0/cmake-3.0.2.tar.gz')
-# version('3.0.1', 'e2e05d84cb44a42f1371d9995631dcf5')
-# version('3.0.0', '21a1c85e1a3b803c4b48e7ff915a863e')
+ version('3.4.0', 'cd3034e0a44256a0917e254167217fc8',
+ url = 'http://cmake.org/files/v3.4/cmake-3.4.0.tar.gz')
+
+ variant('ncurses', default=True, description='Enables the build of the ncurses gui')
+
+ depends_on('ncurses', when='+ncurses')
def install(self, spec, prefix):
configure('--prefix=' + prefix,
- '--parallel=' + str(make_jobs))
+ '--parallel=' + str(make_jobs),
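+ # Everything after '--' is handed to CMake itself; CMAKE_USE_OPENSSL=ON
+ # links the bundled curl against OpenSSL so https downloads work.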
+ '--', '-DCMAKE_USE_OPENSSL=ON')
make()
make('install')
diff --git a/var/spack/packages/coreutils/package.py b/var/spack/repos/builtin/packages/coreutils/package.py
index 78c608d8eb..78c608d8eb 100644
--- a/var/spack/packages/coreutils/package.py
+++ b/var/spack/repos/builtin/packages/coreutils/package.py
diff --git a/var/spack/packages/cppcheck/package.py b/var/spack/repos/builtin/packages/cppcheck/package.py
index 8e98f457ee..8e98f457ee 100644
--- a/var/spack/packages/cppcheck/package.py
+++ b/var/spack/repos/builtin/packages/cppcheck/package.py
diff --git a/var/spack/packages/cram/package.py b/var/spack/repos/builtin/packages/cram/package.py
index 4b8ec56f25..b19422b8a8 100644
--- a/var/spack/packages/cram/package.py
+++ b/var/spack/repos/builtin/packages/cram/package.py
@@ -2,11 +2,12 @@ from spack import *
class Cram(Package):
"""Cram runs many small MPI jobs inside one large MPI job."""
- homepage = "https://github.com/scalability-llnl/cram"
- url = "http://github.com/scalability-llnl/cram/archive/v1.0.1.tar.gz"
+ homepage = "https://github.com/llnl/cram"
+ url = "http://github.com/llnl/cram/archive/v1.0.1.tar.gz"
version('1.0.1', 'c73711e945cf5dc603e44395f6647f5e')
+ extends('python')
depends_on("mpi")
def install(self, spec, prefix):
diff --git a/var/spack/packages/cscope/package.py b/var/spack/repos/builtin/packages/cscope/package.py
index 9aac0f7304..9aac0f7304 100644
--- a/var/spack/packages/cscope/package.py
+++ b/var/spack/repos/builtin/packages/cscope/package.py
diff --git a/var/spack/repos/builtin/packages/cube/package.py b/var/spack/repos/builtin/packages/cube/package.py
new file mode 100644
index 0000000000..17d388c33d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/cube/package.py
@@ -0,0 +1,55 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Cube(Package):
+ """
+ Cube is the profile viewer for Score-P and Scalasca profiles. It displays a multi-dimensional performance space
+ consisting of the dimensions:
+ - performance metric
+ - call path
+ - system resource
+ """
+
+ homepage = "http://www.scalasca.org/software/cube-4.x/download.html"
+ url = "http://apps.fz-juelich.de/scalasca/releases/cube/4.2/dist/cube-4.2.3.tar.gz"
+
+ version('4.3.3', '07e109248ed8ffc7bdcce614264a2909',
+ url='http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3.3.tar.gz')
+
+ version('4.2.3', '8f95b9531f5a8f8134f279c2767c9b20',
+ url="http://apps.fz-juelich.de/scalasca/releases/cube/4.2/dist/cube-4.2.3.tar.gz")
+
+ # TODO : add variant that builds GUI on top of Qt
+
+ def install(self, spec, prefix):
+ configure_args = ["--prefix=%s" % prefix,
+ "--without-paraver",
+ "--without-gui"]
+ configure(*configure_args)
+ make(parallel=False)
+ make("install", parallel=False)
diff --git a/var/spack/repos/builtin/packages/curl/package.py b/var/spack/repos/builtin/packages/curl/package.py
new file mode 100644
index 0000000000..9e684445c7
--- /dev/null
+++ b/var/spack/repos/builtin/packages/curl/package.py
@@ -0,0 +1,25 @@
+from spack import *
+
+class Curl(Package):
+ """cURL is an open source command line tool and library for
+ transferring data with URL syntax"""
+
+ homepage = "http://curl.haxx.se"
+ url = "http://curl.haxx.se/download/curl-7.46.0.tar.bz2"
+
+ version('7.46.0', '9979f989a2a9930d10f1b3deeabc2148')
+ version('7.45.0', '62c1a352b28558f25ba6209214beadc8')
+ version('7.44.0', '6b952ca00e5473b16a11f05f06aa8dae')
+ version('7.43.0', '11bddbb452a8b766b932f859aaeeed39')
+ version('7.42.1', '296945012ce647b94083ed427c1877a8')
+
+ depends_on("openssl")
+ depends_on("zlib")
+
+ def install(self, spec, prefix):
+ configure('--prefix=%s' % prefix,
+ '--with-zlib=%s' % spec['zlib'].prefix,
+ '--with-ssl=%s' % spec['openssl'].prefix)
+
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/czmq/package.py b/var/spack/repos/builtin/packages/czmq/package.py
new file mode 100644
index 0000000000..0665332179
--- /dev/null
+++ b/var/spack/repos/builtin/packages/czmq/package.py
@@ -0,0 +1,33 @@
+from spack import *
+import os
+
+class Czmq(Package):
+ """ A C interface to the ZMQ library """
+ homepage = "http://czmq.zeromq.org"
+ url = "https://github.com/zeromq/czmq/archive/v3.0.2.tar.gz"
+
+ version('3.0.2', '23e9885f7ee3ce88d99d0425f52e9be1', url='https://github.com/zeromq/czmq/archive/v3.0.2.tar.gz')
+
+ depends_on('libtool')
+ depends_on('automake')
+ depends_on('autoconf')
+ depends_on('pkg-config')
+ depends_on('zeromq')
+
+ def install(self, spec, prefix):
+ bash = which("bash")
+ # Work around autogen.sh oddities
+ # bash("./autogen.sh")
+ mkdirp("config")
+ autoreconf = which("autoreconf")
+ autoreconf("--install", "--verbose", "--force",
+ "-I", "config",
+ "-I", os.path.join(spec['pkg-config'].prefix, "share", "aclocal"),
+ "-I", os.path.join(spec['automake'].prefix, "share", "aclocal"),
+ "-I", os.path.join(spec['libtool'].prefix, "share", "aclocal"),
+ )
+ configure("--prefix=%s" % prefix)
+
+ make()
+ make("install")
+
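The explicit -I flags above splice each dependency's aclocal macro directory into autoreconf by hand. A hedged alternative (a sketch, assuming an aclocal new enough, 1.13 or later, to honor the ACLOCAL_PATH environment variable; not what this package actually does):

    os.environ['ACLOCAL_PATH'] = ':'.join(
        os.path.join(spec[d].prefix, 'share', 'aclocal')
        for d in ('pkg-config', 'automake', 'libtool'))
    autoreconf("--install", "--verbose", "--force", "-I", "config")
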
diff --git a/var/spack/repos/builtin/packages/damselfly/package.py b/var/spack/repos/builtin/packages/damselfly/package.py
new file mode 100644
index 0000000000..96666d1abe
--- /dev/null
+++ b/var/spack/repos/builtin/packages/damselfly/package.py
@@ -0,0 +1,14 @@
+from spack import *
+
+class Damselfly(Package):
+ """Damselfly is a model-based parallel network simulator."""
+ homepage = "https://github.com/llnl/damselfly"
+ url = "https://github.com/llnl/damselfly"
+
+ version('1.0', '05cf7e2d8ece4408c0f2abb7ab63fd74c0d62895', git='https://github.com/llnl/damselfly.git', tag='v1.0')
+
+ def install(self, spec, prefix):
+ with working_dir('spack-build', create=True):
+ cmake('-DCMAKE_BUILD_TYPE=release', '..', *std_cmake_args)
+ make()
+ make('install')
diff --git a/var/spack/packages/dbus/package.py b/var/spack/repos/builtin/packages/dbus/package.py
index f7f394498c..294b0de54e 100644
--- a/var/spack/packages/dbus/package.py
+++ b/var/spack/repos/builtin/packages/dbus/package.py
@@ -20,7 +20,10 @@ class Dbus(Package):
version('1.8.2', 'd6f709bbec0a022a1847c7caec9d6068')
def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
+ configure(
+ "--prefix=%s" % prefix,
+ "--disable-systemd",
+ "--disable-launchd")
make()
make("install")
diff --git a/var/spack/packages/docbook-xml/package.py b/var/spack/repos/builtin/packages/docbook-xml/package.py
index fce1de7deb..fce1de7deb 100644
--- a/var/spack/packages/docbook-xml/package.py
+++ b/var/spack/repos/builtin/packages/docbook-xml/package.py
diff --git a/var/spack/packages/doxygen/package.py b/var/spack/repos/builtin/packages/doxygen/package.py
index 3d4a4e47a7..3d4a4e47a7 100644
--- a/var/spack/packages/doxygen/package.py
+++ b/var/spack/repos/builtin/packages/doxygen/package.py
diff --git a/var/spack/packages/dri2proto/package.py b/var/spack/repos/builtin/packages/dri2proto/package.py
index 11dfa568e2..11dfa568e2 100644
--- a/var/spack/packages/dri2proto/package.py
+++ b/var/spack/repos/builtin/packages/dri2proto/package.py
diff --git a/var/spack/packages/dtcmp/package.py b/var/spack/repos/builtin/packages/dtcmp/package.py
index 9d940583c1..9d940583c1 100644
--- a/var/spack/packages/dtcmp/package.py
+++ b/var/spack/repos/builtin/packages/dtcmp/package.py
diff --git a/var/spack/packages/dyninst/package.py b/var/spack/repos/builtin/packages/dyninst/package.py
index 41ec57dd2f..0111dcbe08 100644
--- a/var/spack/packages/dyninst/package.py
+++ b/var/spack/repos/builtin/packages/dyninst/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -49,14 +49,15 @@ class Dyninst(Package):
with working_dir('spack-build', create=True):
cmake('..',
- '-DBoost_INCLUDE_DIR=%s' % spec['boost'].prefix.include,
- '-DBoost_LIBRARY_DIR=%s' % spec['boost'].prefix.lib,
+ '-DBoost_INCLUDE_DIR=%s' % spec['boost'].prefix.include,
+ '-DBoost_LIBRARY_DIR=%s' % spec['boost'].prefix.lib,
'-DBoost_NO_SYSTEM_PATHS=TRUE',
'-DLIBELF_INCLUDE_DIR=%s' % join_path(libelf.include, 'libelf'),
'-DLIBELF_LIBRARIES=%s' % join_path(libelf.lib, 'libelf.so'),
'-DLIBDWARF_INCLUDE_DIR=%s' % libdwarf.include,
'-DLIBDWARF_LIBRARIES=%s' % join_path(libdwarf.lib, 'libdwarf.so'),
*std_cmake_args)
+
make()
make("install")
diff --git a/var/spack/repos/builtin/packages/eigen/package.py b/var/spack/repos/builtin/packages/eigen/package.py
new file mode 100644
index 0000000000..44ee6819f5
--- /dev/null
+++ b/var/spack/repos/builtin/packages/eigen/package.py
@@ -0,0 +1,68 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by David Beckingsale, david@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Eigen(Package):
+ """
+ Eigen is a C++ template library for linear algebra: matrices, vectors, numerical solvers, and related algorithms
+ """
+
+ homepage = 'http://eigen.tuxfamily.org/'
+ url = 'http://bitbucket.org/eigen/eigen/get/3.2.7.tar.bz2'
+
+ version('3.2.7', 'cc1bacbad97558b97da6b77c9644f184', url='http://bitbucket.org/eigen/eigen/get/3.2.7.tar.bz2')
+
+ variant('debug', default=False, description='Builds the library in debug mode')
+
+ variant('metis', default=True, description='Enables metis backend')
+ variant('scotch', default=True, description='Enables scotch backend')
+ variant('fftw', default=True, description='Enables FFTW backend')
+
+ # TODO : dependency on SuiteSparse, googlehash, superlu, adolc missing
+
+ depends_on('metis', when='+metis')
+ depends_on('scotch', when='+scotch')
+ depends_on('fftw', when='+fftw')
+
+ depends_on('mpfr@2.3.0:') # Eigen 3.2.7 requires at least 2.3.0
+ depends_on('gmp')
+
+ def install(self, spec, prefix):
+
+ options = []
+ options.extend(std_cmake_args)
+
+ build_directory = join_path(self.stage.path, 'spack-build')
+ source_directory = self.stage.source_path
+
+ if '+debug' in spec:
+ options.append('-DCMAKE_BUILD_TYPE:STRING=Debug')
+
+ with working_dir(build_directory, create=True):
+ cmake(source_directory, *options)
+ make()
+ make("install")
diff --git a/var/spack/packages/elfutils/package.py b/var/spack/repos/builtin/packages/elfutils/package.py
index 926d234584..926d234584 100644
--- a/var/spack/packages/elfutils/package.py
+++ b/var/spack/repos/builtin/packages/elfutils/package.py
diff --git a/var/spack/repos/builtin/packages/elpa/package.py b/var/spack/repos/builtin/packages/elpa/package.py
new file mode 100644
index 0000000000..2ade5b0b37
--- /dev/null
+++ b/var/spack/repos/builtin/packages/elpa/package.py
@@ -0,0 +1,55 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Elpa(Package):
+ """
+ Eigenvalue solvers for Petaflop-Applications (ELPA)
+ """
+
+ homepage = 'http://elpa.mpcdf.mpg.de/'
+ url = 'http://elpa.mpcdf.mpg.de/elpa-2015.11.001.tar.gz'
+
+ version('2015.11.001', 'de0f35b7ee7c971fd0dca35c900b87e6', url='http://elpa.mpcdf.mpg.de/elpa-2015.11.001.tar.gz')
+
+ variant('openmp', default=False, description='Activates OpenMP support')
+
+ depends_on('mpi')
+ depends_on('blas')
+ depends_on('lapack')
+ depends_on('scalapack')
+
+ def install(self, spec, prefix):
+
+ options = ["--prefix=%s" % prefix]
+
+ if '+openmp' in spec:
+ options.append("--enable-openmp")
+
+ configure(*options)
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/expat/package.py b/var/spack/repos/builtin/packages/expat/package.py
new file mode 100644
index 0000000000..082da5bf0b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/expat/package.py
@@ -0,0 +1,17 @@
+from spack import *
+
+class Expat(Package):
+ """<eXpat/> is an XML parser library written in C"""
+ homepage = "http://expat.sourceforge.net/"
+ url = "http://downloads.sourceforge.net/project/expat/expat/2.1.0/expat-2.1.0.tar.gz"
+
+ version('2.1.0', 'dd7dab7a5fea97d2a6a43f511449b7cd')
+
+
+ def install(self, spec, prefix):
+
+ with working_dir('spack-build', create=True):
+ cmake('..', *std_cmake_args)
+ make()
+ make('install')
+
diff --git a/var/spack/packages/extrae/package.py b/var/spack/repos/builtin/packages/extrae/package.py
index 3ad4cbaf86..3ad4cbaf86 100644
--- a/var/spack/packages/extrae/package.py
+++ b/var/spack/repos/builtin/packages/extrae/package.py
diff --git a/var/spack/packages/exuberant-ctags/package.py b/var/spack/repos/builtin/packages/exuberant-ctags/package.py
index efd2b541b2..efd2b541b2 100644
--- a/var/spack/packages/exuberant-ctags/package.py
+++ b/var/spack/repos/builtin/packages/exuberant-ctags/package.py
diff --git a/var/spack/repos/builtin/packages/fftw/package.py b/var/spack/repos/builtin/packages/fftw/package.py
new file mode 100644
index 0000000000..4d2b964242
--- /dev/null
+++ b/var/spack/repos/builtin/packages/fftw/package.py
@@ -0,0 +1,75 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+
+from spack import *
+
+
+class Fftw(Package):
+ """
+ FFTW is a C subroutine library for computing the discrete Fourier transform (DFT) in one or more dimensions, of
+ arbitrary input size, and of both real and complex data (as well as of even/odd data, i.e. the discrete cosine/sine
+ transforms or DCT/DST). We believe that FFTW, which is free software, should become the FFT library of choice for
+ most applications.
+ """
+ homepage = "http://www.fftw.org"
+ url = "http://www.fftw.org/fftw-3.3.4.tar.gz"
+
+ version('3.3.4', '2edab8c06b24feeb3b82bbb3ebf3e7b3')
+
+ variant('float', default=True, description='Produces a single precision version of the library')
+ variant('long_double', default=True, description='Produces a long double precision version of the library')
+ variant('quad', default=False, description='Produces a quad precision version of the library (works only with GCC and libquadmath)')
+
+ variant('mpi', default=False, description='Activate MPI support')
+
+ depends_on('mpi', when='+mpi')
+
+ def install(self, spec, prefix):
+ options = ['--prefix=%s' % prefix,
+ '--enable-shared',
+ '--enable-threads',
+ '--enable-openmp']
+ if not self.compiler.f77 or not self.compiler.fc:
+ options.append("--disable-fortran")
+ if '+mpi' in spec:
+ options.append('--enable-mpi')
+
+ configure(*options)
+ make()
+ make("install")
+
+ if '+float' in spec:
+ configure('--enable-float', *options)
+ make()
+ make("install")
+ if '+long_double' in spec:
+ configure('--enable-long-double', *options)
+ make()
+ make("install")
+ if '+quad' in spec:
+ configure('--enable-quad-precision', *options)
+ make()
+ make("install")
diff --git a/var/spack/packages/fish/package.py b/var/spack/repos/builtin/packages/fish/package.py
index 1225558705..1225558705 100644
--- a/var/spack/packages/fish/package.py
+++ b/var/spack/repos/builtin/packages/fish/package.py
diff --git a/var/spack/packages/flex/package.py b/var/spack/repos/builtin/packages/flex/package.py
index b065904912..b065904912 100644
--- a/var/spack/packages/flex/package.py
+++ b/var/spack/repos/builtin/packages/flex/package.py
diff --git a/var/spack/repos/builtin/packages/fltk/font.patch b/var/spack/repos/builtin/packages/fltk/font.patch
new file mode 100644
index 0000000000..7706a1b4ee
--- /dev/null
+++ b/var/spack/repos/builtin/packages/fltk/font.patch
@@ -0,0 +1,44 @@
+Index: FL/x.H
+===================================================================
+--- a/FL/x.H (revision 10476)
++++ b/FL/x.H (working copy)
+@@ -132,6 +132,7 @@
+ XFontStruct *ptr;
+ };
+ extern FL_EXPORT Fl_XFont_On_Demand fl_xfont;
++extern FL_EXPORT XFontStruct* fl_core_font();
+
+ // this object contains all X-specific stuff about a window:
+ // Warning: this object is highly subject to change!
+Index: src/fl_font.cxx
+===================================================================
+--- a/src/fl_font.cxx (revision 10476)
++++ b/src/fl_font.cxx (working copy)
+@@ -55,6 +55,14 @@
+ # include "fl_font_x.cxx"
+ #endif // WIN32
+
++#ifdef WIN32
++#elif defined(__APPLE__)
++#else
++XFontStruct *fl_core_font()
++{
++ return fl_xfont.value();
++}
++#endif
+
+ double fl_width(const char* c) {
+ if (c) return fl_width(c, (int) strlen(c));
+Index: src/gl_draw.cxx
+===================================================================
+--- a/src/gl_draw.cxx (revision 10476)
++++ b/src/gl_draw.cxx (working copy)
+@@ -84,7 +84,7 @@
+ * then sorting through them at draw time (for normal X rendering) to find which one can
+ * render the current glyph... But for now, just use the first font in the list for GL...
+ */
+- XFontStruct *font = fl_xfont;
++ XFontStruct *font = fl_core_font();
+ int base = font->min_char_or_byte2;
+ int count = font->max_char_or_byte2-base+1;
+ fl_fontsize->listbase = glGenLists(256);
diff --git a/var/spack/repos/builtin/packages/fltk/package.py b/var/spack/repos/builtin/packages/fltk/package.py
new file mode 100644
index 0000000000..0b462f83f8
--- /dev/null
+++ b/var/spack/repos/builtin/packages/fltk/package.py
@@ -0,0 +1,58 @@
+##############################################################################
+# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Fltk(Package):
+ """
+ FLTK (pronounced "fulltick") is a cross-platform C++ GUI toolkit for UNIX/Linux (X11), Microsoft Windows, and
+ MacOS X. FLTK provides modern GUI functionality without the bloat and supports 3D graphics via OpenGL and its
+ built-in GLUT emulation.
+
+ FLTK is designed to be small and modular enough to be statically linked, but works fine as a shared library. FLTK
+ also includes an excellent UI builder called FLUID that can be used to create applications in minutes.
+ """
+ homepage = 'http://www.fltk.org/'
+ url = 'http://fltk.org/pub/fltk/1.3.3/fltk-1.3.3-source.tar.gz'
+
+ version('1.3.3', '9ccdb0d19dc104b87179bd9fd10822e3')
+
+ patch('font.patch', when='@1.3.3')
+
+ variant('shared', default=True, description='Enables the build of shared libraries')
+
+ def install(self, spec, prefix):
+ options = ['--prefix=%s' % prefix,
+ '--enable-localjpeg',
+ '--enable-localpng',
+ '--enable-localzlib']
+
+ if '+shared' in spec:
+ options.append('--enable-shared')
+
+ # FLTK needs to be built in-source
+ configure(*options)
+ make()
+ make('install')
diff --git a/var/spack/packages/flux/package.py b/var/spack/repos/builtin/packages/flux/package.py
index c128f46be8..dc4e0594c5 100644
--- a/var/spack/packages/flux/package.py
+++ b/var/spack/repos/builtin/packages/flux/package.py
@@ -12,20 +12,23 @@ class Flux(Package):
# Also needs autotools, but should use the system version if available
depends_on("zeromq@4.0.4:")
depends_on("czmq@2.2:")
+ depends_on("hwloc")
depends_on("lua@5.1:5.1.99")
depends_on("munge")
depends_on("libjson-c")
depends_on("libxslt")
+ depends_on("python")
+ depends_on("py-cffi")
+
# TODO: This provides a catalog, hacked with environment below for now
depends_on("docbook-xml")
depends_on("asciidoc")
- depends_on("python")
- depends_on("py-cffi")
def install(self, spec, prefix):
# Bootstrap with autotools
bash = which('bash')
bash('./autogen.sh')
+ bash('./autogen.sh')  # yes, twice, intentionally
# Fix asciidoc dependency on xml style sheets and whatnot
os.environ['XML_CATALOG_FILES'] = os.path.join(spec['docbook-xml'].prefix,
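
XML_CATALOG_FILES is the standard libxml2 catalog variable; pointing it at the docbook-xml prefix lets asciidoc resolve the DocBook DTDs locally instead of over the network, which is the "hacked with environment" workaround the comment above refers to.
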
diff --git a/var/spack/packages/fontconfig/package.py b/var/spack/repos/builtin/packages/fontconfig/package.py
index 89b13604e8..517c9d1813 100644
--- a/var/spack/packages/fontconfig/package.py
+++ b/var/spack/repos/builtin/packages/fontconfig/package.py
@@ -8,9 +8,10 @@ class Fontconfig(Package):
version('2.11.1' , 'e75e303b4f7756c2b16203a57ac87eba')
depends_on('freetype')
+ depends_on('libxml2')
def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
+ configure("--prefix=%s" % prefix, "--enable-libxml2")
make()
make("install")
diff --git a/var/spack/packages/freetype/package.py b/var/spack/repos/builtin/packages/freetype/package.py
index 0309b858a1..0309b858a1 100644
--- a/var/spack/packages/freetype/package.py
+++ b/var/spack/repos/builtin/packages/freetype/package.py
diff --git a/var/spack/packages/gasnet/package.py b/var/spack/repos/builtin/packages/gasnet/package.py
index 705961d1de..705961d1de 100644
--- a/var/spack/packages/gasnet/package.py
+++ b/var/spack/repos/builtin/packages/gasnet/package.py
diff --git a/var/spack/packages/gcc/package.py b/var/spack/repos/builtin/packages/gcc/package.py
index 5e3d1a3efa..3e5895cfb8 100644
--- a/var/spack/packages/gcc/package.py
+++ b/var/spack/repos/builtin/packages/gcc/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -36,21 +36,29 @@ class Gcc(Package):
list_url = 'http://open-source-box.org/gcc/'
list_depth = 2
+ DEPENDS_ON_ISL_PREDICATE = '@5.0:'
+
+ version('5.3.0', 'c9616fd448f980259c31de613e575719')
+ version('5.2.0', 'a51bcfeb3da7dd4c623e27207ed43467')
+ version('4.9.3', '6f831b4d251872736e8e9cc09746f327')
version('4.9.2', '4df8ee253b7f3863ad0b86359cd39c43')
version('4.9.1', 'fddf71348546af523353bd43d34919c1')
+ version('4.8.5', '80d2c2982a3392bb0b89673ff136e223')
version('4.8.4', '5a84a30839b2aca22a2d723de2a626ec')
version('4.7.4', '4c696da46297de6ae77a82797d2abe28')
version('4.6.4', 'b407a3d1480c11667f293bfb1f17d1a4')
version('4.5.4', '27e459c2566b8209ab064570e1b378f7')
+ variant('gold', default=True, description="Build the gold linker plugin for ld-based LTO")
+
depends_on("mpfr")
depends_on("gmp")
depends_on("mpc") # when @4.5:
- depends_on("libelf")
- depends_on("binutils")
+ depends_on("binutils~libiberty", when='~gold')
+ depends_on("binutils~libiberty+gold", when='+gold')
# Save these until we can do optional deps.
- #depends_on("isl")
+ depends_on("isl", when=DEPENDS_ON_ISL_PREDICATE)
#depends_on("ppl")
#depends_on("cloog")
@@ -62,25 +70,38 @@ class Gcc(Package):
if spec.satisfies("@4.7.1:"):
enabled_languages.add('go')
- # Rest of install is straightforward.
- configure("--prefix=%s" % prefix,
- "--libdir=%s/lib64" % prefix,
- "--disable-multilib",
- "--enable-languages=" + ','.join(enabled_languages),
- "--with-mpc=%s" % spec['mpc'].prefix,
- "--with-mpfr=%s" % spec['mpfr'].prefix,
- "--with-gmp=%s" % spec['gmp'].prefix,
- "--with-libelf=%s" % spec['libelf'].prefix,
- "--with-stage1-ldflags=%s" % self.rpath_args,
- "--with-boot-ldflags=%s" % self.rpath_args,
- "--enable-lto",
- "--with-gnu-ld",
- "--with-ld=%s/bin/ld" % spec['binutils'].prefix,
- "--with-gnu-as",
- "--with-as=%s/bin/as" % spec['binutils'].prefix,
- "--with-quad")
- make()
- make("install")
+ # Generic options to compile GCC
+ options = ["--prefix=%s" % prefix,
+ "--libdir=%s/lib64" % prefix,
+ "--disable-multilib",
+ "--enable-languages=" + ','.join(enabled_languages),
+ "--with-mpc=%s" % spec['mpc'].prefix,
+ "--with-mpfr=%s" % spec['mpfr'].prefix,
+ "--with-gmp=%s" % spec['gmp'].prefix,
+ "--enable-lto",
+ "--with-gnu-ld",
+ "--with-gnu-as",
+ "--with-quad"]
+ # Binutils
+ static_bootstrap_flags = "-static-libstdc++ -static-libgcc"
+ binutils_options = ["--with-sysroot=/",
+ "--with-stage1-ldflags=%s %s" % (self.rpath_args, static_bootstrap_flags),
+ "--with-boot-ldflags=%s %s" % (self.rpath_args, static_bootstrap_flags),
+ "--with-ld=%s/bin/ld" % spec['binutils'].prefix,
+ "--with-as=%s/bin/as" % spec['binutils'].prefix]
+ options.extend(binutils_options)
+ # Isl
+ if spec.satisfies(Gcc.DEPENDS_ON_ISL_PREDICATE):
+ isl_options = ["--with-isl=%s" % spec['isl'].prefix]
+ options.extend(isl_options)
+
+ build_dir = join_path(self.stage.path, 'spack-build')
+ configure = Executable(join_path(self.stage.source_path, 'configure'))
+ with working_dir(build_dir, create=True):
+ # Rest of install is straightforward.
+ configure(*options)
+ make()
+ make("install")
self.write_rpath_specs()
@@ -100,13 +121,11 @@ class Gcc(Package):
return
gcc = Executable(join_path(self.prefix.bin, 'gcc'))
- lines = gcc('-dumpspecs', return_output=True).split("\n")
- for i, line in enumerate(lines):
- if line.startswith("*link:"):
- specs_file = join_path(self.spec_dir, 'specs')
- with closing(open(specs_file, 'w')) as out:
- out.write(lines[i] + "\n")
- out.write("-rpath %s/lib:%s/lib64 \\\n"
- % (self.prefix, self.prefix))
- out.write(lines[i+1] + "\n")
- set_install_permissions(specs_file)
+ lines = gcc('-dumpspecs', output=str).strip().split("\n")
+ specs_file = join_path(self.spec_dir, 'specs')
+ with closing(open(specs_file, 'w')) as out:
+ for line in lines:
+ out.write(line + "\n")
+ if line.startswith("*link:"):
+ out.write("-rpath %s/lib:%s/lib64 \\\n"% (self.prefix, self.prefix))
+ set_install_permissions(specs_file)
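
Besides restructuring the loop, the specs hunk migrates from the removed return_output=True keyword to Spack's output=str idiom for capturing a command's stdout. In isolation (a minimal sketch):

    gcc = Executable(join_path(self.prefix.bin, 'gcc'))
    dumped = gcc('-dumpspecs', output=str)  # stdout comes back as a str
    lines = dumped.strip().split("\n")
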
diff --git a/var/spack/repos/builtin/packages/gdb/package.py b/var/spack/repos/builtin/packages/gdb/package.py
new file mode 100644
index 0000000000..dd02b426b9
--- /dev/null
+++ b/var/spack/repos/builtin/packages/gdb/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Gdb(Package):
+ """
+ GDB, the GNU Project debugger, allows you to see what is going on `inside' another program while it executes
+ -- or what another program was doing at the moment it crashed.
+ """
+ homepage = "https://www.gnu.org/software/gdb"
+ url = "http://ftp.gnu.org/gnu/gdb/gdb-7.10.tar.gz"
+
+ version('7.10.1', 'b93a2721393e5fa226375b42d567d90b')
+ version('7.10', 'fa6827ad0fd2be1daa418abb11a54d86')
+ version('7.9.1', 'f3b97de919a9dba84490b2e076ec4cb0')
+ version('7.9', '8f8ced422fe462a00e0135a643544f17')
+ version('7.8.2', '8b0ea8b3559d3d90b3ff4952f0aeafbc')
+
+ depends_on('texinfo')
+
+ def install(self, spec, prefix):
+ configure('--prefix=%s' % prefix)
+ make()
+ make("install")
diff --git a/var/spack/packages/gdk-pixbuf/package.py b/var/spack/repos/builtin/packages/gdk-pixbuf/package.py
index 14a5569984..14a5569984 100644
--- a/var/spack/packages/gdk-pixbuf/package.py
+++ b/var/spack/repos/builtin/packages/gdk-pixbuf/package.py
diff --git a/var/spack/packages/geos/package.py b/var/spack/repos/builtin/packages/geos/package.py
index 4a2657e32f..4a2657e32f 100644
--- a/var/spack/packages/geos/package.py
+++ b/var/spack/repos/builtin/packages/geos/package.py
diff --git a/var/spack/packages/gflags/package.py b/var/spack/repos/builtin/packages/gflags/package.py
index 62dd80a094..62dd80a094 100644
--- a/var/spack/packages/gflags/package.py
+++ b/var/spack/repos/builtin/packages/gflags/package.py
diff --git a/var/spack/packages/ghostscript/package.py b/var/spack/repos/builtin/packages/ghostscript/package.py
index 0ab49d425f..0ab49d425f 100644
--- a/var/spack/packages/ghostscript/package.py
+++ b/var/spack/repos/builtin/packages/ghostscript/package.py
diff --git a/var/spack/repos/builtin/packages/git/package.py b/var/spack/repos/builtin/packages/git/package.py
new file mode 100644
index 0000000000..ddc5078c4d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/git/package.py
@@ -0,0 +1,59 @@
+from spack import *
+
+class Git(Package):
+ """Git is a free and open source distributed version control
+ system designed to handle everything from small to very large
+ projects with speed and efficiency."""
+ homepage = "http://git-scm.com"
+ url = "https://www.kernel.org/pub/software/scm/git/git-2.2.1.tar.gz"
+
+ version('2.6.3', 'b711be7628a4a2c25f38d859ee81b423')
+ version('2.6.2', 'da293290da69f45a86a311ad3cd43dc8')
+ version('2.6.1', '4c62ee9c5991fe93d99cf2a6b68397fd')
+ version('2.6.0', 'eb76a07148d94802a1745d759716a57e')
+ version('2.5.4', '3eca2390cf1fa698b48e2a233563a76b')
+ version('2.2.1', 'ff41fdb094eed1ec430aed8ee9b9849c')
+
+
+ # Git compiles with curl support on by default, but if your system
+ # does not have it you will not be able to clone https repos
+ variant("curl", default=False, description="Build with curl support for cloning https repos")
+
+ # Git compiles with expat support on by default, but if your system
+ # does not have it you will not be able to push to https repos
+ variant("expat", default=False, description="Build with expat support for pushing to https repos")
+
+ depends_on("openssl")
+ depends_on("curl", when="+curl")
+ depends_on("expat", when="+expat")
+
+ # Use system perl for now.
+ # depends_on("perl")
+ # depends_on("pcre")
+
+ depends_on("zlib")
+
+ def install(self, spec, prefix):
+ configure_args = [
+ "--prefix=%s" % prefix,
+ "--without-pcre",
+ "--with-openssl=%s" % spec['openssl'].prefix,
+ "--with-zlib=%s" % spec['zlib'].prefix
+ ]
+
+ if '+curl' in spec:
+ configure_args.append("--with-curl=%s" % spec['curl'].prefix)
+
+ if '+expat' in spec:
+ configure_args.append("--with-expat=%s" % spec['expat'].prefix)
+
+ configure(*configure_args)
+ make()
+ make("install")
diff --git a/var/spack/packages/glib/package.py b/var/spack/repos/builtin/packages/glib/package.py
index 178f0b9df5..67ead5f941 100644
--- a/var/spack/packages/glib/package.py
+++ b/var/spack/repos/builtin/packages/glib/package.py
@@ -11,8 +11,9 @@ class Glib(Package):
version('2.42.1', '89c4119e50e767d3532158605ee9121a')
depends_on("libffi")
+ depends_on("zlib")
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
make()
- make("install")
+ make("install", parallel=False)
diff --git a/var/spack/repos/builtin/packages/glm/package.py b/var/spack/repos/builtin/packages/glm/package.py
new file mode 100644
index 0000000000..d00c301b4c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/glm/package.py
@@ -0,0 +1,19 @@
+from spack import *
+
+
+class Glm(Package):
+ """
+ OpenGL Mathematics (GLM) is a header only C++ mathematics library for graphics software based on
+ the OpenGL Shading Language (GLSL) specification.
+ """
+
+ homepage = "https://github.com/g-truc/glm"
+ url = "https://github.com/g-truc/glm/archive/0.9.7.1.tar.gz"
+
+ version('0.9.7.1', '61af6639cdf652d1cdd7117190afced8')
+
+ def install(self, spec, prefix):
+ with working_dir('spack-build', create=True):
+ cmake('..', *std_cmake_args)
+ make()
+ make("install")
diff --git a/var/spack/packages/global/package.py b/var/spack/repos/builtin/packages/global/package.py
index a77b1bdc09..e8f06516d9 100644
--- a/var/spack/packages/global/package.py
+++ b/var/spack/repos/builtin/packages/global/package.py
@@ -4,7 +4,7 @@ import os
class Global(Package):
""" The Gnu Global tagging system """
- # FIXME: add a proper url for your package's homepage here.
+
homepage = "http://www.gnu.org/software/global"
url = "http://tamacom.com/global/global-6.5.tar.gz"
@@ -13,9 +13,9 @@ class Global(Package):
depends_on('exuberant-ctags')
def install(self, spec, prefix):
- config_args = ['--prefix={}'.format(prefix)]
+ config_args = ['--prefix={0}'.format(prefix)]
- config_args.append('--with-exuberant-ctags={}'.format(
+ config_args.append('--with-exuberant-ctags={0}'.format(
os.path.join(spec['exuberant-ctags'].prefix.bin, 'ctags')))
configure(*config_args)
diff --git a/var/spack/packages/glog/package.py b/var/spack/repos/builtin/packages/glog/package.py
index d73386b394..d73386b394 100644
--- a/var/spack/packages/glog/package.py
+++ b/var/spack/repos/builtin/packages/glog/package.py
diff --git a/var/spack/repos/builtin/packages/glpk/package.py b/var/spack/repos/builtin/packages/glpk/package.py
new file mode 100644
index 0000000000..855f459fb3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/glpk/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Glpk(Package):
+ """
+ The GLPK (GNU Linear Programming Kit) package is intended for solving large-scale linear programming (LP), mixed
+ integer programming (MIP), and other related problems. It is a set of routines written in ANSI C and organized in
+ the form of a callable library
+ """
+ homepage = "https://www.gnu.org/software/glpk"
+ url = "http://ftp.gnu.org/gnu/glpk/glpk-4.57.tar.gz"
+
+ version('4.57', '237531a54f73155842f8defe51aedb0f')
+
+ variant('gmp', default=False, description='Activates support for GMP library')
+
+ depends_on('gmp', when='+gmp')
+
+ def install(self, spec, prefix):
+
+ options = ['--prefix=%s' % prefix]
+
+ if '+gmp' in spec:
+ options.append('--with-gmp')
+
+ configure(*options)
+ make()
+ make("install")
diff --git a/var/spack/packages/gmp/package.py b/var/spack/repos/builtin/packages/gmp/package.py
index d6af821b34..fe13de3b95 100644
--- a/var/spack/packages/gmp/package.py
+++ b/var/spack/repos/builtin/packages/gmp/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -31,6 +31,7 @@ class Gmp(Package):
homepage = "https://gmplib.org"
url = "https://gmplib.org/download/gmp/gmp-6.0.0a.tar.bz2"
+ version('6.1.0' , '86ee6e54ebfc4a90b643a65e402c4048')
version('6.0.0a', 'b7ff2d88cae7f8085bd5006096eed470')
version('6.0.0' , '6ef5869ae735db9995619135bd856b84')
diff --git a/var/spack/repos/builtin/packages/gmsh/package.py b/var/spack/repos/builtin/packages/gmsh/package.py
new file mode 100644
index 0000000000..9d759303cb
--- /dev/null
+++ b/var/spack/repos/builtin/packages/gmsh/package.py
@@ -0,0 +1,84 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Gmsh(Package):
+ """
+ Gmsh is a free 3D finite element grid generator with a built-in CAD engine and post-processor. Its design goal is
+ to provide a fast, light and user-friendly meshing tool with parametric input and advanced visualization
+ capabilities. Gmsh is built around four modules: geometry, mesh, solver and post-processing. The specification of
+ any input to these modules is done either interactively using the graphical user interface or in ASCII text files
+ using Gmsh's own scripting language.
+ """
+ homepage = 'http://gmsh.info'
+ url = 'http://gmsh.info/src/gmsh-2.11.0-source.tgz'
+
+ version('2.11.0', 'f15b6e7ac9ca649c9a74440e1259d0db')
+
+ # FIXME : Misses dependencies on gmm, PetsC, TetGen
+
+ variant('shared', default=True, description='Enables the build of shared libraries')
+ variant('debug', default=False, description='Builds the library in debug mode')
+ variant('mpi', default=False, description='Builds MPI support for parser and solver')
+ variant('fltk', default=False, description='Enables the build of the FLTK GUI')
+ variant('hdf5', default=False, description='Enables HDF5 support')
+ variant('compression', default=True, description='Enables IO compression through zlib')
+
+ depends_on('blas')
+ depends_on('lapack')
+ depends_on('gmp')
+ depends_on('mpi', when='+mpi')
+ depends_on('fltk', when='+fltk') # Assumes OpenGL with GLU is already provided by the system
+ depends_on('hdf5', when='+hdf5')
+ depends_on('zlib', when='+compression')
+
+ def install(self, spec, prefix):
+
+ options = []
+ options.extend(std_cmake_args)
+
+ build_directory = join_path(self.stage.path, 'spack-build')
+ source_directory = self.stage.source_path
+
+ if '+shared' in spec:
+ options.extend(['-DENABLE_BUILD_SHARED:BOOL=ON',
+ '-DENABLE_BUILD_DYNAMIC:BOOL=ON']) # Builds dynamic executable and installs shared library
+ else:
+ options.append('-DENABLE_BUILD_LIB:BOOL=ON') # Builds and installs static library
+
+ if '+debug' in spec:
+ options.append('-DCMAKE_BUILD_TYPE:STRING=Debug')
+
+ if '+mpi' in spec:
+ options.append('-DENABLE_MPI:BOOL=ON')
+
+ if '+compression' in spec:
+ options.append('-DENABLE_COMPRESSED_IO:BOOL=ON')
+
+ with working_dir(build_directory, create=True):
+ cmake(source_directory, *options)
+ make()
+ make('install')
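
Since every optional feature above maps one variant to one CMake flag, feature sets compose directly on the command line, e.g. spack install gmsh+mpi~fltk~hdf5 for a GUI-less MPI build.
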
diff --git a/var/spack/repos/builtin/packages/gnuplot/package.py b/var/spack/repos/builtin/packages/gnuplot/package.py
new file mode 100644
index 0000000000..71c09bd43d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/gnuplot/package.py
@@ -0,0 +1,61 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+import os
+
+class Gnuplot(Package):
+ """
+ Gnuplot is a portable command-line driven graphing utility for Linux, OS/2, MS Windows, OSX, VMS, and many other
+ platforms. The source code is copyrighted but freely distributed (i.e., you don't have to pay for it). It was
+ originally created to allow scientists and students to visualize mathematical functions and data interactively,
+ but has grown to support many non-interactive uses such as web scripting. It is also used as a plotting engine by
+ third-party applications like Octave. Gnuplot has been supported and under active development since 1986
+ """
+ homepage = "http://www.gnuplot.info"
+ url = "http://downloads.sourceforge.net/project/gnuplot/gnuplot/5.0.1/gnuplot-5.0.1.tar.gz"
+
+ version('5.0.1', '79b4f9e203728f76b60b28bcd402d3c7')
+
+ depends_on('readline')
+ depends_on('libcerf')
+ depends_on('libgd')
+ depends_on('cairo')
+ depends_on('pango')
+ depends_on('wx', when='+wx')
+
+ variant('wx', default=False, description='Activates wxWidgets terminal')
+
+ def install(self, spec, prefix):
+ # It seems there's an open bug for wxWidgets support
+ # See : http://sourceforge.net/p/gnuplot/bugs/1694/
+ os.environ['TERMLIBS'] = '-lX11'
+
+ options = ['--prefix=%s' % prefix]
+
+ configure(*options)
+ make()
+ make("install")
diff --git a/var/spack/packages/gnutls/package.py b/var/spack/repos/builtin/packages/gnutls/package.py
index cf57a24a6d..cf57a24a6d 100644
--- a/var/spack/packages/gnutls/package.py
+++ b/var/spack/repos/builtin/packages/gnutls/package.py
diff --git a/var/spack/packages/gperf/package.py b/var/spack/repos/builtin/packages/gperf/package.py
index 32551b67b4..32551b67b4 100644
--- a/var/spack/packages/gperf/package.py
+++ b/var/spack/repos/builtin/packages/gperf/package.py
diff --git a/var/spack/packages/gperftools/package.py b/var/spack/repos/builtin/packages/gperftools/package.py
index 8900462324..22b2e6c424 100644
--- a/var/spack/packages/gperftools/package.py
+++ b/var/spack/repos/builtin/packages/gperftools/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -30,8 +30,11 @@ class Gperftools(Package):
homepage = "https://code.google.com/p/gperftools"
url = "https://googledrive.com/host/0B6NtGsLhIcf7MWxMMF9JdTN3UVk/gperftools-2.3.tar.gz"
+ version('2.4', '2171cea3bbe053036fb5d5d25176a160', url="https://github.com/gperftools/gperftools/releases/download/gperftools-2.4/gperftools-2.4.tar.gz")
version('2.3', 'f54dd119f0e46ac1f13264f8d97adf90', url="https://googledrive.com/host/0B6NtGsLhIcf7MWxMMF9JdTN3UVk/gperftools-2.3.tar.gz")
+ depends_on("libunwind")
+
def install(self, spec, prefix):
configure("--prefix=" + prefix)
make()
diff --git a/var/spack/packages/graphlib/package.py b/var/spack/repos/builtin/packages/graphlib/package.py
index ddac0b2b66..ddac0b2b66 100644
--- a/var/spack/packages/graphlib/package.py
+++ b/var/spack/repos/builtin/packages/graphlib/package.py
diff --git a/var/spack/packages/graphviz/package.py b/var/spack/repos/builtin/packages/graphviz/package.py
index 7af7da1881..7af7da1881 100644
--- a/var/spack/packages/graphviz/package.py
+++ b/var/spack/repos/builtin/packages/graphviz/package.py
diff --git a/var/spack/repos/builtin/packages/gsl/package.py b/var/spack/repos/builtin/packages/gsl/package.py
new file mode 100644
index 0000000000..789eb49d85
--- /dev/null
+++ b/var/spack/repos/builtin/packages/gsl/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Gsl(Package):
+ """
+ The GNU Scientific Library (GSL) is a numerical library for C and C++ programmers. It is free software under the
+ GNU General Public License. The library provides a wide range of mathematical routines such as random number
+ generators, special functions and least-squares fitting. There are over 1000 functions in total with an extensive
+ test suite.
+ """
+ homepage = "http://www.gnu.org/software/gsl"
+ url = "http://mirror.switch.ch/ftp/mirror/gnu/gsl/gsl-2.1.tar.gz"
+
+ version('2.1' , 'd8f70abafd3e9f0bae03c52d1f4e8de5')
+ version('2.0' , 'ae44cdfed78ece40e73411b63a78c375')
+ version('1.16', 'e49a664db13d81c968415cd53f62bc8b')
+
+ def install(self, spec, prefix):
+ configure('--prefix=%s' % prefix)
+ make()
+ make("install")
diff --git a/var/spack/packages/gtkplus/package.py b/var/spack/repos/builtin/packages/gtkplus/package.py
index 0ebc7100de..0ebc7100de 100644
--- a/var/spack/packages/gtkplus/package.py
+++ b/var/spack/repos/builtin/packages/gtkplus/package.py
diff --git a/var/spack/packages/harfbuzz/package.py b/var/spack/repos/builtin/packages/harfbuzz/package.py
index ed7c42a909..ed7c42a909 100644
--- a/var/spack/packages/harfbuzz/package.py
+++ b/var/spack/repos/builtin/packages/harfbuzz/package.py
diff --git a/var/spack/repos/builtin/packages/hdf5/package.py b/var/spack/repos/builtin/packages/hdf5/package.py
new file mode 100644
index 0000000000..ac78d8e961
--- /dev/null
+++ b/var/spack/repos/builtin/packages/hdf5/package.py
@@ -0,0 +1,130 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Hdf5(Package):
+ """HDF5 is a data model, library, and file format for storing and managing
+ data. It supports an unlimited variety of datatypes, and is designed for
+ flexible and efficient I/O and for high volume and complex data.
+ """
+
+ homepage = "http://www.hdfgroup.org/HDF5/"
+ url = "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-1.8.13/src/hdf5-1.8.13.tar.gz"
+ list_url = "http://www.hdfgroup.org/ftp/HDF5/releases"
+ list_depth = 3
+
+ version('1.8.16', 'b8ed9a36ae142317f88b0c7ef4b9c618')
+ version('1.8.15', '03cccb5b33dbe975fdcd8ae9dc021f24')
+ version('1.8.13', 'c03426e9e77d7766944654280b467289')
+
+ variant('debug', default=False, description='Builds a debug version of the library')
+
+ variant('cxx', default=True, description='Enable C++ support')
+ variant('fortran', default=True, description='Enable Fortran support')
+ variant('unsupported', default=False, description='Enables unsupported configuration options')
+
+ variant('mpi', default=False, description='Enable MPI support')
+ variant('threadsafe', default=False, description='Enable thread-safe capabilities')
+
+ depends_on("mpi", when='+mpi')
+ depends_on("zlib")
+
+ def validate(self, spec):
+ """
+ Checks if incompatible variants have been activated at the same time
+
+ :param spec: spec of the package
+ :raises RuntimeError: in case of inconsistencies
+ """
+ if '+fortran' in spec and not self.compiler.fc:
+ msg = 'cannot build a fortran variant without a fortran compiler'
+ raise RuntimeError(msg)
+
+ if '+threadsafe' in spec and ('+cxx' in spec or '+fortran' in spec):
+ raise RuntimeError("cannot use variant +threadsafe with either +cxx or +fortran")
+
+ def install(self, spec, prefix):
+ self.validate(spec)
+ # Handle compilation after spec validation
+ extra_args = []
+ if '+debug' in spec:
+ extra_args.append('--enable-debug=all')
+ else:
+ extra_args.append('--enable-production')
+
+ if '+unsupported' in spec:
+ extra_args.append("--enable-unsupported")
+
+ if '+cxx' in spec:
+ extra_args.append('--enable-cxx')
+
+ if '+fortran' in spec:
+ extra_args.extend([
+ '--enable-fortran',
+ '--enable-fortran2003'
+ ])
+
+ if '+mpi' in spec:
+ # The HDF5 configure script warns if cxx and mpi are enabled
+ # together. There doesn't seem to be a real reason for this, except
+ # that parts of the MPI interface are not accessible via the C++
+ # interface. Since they are still accessible via the C interface,
+ # this is not actually a problem.
+ extra_args.extend([
+ "--enable-parallel",
+ "CC=%s" % spec['mpi'].prefix.bin + "/mpicc",
+ ])
+
+ if '+cxx' in spec:
+ extra_args.append("CXX=%s" % spec['mpi'].prefix.bin + "/mpic++")
+
+ if '+fortran' in spec:
+ extra_args.append("FC=%s" % spec['mpi'].prefix.bin + "/mpifort")
+
+ if '+threadsafe' in spec:
+ extra_args.extend([
+ '--enable-threadsafe',
+ '--disable-hl',
+ ])
+
+ configure(
+ "--prefix=%s" % prefix,
+ "--with-zlib=%s" % spec['zlib'].prefix,
+ "--enable-shared", # TODO : this should be enabled by default, remove it?
+ *extra_args)
+ make()
+ make("install")
+
+ def url_for_version(self, version):
+ v = str(version)
+
+ if version == Version("1.2.2"):
+ return "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-" + v + ".tar.gz"
+ elif version < Version("1.7"):
+ return "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-" + version.up_to(2) + "/hdf5-" + v + ".tar.gz"
+ else:
+ return "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-" + v + "/src/hdf5-" + v + ".tar.gz"
diff --git a/var/spack/packages/hwloc/package.py b/var/spack/repos/builtin/packages/hwloc/package.py
index 31a31f376a..7ebede76a3 100644
--- a/var/spack/packages/hwloc/package.py
+++ b/var/spack/repos/builtin/packages/hwloc/package.py
@@ -15,11 +15,16 @@ class Hwloc(Package):
homepage = "http://www.open-mpi.org/projects/hwloc/"
url = "http://www.open-mpi.org/software/hwloc/v1.9/downloads/hwloc-1.9.tar.gz"
+ version('1.11.2', '486169cbe111cdea57be12638828ebbf',
+ url='http://www.open-mpi.org/software/hwloc/v1.11/downloads/hwloc-1.11.2.tar.bz2')
+ version('1.11.1', '002742efd3a8431f98d6315365a2b543',
+ url='http://www.open-mpi.org/software/hwloc/v1.11/downloads/hwloc-1.11.1.tar.bz2')
version('1.9', '1f9f9155682fe8946a97c08896109508')
+ depends_on('libpciaccess')
+
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
make()
make("install")
-
diff --git a/var/spack/packages/hypre/package.py b/var/spack/repos/builtin/packages/hypre/package.py
index 198b3f00dc..0f7f14dd89 100644
--- a/var/spack/packages/hypre/package.py
+++ b/var/spack/repos/builtin/packages/hypre/package.py
@@ -5,8 +5,8 @@ class Hypre(Package):
features parallel multigrid methods for both structured and
unstructured grid problems."""
- homepage = "https://computation.llnl.gov/project/linear_solvers/software.php"
- url = "https://computation.llnl.gov/project/linear_solvers/download/hypre-2.10.0b.tar.gz"
+ homepage = "http://computation.llnl.gov/project/linear_solvers/software.php"
+ url = "http://computation.llnl.gov/project/linear_solvers/download/hypre-2.10.0b.tar.gz"
version('2.10.0b', '768be38793a35bb5d055905b271f5b8e')
diff --git a/var/spack/packages/icu/package.py b/var/spack/repos/builtin/packages/icu/package.py
index f256ec5712..f256ec5712 100644
--- a/var/spack/packages/icu/package.py
+++ b/var/spack/repos/builtin/packages/icu/package.py
diff --git a/var/spack/packages/icu4c/package.py b/var/spack/repos/builtin/packages/icu4c/package.py
index 55b44463b2..55b44463b2 100644
--- a/var/spack/packages/icu4c/package.py
+++ b/var/spack/repos/builtin/packages/icu4c/package.py
diff --git a/var/spack/packages/isl/package.py b/var/spack/repos/builtin/packages/isl/package.py
index 836ef3ea40..836ef3ea40 100644
--- a/var/spack/packages/isl/package.py
+++ b/var/spack/repos/builtin/packages/isl/package.py
diff --git a/var/spack/packages/jdk/package.py b/var/spack/repos/builtin/packages/jdk/package.py
index 8f8076dd14..f8f5fc21bd 100644
--- a/var/spack/packages/jdk/package.py
+++ b/var/spack/repos/builtin/packages/jdk/package.py
@@ -14,8 +14,8 @@ class Jdk(Package):
in the form of a binary product aimed at Java developers."""
homepage = "http://www.oracle.com/technetwork/java/javase/downloads/index.html"
- version('8u25-linux-x64', 'e145c03a7edc845215092786bcfba77e',
- url="http://download.oracle.com/otn-pub/java/jdk/8u25-b17/jdk-8u25-linux-x64.tar.gz")
+ version('8u66-linux-x64', '88f31f3d642c3287134297b8c10e61bf',
+ url="http://download.oracle.com/otn-pub/java/jdk/8u66-b17/jdk-8u66-linux-x64.tar.gz")
# Oracle requires that you accept their License Agreement in order
# to access the Java packages in download.oracle.com. In order to
diff --git a/var/spack/repos/builtin/packages/jemalloc/package.py b/var/spack/repos/builtin/packages/jemalloc/package.py
new file mode 100644
index 0000000000..8cec9ea75b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/jemalloc/package.py
@@ -0,0 +1,24 @@
+from spack import *
+
+class Jemalloc(Package):
+ """jemalloc is a general purpose malloc(3) implementation that emphasizes fragmentation avoidance and scalable concurrency support."""
+ homepage = "http://www.canonware.com/jemalloc/"
+ url = "https://github.com/jemalloc/jemalloc/releases/download/4.0.4/jemalloc-4.0.4.tar.bz2"
+
+ version('4.0.4', '687c5cc53b9a7ab711ccd680351ff988')
+
+ variant('stats', default=False, description='Enable heap statistics')
+ variant('prof', default=False, description='Enable heap profiling')
+
+ def install(self, spec, prefix):
+ configure_args = ['--prefix=%s' % prefix,]
+
+ if '+stats' in spec:
+ configure_args.append('--enable-stats')
+ if '+prof' in spec:
+ configure_args.append('--enable-prof')
+
+ configure(*configure_args)
+
+ make()
+ make("install")
diff --git a/var/spack/packages/jpeg/package.py b/var/spack/repos/builtin/packages/jpeg/package.py
index 87820467db..87820467db 100644
--- a/var/spack/packages/jpeg/package.py
+++ b/var/spack/repos/builtin/packages/jpeg/package.py
diff --git a/var/spack/repos/builtin/packages/judy/package.py b/var/spack/repos/builtin/packages/judy/package.py
new file mode 100644
index 0000000000..b8d8701383
--- /dev/null
+++ b/var/spack/repos/builtin/packages/judy/package.py
@@ -0,0 +1,15 @@
+from spack import *
+
+class Judy(Package):
+ """A general-purpose dynamic array, associative array and hash-trie - Judy"""
+ homepage = "http://judy.sourceforge.net/"
+ url = "http://downloads.sourceforge.net/project/judy/judy/Judy-1.0.5/Judy-1.0.5.tar.gz"
+
+ version('1.0.5', '115a0d26302676e962ae2f70ec484a54')
+ parallel = False
+
+ def install(self, spec, prefix):
+ configure('--prefix=%s' % prefix)
+
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/julia/gc.patch b/var/spack/repos/builtin/packages/julia/gc.patch
new file mode 100644
index 0000000000..6db69c6c1b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/julia/gc.patch
@@ -0,0 +1,11 @@
+--- julia/src/gc.c
++++ julia/src/gc.c
+@@ -162,7 +162,7 @@
+ // A region is contiguous storage for up to REGION_PG_COUNT naturally aligned GC_PAGE_SZ pages
+ // It uses a very naive allocator (see malloc_page & free_page)
+ #if defined(_P64) && !defined(_COMPILER_MICROSOFT_)
+-#define REGION_PG_COUNT 16*8*4096 // 8G because virtual memory is cheap
++#define REGION_PG_COUNT 8*4096 // 512M
+ #else
+ #define REGION_PG_COUNT 8*4096 // 512M
+ #endif
diff --git a/var/spack/repos/builtin/packages/julia/package.py b/var/spack/repos/builtin/packages/julia/package.py
new file mode 100644
index 0000000000..6900af38e4
--- /dev/null
+++ b/var/spack/repos/builtin/packages/julia/package.py
@@ -0,0 +1,70 @@
+from spack import *
+import os
+
+class Julia(Package):
+ """The Julia Language: A fresh approach to technical computing"""
+ homepage = "http://julialang.org"
+ url = "http://github.com/JuliaLang/julia/releases/download/v0.4.2/julia-0.4.2.tar.gz"
+
+ version('0.4.3', '7b9f096798fca4bef262a64674bc2b52')
+ version('0.4.2', 'ccfeb4f4090c8b31083f5e1ccb03eb06')
+
+ patch('gc.patch')
+
+ # Build-time dependencies
+ depends_on("cmake @2.8:")
+ # depends_on("awk")
+ # depends_on("m4")
+ # depends_on("pkg-config")
+ depends_on("python @2.6:2.9")
+
+ # I think that Julia requires the dependencies above, but it builds fine (on
+ # my system) without these. We should enable them as necessary.
+
+ # Run-time dependencies
+ # depends_on("arpack")
+ # depends_on("fftw +float")
+ # depends_on("gmp")
+ # depends_on("mpfr")
+ # depends_on("pcre2")
+
+ # ARPACK: Requires BLAS and LAPACK; needs to use the same version as Julia.
+
+ # BLAS and LAPACK: Julia prefers 64-bit versions on 64-bit systems. OpenBLAS
+ # has an option for this; make it available as a variant.
+
+ # FFTW: Something doesn't work when using a pre-installed FFTW library; need
+ # to investigate.
+
+ # GMP, MPFR: Something doesn't work when using a pre-installed GMP/MPFR library;
+ # need to investigate.
+
+ # LLVM: Julia works only with specific versions, and might require patches.
+ # Thus we let Julia install its own LLVM.
+
+ # Other possible dependencies:
+ # USE_SYSTEM_OPENLIBM=0
+ # USE_SYSTEM_OPENSPECFUN=0
+ # USE_SYSTEM_DSFMT=0
+ # USE_SYSTEM_SUITESPARSE=0
+ # USE_SYSTEM_UTF8PROC=0
+ # USE_SYSTEM_LIBGIT2=0
+
+ def install(self, spec, prefix):
+ # Explicitly setting CC, CXX, or FC breaks building libuv, one of
+ # Julia's dependencies. This might be a Darwin-specific problem. Given
+ # how Spack sets up compilers, Julia should still use Spack's compilers,
+ # even if we don't specify them explicitly.
+ options = [#"CC=cc",
+ #"CXX=c++",
+ #"FC=fc",
+ #"USE_SYSTEM_ARPACK=1",
+ #"USE_SYSTEM_FFTW=1",
+ #"USE_SYSTEM_GMP=1",
+ #"USE_SYSTEM_MPFR=1",
+ #TODO "USE_SYSTEM_PCRE=1",
+ "prefix=%s" % prefix]
+ with open('Make.user', 'w') as f:
+ f.write('\n'.join(options) + '\n')
+ make()
+ make("install")
diff --git a/var/spack/packages/launchmon/package.py b/var/spack/repos/builtin/packages/launchmon/package.py
index 6fbe6a68d0..aec2fd6fa7 100644
--- a/var/spack/packages/launchmon/package.py
+++ b/var/spack/repos/builtin/packages/launchmon/package.py
@@ -6,7 +6,7 @@
# Written by Matthew LeGendre, legendre1@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -31,12 +31,22 @@ class Launchmon(Package):
url = "http://downloads.sourceforge.net/project/launchmon/launchmon/1.0.1%20release/launchmon-1.0.1.tar.gz"
version('1.0.1', '2f12465803409fd07f91174a4389eb2b')
- version('1.0.1-2', git='https://github.com/scalability-llnl/launchmon.git', commit='ff7e22424b8f375318951eb1c9282fcbbfa8aadf')
+ version('1.0.1-2', git='https://github.com/llnl/launchmon.git', commit='ff7e22424b8f375318951eb1c9282fcbbfa8aadf')
depends_on('autoconf')
depends_on('automake')
depends_on('libtool')
+
+ def patch(self):
+ # This patch makes libgcrypt compile correctly with newer gcc versions.
+ mf = FileFilter('tools/libgcrypt/tests/Makefile.in')
+ mf.filter(r'(basic_LDADD\s*=\s*.*)', r'\1 -lgpg-error')
+ mf.filter(r'(tsexp_LDADD\s*=\s*.*)', r'\1 -lgpg-error')
+ mf.filter(r'(keygen_LDADD\s*=\s*.*)', r'\1 -lgpg-error')
+ mf.filter(r'(benchmark_LDADD\s*=\s*.*)', r'\1 -lgpg-error')
+
+
def install(self, spec, prefix):
configure(
"--prefix=" + prefix,
diff --git a/var/spack/packages/launchmon/patch.lmon_install_dir b/var/spack/repos/builtin/packages/launchmon/patch.lmon_install_dir
index 8a1d93fdc9..8a1d93fdc9 100644
--- a/var/spack/packages/launchmon/patch.lmon_install_dir
+++ b/var/spack/repos/builtin/packages/launchmon/patch.lmon_install_dir
diff --git a/var/spack/packages/lcms/package.py b/var/spack/repos/builtin/packages/lcms/package.py
index a53c2f997a..a53c2f997a 100644
--- a/var/spack/packages/lcms/package.py
+++ b/var/spack/repos/builtin/packages/lcms/package.py
diff --git a/var/spack/packages/leveldb/package.py b/var/spack/repos/builtin/packages/leveldb/package.py
index da68a9cbcb..da68a9cbcb 100644
--- a/var/spack/packages/leveldb/package.py
+++ b/var/spack/repos/builtin/packages/leveldb/package.py
diff --git a/var/spack/packages/libNBC/package.py b/var/spack/repos/builtin/packages/libNBC/package.py
index 6d08f3219c..550568e97d 100644
--- a/var/spack/packages/libNBC/package.py
+++ b/var/spack/repos/builtin/packages/libNBC/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/var/spack/packages/libarchive/package.py b/var/spack/repos/builtin/packages/libarchive/package.py
index cbd4b89cd0..cbd4b89cd0 100644
--- a/var/spack/packages/libarchive/package.py
+++ b/var/spack/repos/builtin/packages/libarchive/package.py
diff --git a/var/spack/repos/builtin/packages/libcerf/package.py b/var/spack/repos/builtin/packages/libcerf/package.py
new file mode 100644
index 0000000000..15e87ce4fe
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libcerf/package.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Libcerf(Package):
+ """
+ A self-contained C library providing complex error functions, based on Faddeeva's plasma dispersion function
+ w(z). Also provides Dawson's integral and Voigt's convolution of a Gaussian and a Lorentzian
+ """
+ homepage = "http://sourceforge.net/projects/libcerf"
+ url = "http://downloads.sourceforge.net/project/libcerf/libcerf-1.3.tgz"
+
+ version('1.3', 'b3504c467204df71e62aeccf73a25612')
+
+ def install(self, spec, prefix):
+ configure('--prefix=%s' % prefix)
+ make()
+ make("install")
diff --git a/var/spack/packages/libcircle/package.py b/var/spack/repos/builtin/packages/libcircle/package.py
index 3f7c996fb0..3f7c996fb0 100644
--- a/var/spack/packages/libcircle/package.py
+++ b/var/spack/repos/builtin/packages/libcircle/package.py
diff --git a/var/spack/packages/libdrm/package.py b/var/spack/repos/builtin/packages/libdrm/package.py
index 00736b7811..00736b7811 100644
--- a/var/spack/packages/libdrm/package.py
+++ b/var/spack/repos/builtin/packages/libdrm/package.py
diff --git a/var/spack/packages/libdwarf/package.py b/var/spack/repos/builtin/packages/libdwarf/package.py
index 099a974e93..addb557519 100644
--- a/var/spack/packages/libdwarf/package.py
+++ b/var/spack/repos/builtin/packages/libdwarf/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/var/spack/repos/builtin/packages/libedit/package.py b/var/spack/repos/builtin/packages/libedit/package.py
new file mode 100644
index 0000000000..bcd5212b9e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libedit/package.py
@@ -0,0 +1,14 @@
+from spack import *
+
+class Libedit(Package):
+ """An autotools compatible port of the NetBSD editline library"""
+ homepage = "http://thrysoee.dk/editline/"
+ url = "http://thrysoee.dk/editline/libedit-20150325-3.1.tar.gz"
+
+ version('3.1', '43cdb5df3061d78b5e9d59109871b4f6', url="http://thrysoee.dk/editline/libedit-20150325-3.1.tar.gz")
+
+ def install(self, spec, prefix):
+ configure('--prefix=%s' % prefix)
+
+ make()
+ make("install")
diff --git a/var/spack/packages/libelf/package.py b/var/spack/repos/builtin/packages/libelf/package.py
index 9338b8f393..29bc21b65c 100644
--- a/var/spack/packages/libelf/package.py
+++ b/var/spack/repos/builtin/packages/libelf/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/var/spack/packages/libevent/package.py b/var/spack/repos/builtin/packages/libevent/package.py
index 11b1083d67..11b1083d67 100644
--- a/var/spack/packages/libevent/package.py
+++ b/var/spack/repos/builtin/packages/libevent/package.py
diff --git a/var/spack/packages/libffi/package.py b/var/spack/repos/builtin/packages/libffi/package.py
index 2c1c4eed4d..acec031717 100644
--- a/var/spack/packages/libffi/package.py
+++ b/var/spack/repos/builtin/packages/libffi/package.py
@@ -6,11 +6,12 @@ class Libffi(Package):
to call any function specified by a call interface description at
run time."""
homepage = "https://sourceware.org/libffi/"
- url = "ftp://sourceware.org/pub/libffi/libffi-3.1.tar.gz"
-
- version('3.1', 'f5898b29bbfd70502831a212d9249d10')
+
+ version('3.2.1','83b89587607e3eb65c70d361f13bab43',url = "ftp://sourceware.org/pub/libffi/libffi-3.2.1.tar.gz")
+ #version('3.1', 'f5898b29bbfd70502831a212d9249d10',url = "ftp://sourceware.org/pub/libffi/libffi-3.1.tar.gz") # Has a bug $(lib64) instead of ${lib64} in libffi.pc
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
make()
make("install")
+
diff --git a/var/spack/packages/libgcrypt/package.py b/var/spack/repos/builtin/packages/libgcrypt/package.py
index 1d0a57f317..1d0a57f317 100644
--- a/var/spack/packages/libgcrypt/package.py
+++ b/var/spack/repos/builtin/packages/libgcrypt/package.py
diff --git a/var/spack/packages/clang/package.py b/var/spack/repos/builtin/packages/libgd/package.py
index 4f10385dbd..d920957ef1 100644
--- a/var/spack/packages/clang/package.py
+++ b/var/spack/repos/builtin/packages/libgd/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -22,25 +22,32 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+
from spack import *
-class Clang(Package):
- """The goal of the Clang project is to create a new C, C++,
- Objective C and Objective C++ front-end for the LLVM compiler.
+
+class Libgd(Package):
+ """
+ GD is an open source code library for the dynamic creation of images by programmers. GD is written in C, and
+ "wrappers" are available for Perl, PHP and other languages. GD creates PNG, JPEG, GIF, WebP, XPM, BMP images,
+ among other formats. GD is commonly used to generate charts, graphics, thumbnails, and most anything else, on the
+ fly. While not restricted to use on the web, the most common applications of GD involve website development.
"""
- homepage = "http://clang.llvm.org"
- list_url = "http://llvm.org/releases/download.html"
- depends_on("llvm")
- version('3.4.2', '87945973b7c73038871c5f849a818588', url='http://llvm.org/releases/3.4.2/cfe-3.4.2.src.tar.xz')
+ homepage = "https://github.com/libgd/libgd"
+ url = "https://github.com/libgd/libgd/archive/gd-2.1.1.tar.gz"
+
+ version('2.1.1', 'e91a1a99903e460e7ba00a794e72cc1e')
+
+ depends_on('libpng')
def install(self, spec, prefix):
- env['CXXFLAGS'] = self.compiler.cxx11_flag
with working_dir('spack-build', create=True):
cmake('..',
- '-DCLANG_PATH_TO_LLVM_BUILD=%s' % spec['llvm'].prefix,
- '-DLLVM_MAIN_SRC_DIR=%s' % spec['llvm'].prefix,
+ '-DENABLE_JPEG:BOOL=ON',
+ '-DENABLE_PNG:BOOL=ON',
+ '-DENABLE_TIFF:BOOL=ON',
*std_cmake_args)
make()
make("install")
diff --git a/var/spack/packages/libgpg-error/package.py b/var/spack/repos/builtin/packages/libgpg-error/package.py
index 6c1d1a10a7..6c1d1a10a7 100644
--- a/var/spack/packages/libgpg-error/package.py
+++ b/var/spack/repos/builtin/packages/libgpg-error/package.py
diff --git a/var/spack/packages/libjpeg-turbo/package.py b/var/spack/repos/builtin/packages/libjpeg-turbo/package.py
index 07ee183947..07ee183947 100644
--- a/var/spack/packages/libjpeg-turbo/package.py
+++ b/var/spack/repos/builtin/packages/libjpeg-turbo/package.py
diff --git a/var/spack/packages/libjson-c/package.py b/var/spack/repos/builtin/packages/libjson-c/package.py
index c0801cce9c..c0801cce9c 100644
--- a/var/spack/packages/libjson-c/package.py
+++ b/var/spack/repos/builtin/packages/libjson-c/package.py
diff --git a/var/spack/packages/libmng/package.py b/var/spack/repos/builtin/packages/libmng/package.py
index e5336ea2c2..e5336ea2c2 100644
--- a/var/spack/packages/libmng/package.py
+++ b/var/spack/repos/builtin/packages/libmng/package.py
diff --git a/var/spack/repos/builtin/packages/libmonitor/libmonitorkrell-0000.patch b/var/spack/repos/builtin/packages/libmonitor/libmonitorkrell-0000.patch
new file mode 100644
index 0000000000..3a90106850
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libmonitor/libmonitorkrell-0000.patch
@@ -0,0 +1,18 @@
+--- libmonitor-20130218/configure 2013-02-17 23:08:32.000000000 -0800
++++ libmonitor-20130218-fixes/configure 2013-02-18 10:34:05.237918411 -0800
+@@ -3600,13 +3600,13 @@
+ CFLAGS=$ac_save_CFLAGS
+ elif test $ac_cv_prog_cc_g = yes; then
+ if test "$GCC" = yes; then
+- CFLAGS="-g -O2"
++ CFLAGS="-g -O0"
+ else
+ CFLAGS="-g"
+ fi
+ else
+ if test "$GCC" = yes; then
+- CFLAGS="-O2"
++ CFLAGS="-O0"
+ else
+ CFLAGS=
+ fi
diff --git a/var/spack/repos/builtin/packages/libmonitor/libmonitorkrell-0001.patch b/var/spack/repos/builtin/packages/libmonitor/libmonitorkrell-0001.patch
new file mode 100644
index 0000000000..85d69595c0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libmonitor/libmonitorkrell-0001.patch
@@ -0,0 +1,395 @@
+--- libmonitor-20130218/src/callback.c 2013-02-17 23:08:32.000000000 -0800
++++ libmonitor-20130218-fixes/src/callback.c 2013-02-18 10:34:17.839842826 -0800
+@@ -173,3 +173,18 @@
+ MONITOR_DEBUG1("(default callback)\n");
+ return 0;
+ }
++
++
++void __attribute__ ((weak))
++monitor_mpi_post_comm_rank(void)
++{
++ MONITOR_DEBUG1("(default callback)\n");
++}
++
++void __attribute__ ((weak))
++monitor_mpi_pcontrol(int level)
++{
++ MONITOR_DEBUG("(default callback) level = %d\n", level);
++}
++
++
+--- libmonitor-20130218/src/main.c 2013-02-17 23:08:32.000000000 -0800
++++ libmonitor-20130218-fixes/src/main.c 2013-02-18 10:34:17.839842826 -0800
+@@ -230,6 +230,44 @@
+ monitor_fini_library_called = 1;
+ }
+
++/*
++ * Internal monitor functions.
++ */
++
++struct monitor_thread_node * __attribute__ ((weak))
++monitor_get_tn(void)
++{
++ return &monitor_main_tn;
++}
++
++int __attribute__ ((weak))
++monitor_get_thread_num(void)
++{
++ return (0);
++}
++
++void __attribute__ ((weak))
++monitor_reset_thread_list(struct monitor_thread_node *main_tn)
++{
++ MONITOR_DEBUG1("(weak)\n");
++ return;
++}
++
++void __attribute__ ((weak))
++monitor_thread_release(void)
++{
++ MONITOR_DEBUG1("(weak)\n");
++ return;
++}
++
++void __attribute__ ((weak))
++monitor_thread_shootdown(void)
++{
++ MONITOR_DEBUG1("(weak)\n");
++ return;
++}
++
++
+ void
+ monitor_begin_process_fcn(void *user_data, int is_fork)
+ {
+@@ -625,12 +663,6 @@
+ return (monitor_main_tn.tn_user_data);
+ }
+
+-int __attribute__ ((weak))
+-monitor_get_thread_num(void)
+-{
+- return (0);
+-}
+-
+ void * __attribute__ ((weak))
+ monitor_get_addr_thread_start(void)
+ {
+@@ -691,33 +723,3 @@
+ MONITOR_DEBUG1("(weak)\n");
+ return (FALSE);
+ }
+-
+-/*
+- * Internal monitor functions.
+- */
+-struct monitor_thread_node * __attribute__ ((weak))
+-monitor_get_tn(void)
+-{
+- return &monitor_main_tn;
+-}
+-
+-void __attribute__ ((weak))
+-monitor_reset_thread_list(struct monitor_thread_node *main_tn)
+-{
+- MONITOR_DEBUG1("(weak)\n");
+- return;
+-}
+-
+-void __attribute__ ((weak))
+-monitor_thread_release(void)
+-{
+- MONITOR_DEBUG1("(weak)\n");
+- return;
+-}
+-
+-void __attribute__ ((weak))
+-monitor_thread_shootdown(void)
+-{
+- MONITOR_DEBUG1("(weak)\n");
+- return;
+-}
+--- libmonitor-20130218/src/monitor.h 2013-02-17 23:08:32.000000000 -0800
++++ libmonitor-20130218-fixes/src/monitor.h 2013-02-18 10:34:17.840842821 -0800
+@@ -73,6 +73,11 @@
+ extern void monitor_fini_mpi(void);
+ extern void monitor_mpi_post_fini(void);
+
++extern void monitor_mpi_post_comm_rank(void);
++extern void monitor_mpi_pcontrol(int level);
++
++
++
+ /*
+ * Monitor support functions.
+ */
+--- libmonitor-20130218/src/mpi_comm_c.c 2013-02-17 23:08:32.000000000 -0800
++++ libmonitor-20130218-fixes/src/mpi_comm_c.c 2013-02-18 10:34:17.878842622 -0800
+@@ -36,5 +36,8 @@
+ ret = (*real_mpi_comm_rank)(comm, rank);
+ monitor_set_mpi_size_rank(size, *rank);
+
++ monitor_mpi_post_comm_rank();
++
++
+ return (ret);
+ }
+--- libmonitor-20130218/src/mpi_comm_f0.c 2013-02-17 23:08:32.000000000 -0800
++++ libmonitor-20130218-fixes/src/mpi_comm_f0.c 2013-02-18 10:34:17.879842617 -0800
+@@ -34,4 +34,5 @@
+ (*real_mpi_comm_size)(comm, &size, ierror);
+ (*real_mpi_comm_rank)(comm, rank, ierror);
+ monitor_set_mpi_size_rank(size, *rank);
++ monitor_mpi_post_comm_rank();
+ }
+--- libmonitor-20130218/src/mpi_comm_f1.c 2013-02-17 23:08:32.000000000 -0800
++++ libmonitor-20130218-fixes/src/mpi_comm_f1.c 2013-02-18 10:34:17.880842612 -0800
+@@ -34,4 +34,5 @@
+ (*real_mpi_comm_size)(comm, &size, ierror);
+ (*real_mpi_comm_rank)(comm, rank, ierror);
+ monitor_set_mpi_size_rank(size, *rank);
++ monitor_mpi_post_comm_rank();
+ }
+--- libmonitor-20130218/src/mpi_comm_f2.c 2013-02-17 23:08:32.000000000 -0800
++++ libmonitor-20130218-fixes/src/mpi_comm_f2.c 2013-02-18 10:34:17.880842612 -0800
+@@ -34,4 +34,5 @@
+ (*real_mpi_comm_size)(comm, &size, ierror);
+ (*real_mpi_comm_rank)(comm, rank, ierror);
+ monitor_set_mpi_size_rank(size, *rank);
++ monitor_mpi_post_comm_rank();
+ }
+--- libmonitor-20130218/src/pmpi.c 2013-02-17 23:08:32.000000000 -0800
++++ libmonitor-20130218-fixes/src/pmpi.c 2013-02-18 10:34:17.881842607 -0800
+@@ -52,11 +52,15 @@
+ typedef int mpi_init_thread_fcn_t(int *, char ***, int, int *);
+ typedef int mpi_finalize_fcn_t(void);
+ typedef int mpi_comm_fcn_t(void *, int *);
++typedef int mpi_pcontrol_fcn_t(int );
++
+
+ typedef void f_mpi_init_fcn_t(int *);
+ typedef void f_mpi_init_thread_fcn_t(int *, int *, int *);
+ typedef void f_mpi_finalize_fcn_t(int *);
+ typedef void f_mpi_comm_fcn_t(int *, int *, int *);
++typedef int f_mpi_pcontrol_fcn_t(int );
++
+
+ static mpi_init_fcn_t *real_pmpi_init = NULL;
+ static f_mpi_init_fcn_t *real_pmpi_init_f0 = NULL;
+@@ -83,6 +87,12 @@
+ static f_mpi_comm_fcn_t *real_pmpi_comm_rank_f1 = NULL;
+ static f_mpi_comm_fcn_t *real_pmpi_comm_rank_f2 = NULL;
+
++static mpi_pcontrol_fcn_t *real_pmpi_pcontrol = NULL;
++static f_mpi_pcontrol_fcn_t *real_pmpi_pcontrol_f0 = NULL;
++static f_mpi_pcontrol_fcn_t *real_pmpi_pcontrol_f1 = NULL;
++static f_mpi_pcontrol_fcn_t *real_pmpi_pcontrol_f2 = NULL;
++
++
+ /*
+ *----------------------------------------------------------------------
+ * PMPI_INIT OVERRIDE FUNCTIONS
+@@ -297,6 +307,7 @@
+ ret = (*real_pmpi_comm_size)(comm, &size);
+ ret = (*real_pmpi_comm_rank)(comm, rank);
+ monitor_set_mpi_size_rank(size, *rank);
++ monitor_mpi_post_comm_rank();
+
+ return (ret);
+ }
+@@ -308,7 +319,9 @@
+ MONITOR_GET_REAL_NAME_WRAP(rank_var, rank_fcn); \
+ (*size_var)(comm, &size, ierror); \
+ (*rank_var)(comm, rank, ierror); \
+- monitor_set_mpi_size_rank(size, *rank);
++ monitor_set_mpi_size_rank(size, *rank); \
++ monitor_mpi_post_comm_rank();
++
+
+ /*
+ * In Fortran, MPI_Comm is always int.
+@@ -333,3 +346,48 @@
+ FORTRAN_COMM_RANK_BODY(real_pmpi_comm_size_f2, pmpi_comm_size__,
+ real_pmpi_comm_rank_f2, pmpi_comm_rank__);
+ }
++
++
++/*
++ *----------------------------------------------------------------------
++ * PMPI_PCONTROL OVERRIDE FUNCTIONS
++ *----------------------------------------------------------------------
++ */
++
++int
++MONITOR_WRAP_NAME(PMPI_Pcontrol)(int level)
++{
++ int ret;
++
++ MONITOR_DEBUG("level = %d\n", level); \
++ MONITOR_GET_REAL_NAME_WRAP(real_pmpi_pcontrol, PMPI_Pcontrol);
++ ret = (*real_pmpi_pcontrol) (level) ;
++ monitor_mpi_pcontrol(level);
++
++ return (ret);
++}
++
++#define FORTRAN_PCONTROL_BODY(var_name, fcn_name)\
++ int ret; \
++ MONITOR_DEBUG("level = %d\n", level); \
++ MONITOR_GET_REAL_NAME_WRAP(var_name, fcn_name); \
++ ret = (*var_name) (level) ; \
++ monitor_mpi_pcontrol(level);
++
++int
++MONITOR_WRAP_NAME(pmpi_pcontrol)(int level)
++{
++ FORTRAN_PCONTROL_BODY(real_pmpi_pcontrol_f0, pmpi_pcontrol);
++}
++
++int
++MONITOR_WRAP_NAME(pmpi_pcontrol_)(int level)
++{
++ FORTRAN_PCONTROL_BODY(real_pmpi_pcontrol_f1, pmpi_pcontrol_);
++}
++
++int
++MONITOR_WRAP_NAME(pmpi_pcontrol__)(int level)
++{
++ FORTRAN_PCONTROL_BODY(real_pmpi_pcontrol_f2, pmpi_pcontrol__);
++}
+--- libmonitor-20130218/src/mpi_pcontrol_c.c 1969-12-31 16:00:00.000000000 -0800
++++ libmonitor-20130218-fixes/src/mpi_pcontrol_c.c 2013-02-18 10:34:17.882842602 -0800
+@@ -0,0 +1,30 @@
++/*
++ * Override MPI_Pcontrol in C/C++.
++ *
++ *
++ */
++
++#include "config.h"
++#include "common.h"
++#include "monitor.h"
++
++typedef int mpi_pcontrol_fcn_t(int level);
++#ifdef MONITOR_STATIC
++extern mpi_pcontrol_fcn_t __real_MPI_Pcontrol;
++#endif
++static mpi_pcontrol_fcn_t *real_mpi_pcontrol = NULL;
++
++int
++MONITOR_WRAP_NAME(MPI_Pcontrol)(int level)
++{
++ int ret, count;
++
++ MONITOR_DEBUG("level = %d\n", level); \
++
++ MONITOR_GET_REAL_NAME_WRAP(real_mpi_pcontrol, MPI_Pcontrol);
++ ret = (*real_mpi_pcontrol)(level);
++ monitor_mpi_pcontrol(level);
++
++
++ return (ret);
++}
+--- libmonitor-20130218/src/mpi_pcontrol_f0.c 1969-12-31 16:00:00.000000000 -0800
++++ libmonitor-20130218-fixes/src/mpi_pcontrol_f0.c 2013-02-18 10:34:17.882842602 -0800
+@@ -0,0 +1,24 @@
++/*
++ * Override mpi_pcontrol in Fortran.
++ *
++ */
++
++#include "config.h"
++#include "common.h"
++#include "monitor.h"
++
++typedef void mpi_pcontrol_fcn_t(int level);
++#ifdef MONITOR_STATIC
++extern mpi_pcontrol_fcn_t __real_mpi_pcontrol;
++#endif
++static mpi_pcontrol_fcn_t *real_mpi_pcontrol = NULL;
++
++int
++MONITOR_WRAP_NAME(mpi_pcontrol)(int level)
++{
++ int count;
++
++ MONITOR_DEBUG1("\n");
++ MONITOR_GET_REAL_NAME_WRAP(real_mpi_pcontrol, mpi_pcontrol);
++ (*real_mpi_pcontrol)(level);
++}
+--- libmonitor-20130218/src/mpi_pcontrol_f1.c 1969-12-31 16:00:00.000000000 -0800
++++ libmonitor-20130218-fixes/src/mpi_pcontrol_f1.c 2013-02-18 10:34:17.883842597 -0800
+@@ -0,0 +1,24 @@
++/*
++ * Override mpi_pcontrol_ in Fortran.
++ *
++ */
++
++#include "config.h"
++#include "common.h"
++#include "monitor.h"
++
++typedef void mpi_pcontrol_fcn_t(int level);
++#ifdef MONITOR_STATIC
++extern mpi_pcontrol_fcn_t __real_mpi_pcontrol_;
++#endif
++static mpi_pcontrol_fcn_t *real_mpi_pcontrol = NULL;
++
++int
++MONITOR_WRAP_NAME(mpi_pcontrol_)(int level)
++{
++ int count;
++
++ MONITOR_DEBUG1("\n");
++ MONITOR_GET_REAL_NAME_WRAP(real_mpi_pcontrol, mpi_pcontrol_);
++ (*real_mpi_pcontrol)(level);
++}
+--- libmonitor-20130218/src/mpi_pcontrol_f2.c 1969-12-31 16:00:00.000000000 -0800
++++ libmonitor-20130218-fixes/src/mpi_pcontrol_f2.c 2013-02-18 10:34:17.883842597 -0800
+@@ -0,0 +1,24 @@
++/*
++ * Override mpi_pcontrol__ in Fortran.
++ *
++ */
++
++#include "config.h"
++#include "common.h"
++#include "monitor.h"
++
++typedef void mpi_pcontrol_fcn_t(int level);
++#ifdef MONITOR_STATIC
++extern mpi_pcontrol_fcn_t __real_mpi_pcontrol__;
++#endif
++static mpi_pcontrol_fcn_t *real_mpi_pcontrol = NULL;
++
++int
++MONITOR_WRAP_NAME(mpi_pcontrol__)(int level)
++{
++ int count;
++
++ MONITOR_DEBUG1("\n");
++ MONITOR_GET_REAL_NAME_WRAP(real_mpi_pcontrol, mpi_pcontrol__);
++ (*real_mpi_pcontrol)(level);
++}
+--- libmonitor-20130218/src/Makefile.am 2013-02-17 23:08:32.000000000 -0800
++++ libmonitor-20130218-fixes/src/Makefile.am 2013-02-18 10:34:17.931842343 -0800
+@@ -38,10 +38,11 @@
+ MONITOR_THREAD_FILES = pthread.c
+ MONITOR_SCRIPT_FILES = monitor-link monitor-run
+ MONITOR_MPI_FILES = \
+- mpi_init_c.c mpi_init_thread_c.c mpi_final_c.c mpi_comm_c.c \
+- mpi_init_f0.c mpi_init_thread_f0.c mpi_final_f0.c mpi_comm_f0.c \
+- mpi_init_f1.c mpi_init_thread_f1.c mpi_final_f1.c mpi_comm_f1.c \
+- mpi_init_f2.c mpi_init_thread_f2.c mpi_final_f2.c mpi_comm_f2.c
++ mpi_init_c.c mpi_init_thread_c.c mpi_final_c.c mpi_comm_c.c mpi_pcontrol_c.c \
++ mpi_init_f0.c mpi_init_thread_f0.c mpi_final_f0.c mpi_comm_f0.c mpi_pcontrol_f0.c \
++ mpi_init_f1.c mpi_init_thread_f1.c mpi_final_f1.c mpi_comm_f1.c mpi_pcontrol_f1.c \
++ mpi_init_f2.c mpi_init_thread_f2.c mpi_final_f2.c mpi_comm_f2.c mpi_pcontrol_f2.c
++
+
+ include_HEADERS = monitor.h
+ bin_SCRIPTS =
diff --git a/var/spack/repos/builtin/packages/libmonitor/libmonitorkrell-0002.patch b/var/spack/repos/builtin/packages/libmonitor/libmonitorkrell-0002.patch
new file mode 100644
index 0000000000..514dfdf13e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libmonitor/libmonitorkrell-0002.patch
@@ -0,0 +1,106 @@
+--- libmonitor-20130218/src/pmpi.c 2013-02-18 11:34:17.000000000 -0700
++++ libmonitor-20130218-fixed/src/pmpi.c 2013-04-11 10:03:59.300550393 -0600
+@@ -355,7 +355,7 @@
+ */
+
+ int
+-MONITOR_WRAP_NAME(PMPI_Pcontrol)(int level)
++MONITOR_WRAP_NAME(PMPI_Pcontrol)(int level, int *ierror )
+ {
+ int ret;
+
+@@ -375,19 +375,19 @@
+ monitor_mpi_pcontrol(level);
+
+ int
+-MONITOR_WRAP_NAME(pmpi_pcontrol)(int level)
++MONITOR_WRAP_NAME(pmpi_pcontrol)(int level, int *ierror )
+ {
+ FORTRAN_PCONTROL_BODY(real_pmpi_pcontrol_f0, pmpi_pcontrol);
+ }
+
+ int
+-MONITOR_WRAP_NAME(pmpi_pcontrol_)(int level)
++MONITOR_WRAP_NAME(pmpi_pcontrol_)(int level, int *ierror )
+ {
+ FORTRAN_PCONTROL_BODY(real_pmpi_pcontrol_f1, pmpi_pcontrol_);
+ }
+
+ int
+-MONITOR_WRAP_NAME(pmpi_pcontrol__)(int level)
++MONITOR_WRAP_NAME(pmpi_pcontrol__)(int level, int *ierror )
+ {
+ FORTRAN_PCONTROL_BODY(real_pmpi_pcontrol_f2, pmpi_pcontrol__);
+ }
+--- libmonitor-20130218/src/mpi_pcontrol_f0.c 2013-02-18 11:34:17.000000000 -0700
++++ libmonitor-20130218-fixed/src/mpi_pcontrol_f0.c 2013-04-11 10:13:47.783002000 -0600
+@@ -7,18 +7,18 @@
+ #include "common.h"
+ #include "monitor.h"
+
+-typedef void mpi_pcontrol_fcn_t(int level);
++typedef void mpi_pcontrol_fcn_t(int level, int *ierror);
+ #ifdef MONITOR_STATIC
+ extern mpi_pcontrol_fcn_t __real_mpi_pcontrol;
+ #endif
+ static mpi_pcontrol_fcn_t *real_mpi_pcontrol = NULL;
+
+ int
+-MONITOR_WRAP_NAME(mpi_pcontrol)(int level)
++MONITOR_WRAP_NAME(mpi_pcontrol)(int level, int *ierror)
+ {
+ int count;
+
+ MONITOR_DEBUG1("\n");
+ MONITOR_GET_REAL_NAME_WRAP(real_mpi_pcontrol, mpi_pcontrol);
+- (*real_mpi_pcontrol)(level);
++ (*real_mpi_pcontrol)(level, ierror);
+ }
+--- libmonitor-20130218/src/mpi_pcontrol_f1.c 2013-02-18 11:34:17.000000000 -0700
++++ libmonitor-20130218-fixed/src/mpi_pcontrol_f1.c 2013-04-11 10:14:08.039214000 -0600
+@@ -7,18 +7,18 @@
+ #include "common.h"
+ #include "monitor.h"
+
+-typedef void mpi_pcontrol_fcn_t(int level);
++typedef void mpi_pcontrol_fcn_t(int level, int *ierror);
+ #ifdef MONITOR_STATIC
+ extern mpi_pcontrol_fcn_t __real_mpi_pcontrol_;
+ #endif
+ static mpi_pcontrol_fcn_t *real_mpi_pcontrol = NULL;
+
+ int
+-MONITOR_WRAP_NAME(mpi_pcontrol_)(int level)
++MONITOR_WRAP_NAME(mpi_pcontrol_)(int level, int *ierror)
+ {
+ int count;
+
+ MONITOR_DEBUG1("\n");
+ MONITOR_GET_REAL_NAME_WRAP(real_mpi_pcontrol, mpi_pcontrol_);
+- (*real_mpi_pcontrol)(level);
++ (*real_mpi_pcontrol)(level, ierror);
+ }
+--- libmonitor-20130218/src/mpi_pcontrol_f2.c 2013-02-18 11:34:17.000000000 -0700
++++ libmonitor-20130218-fixed/src/mpi_pcontrol_f2.c 2013-04-11 10:14:19.000960000 -0600
+@@ -7,18 +7,18 @@
+ #include "common.h"
+ #include "monitor.h"
+
+-typedef void mpi_pcontrol_fcn_t(int level);
++typedef void mpi_pcontrol_fcn_t(int level, int *ierror);
+ #ifdef MONITOR_STATIC
+ extern mpi_pcontrol_fcn_t __real_mpi_pcontrol__;
+ #endif
+ static mpi_pcontrol_fcn_t *real_mpi_pcontrol = NULL;
+
+ int
+-MONITOR_WRAP_NAME(mpi_pcontrol__)(int level)
++MONITOR_WRAP_NAME(mpi_pcontrol__)(int level, int *ierror)
+ {
+ int count;
+
+ MONITOR_DEBUG1("\n");
+ MONITOR_GET_REAL_NAME_WRAP(real_mpi_pcontrol, mpi_pcontrol__);
+- (*real_mpi_pcontrol)(level);
++ (*real_mpi_pcontrol)(level, ierror);
+ }
diff --git a/var/spack/packages/libmonitor/package.py b/var/spack/repos/builtin/packages/libmonitor/package.py
index 3b95b86ddf..eecf1963e3 100644
--- a/var/spack/packages/libmonitor/package.py
+++ b/var/spack/repos/builtin/packages/libmonitor/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -29,6 +29,13 @@ class Libmonitor(Package):
homepage = "http://hpctoolkit.org"
version('20130218', svn='http://libmonitor.googlecode.com/svn/trunk/', revision=146)
+ variant('krellpatch', default=False, description="Build with the OpenSpeedShop-based patches.")
+
+
+ patch('libmonitorkrell-0000.patch', when='@20130218+krellpatch')
+ patch('libmonitorkrell-0001.patch', when='@20130218+krellpatch')
+ patch('libmonitorkrell-0002.patch', when='@20130218+krellpatch')
+
def install(self, spec, prefix):
configure("--prefix=" + prefix)
diff --git a/var/spack/repos/builtin/packages/libpciaccess/package.py b/var/spack/repos/builtin/packages/libpciaccess/package.py
new file mode 100644
index 0000000000..0c0847d323
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libpciaccess/package.py
@@ -0,0 +1,23 @@
+from spack import *
+import os.path
+
+class Libpciaccess(Package):
+ """Generic PCI access library."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libpciaccess/"
+ url = "http://xorg.freedesktop.org/archive/individual/lib/libpciaccess-0.13.4.tar.bz2"
+
+ version('0.13.4', 'ace78aec799b1cf6dfaea55d3879ed9f')
+
+ depends_on('libtool')
+
+ def install(self, spec, prefix):
+ # libpciaccess does not support OS X
+ if spec.satisfies('=darwin-x86_64'):
+ # create a dummy directory
+ mkdir(prefix.lib)
+ return
+
+ configure("--prefix=%s" % prefix)
+ make()
+ make("install")
diff --git a/var/spack/packages/libpng/package.py b/var/spack/repos/builtin/packages/libpng/package.py
index e02b08663e..e02b08663e 100644
--- a/var/spack/packages/libpng/package.py
+++ b/var/spack/repos/builtin/packages/libpng/package.py
diff --git a/var/spack/packages/libsodium/package.py b/var/spack/repos/builtin/packages/libsodium/package.py
index 1c8a16d998..1c8a16d998 100644
--- a/var/spack/packages/libsodium/package.py
+++ b/var/spack/repos/builtin/packages/libsodium/package.py
diff --git a/var/spack/packages/libtiff/package.py b/var/spack/repos/builtin/packages/libtiff/package.py
index 63c6704cb8..63c6704cb8 100644
--- a/var/spack/packages/libtiff/package.py
+++ b/var/spack/repos/builtin/packages/libtiff/package.py
diff --git a/var/spack/packages/libtool/package.py b/var/spack/repos/builtin/packages/libtool/package.py
index a07daf9781..82a54953b2 100644
--- a/var/spack/packages/libtool/package.py
+++ b/var/spack/repos/builtin/packages/libtool/package.py
@@ -5,6 +5,7 @@ class Libtool(Package):
homepage = "https://www.gnu.org/software/libtool/"
url = "http://ftpmirror.gnu.org/libtool/libtool-2.4.2.tar.gz"
+ version('2.4.6' , 'addf44b646ddb4e3919805aa88fa7c5e')
version('2.4.2' , 'd2f3b7d4627e69e13514a40e72a24d50')
def install(self, spec, prefix):
diff --git a/var/spack/packages/libunwind/package.py b/var/spack/repos/builtin/packages/libunwind/package.py
index 239fcbcfd5..6f162f7b08 100644
--- a/var/spack/packages/libunwind/package.py
+++ b/var/spack/repos/builtin/packages/libunwind/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/var/spack/packages/libuuid/package.py b/var/spack/repos/builtin/packages/libuuid/package.py
index 373c5bfcac..373c5bfcac 100644
--- a/var/spack/packages/libuuid/package.py
+++ b/var/spack/repos/builtin/packages/libuuid/package.py
diff --git a/var/spack/packages/libxcb/package.py b/var/spack/repos/builtin/packages/libxcb/package.py
index 521cd0d475..1dd5954c99 100644
--- a/var/spack/packages/libxcb/package.py
+++ b/var/spack/repos/builtin/packages/libxcb/package.py
@@ -1,19 +1,23 @@
from spack import *
class Libxcb(Package):
- """The X protocol C-language Binding (XCB) is a replacement
- for Xlib featuring a small footprint, latency hiding, direct
- access to the protocol, improved threading support, and
+ """The X protocol C-language Binding (XCB) is a replacement
+ for Xlib featuring a small footprint, latency hiding, direct
+ access to the protocol, improved threading support, and
extensibility."""
homepage = "http://xcb.freedesktop.org/"
url = "http://xcb.freedesktop.org/dist/libxcb-1.11.tar.gz"
version('1.11', '1698dd837d7e6e94d029dbe8b3a82deb')
-
+ version('1.11.1', '118623c15a96b08622603a71d8789bf3')
depends_on("python")
depends_on("xcb-proto")
+ def patch(self):
+ filter_file('typedef struct xcb_auth_info_t {', 'typedef struct {', 'src/xcb.h')
+
+
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
diff --git a/var/spack/repos/builtin/packages/libxml2/package.py b/var/spack/repos/builtin/packages/libxml2/package.py
new file mode 100644
index 0000000000..134e596963
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxml2/package.py
@@ -0,0 +1,30 @@
+from spack import *
+import os
+
+class Libxml2(Package):
+ """Libxml2 is the XML C parser and toolkit developed for the Gnome
+ project (but usable outside of the Gnome platform); it is free
+ software available under the MIT License."""
+ homepage = "http://xmlsoft.org"
+ url = "http://xmlsoft.org/sources/libxml2-2.9.2.tar.gz"
+
+ version('2.9.2', '9e6a9aca9d155737868b3dc5fd82f788')
+
+ variant('python', default=False, description='Enable Python support')
+
+ extends('python', when='+python', ignore=r'(bin.*$)|(include.*$)|(share.*$)|(lib/libxml2.*$)|(lib/xml2.*$)|(lib/cmake.*$)')
+ depends_on('zlib')
+ depends_on('xz')
+
+ def install(self, spec, prefix):
+ if '+python' in spec:
+ site_packages_dir = os.path.join(prefix, 'lib/python%s.%s/site-packages' %(spec['python'].version[:2]))
+ python_args = ["--with-python=%s" % spec['python'].prefix, "--with-python-install-dir=%s" % site_packages_dir]
+ else:
+ python_args = ["--without-python"]
+
+ configure("--prefix=%s" % prefix,
+ *python_args)
+
+ make()
+ make("install")
diff --git a/var/spack/packages/libxshmfence/package.py b/var/spack/repos/builtin/packages/libxshmfence/package.py
index 3aa2448b46..3aa2448b46 100644
--- a/var/spack/packages/libxshmfence/package.py
+++ b/var/spack/repos/builtin/packages/libxshmfence/package.py
diff --git a/var/spack/packages/libxslt/package.py b/var/spack/repos/builtin/packages/libxslt/package.py
index f97332d020..f97332d020 100644
--- a/var/spack/packages/libxslt/package.py
+++ b/var/spack/repos/builtin/packages/libxslt/package.py
diff --git a/var/spack/packages/llvm-lld/package.py b/var/spack/repos/builtin/packages/llvm-lld/package.py
index f229211396..cb91aa22a5 100644
--- a/var/spack/packages/llvm-lld/package.py
+++ b/var/spack/repos/builtin/packages/llvm-lld/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/var/spack/repos/builtin/packages/llvm/package.py b/var/spack/repos/builtin/packages/llvm/package.py
new file mode 100644
index 0000000000..a2b2c6eccc
--- /dev/null
+++ b/var/spack/repos/builtin/packages/llvm/package.py
@@ -0,0 +1,218 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by David Beckingsale, david@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import os, shutil
+
+
+class Llvm(Package):
+ """The LLVM Project is a collection of modular and reusable compiler and
+ toolchain technologies. Despite its name, LLVM has little to do with
+ traditional virtual machines, though it does provide helpful libraries
+ that can be used to build them. The name "LLVM" itself is not an acronym;
+ it is the full name of the project.
+ """
+ homepage = 'http://llvm.org/'
+ url = 'http://llvm.org/releases/3.7.0/llvm-3.7.0.src.tar.xz'
+
+ version('3.0', 'a8e5f5f1c1adebae7b4a654c376a6005', url='http://llvm.org/releases/3.0/llvm-3.0.tar.gz') # currently required by mesa package
+
+ variant('debug', default=False, description="Build a debug version of LLVM; this increases binary size by an order of magnitude, so make sure you have 20-30 GB of space available to build this")
+ variant('clang', default=True, description="Build the LLVM C/C++/Objective-C compiler frontend")
+ variant('lldb', default=True, description="Build the LLVM debugger")
+ variant('internal_unwind', default=True, description="Build the libcxxabi libunwind")
+ variant('polly', default=True, description="Build the LLVM polyhedral optimization plugin; only builds for 3.7.0+")
+ variant('libcxx', default=True, description="Build the LLVM C++ standard library")
+ variant('compiler-rt', default=True, description="Build the LLVM compiler runtime, including sanitizers")
+ variant('gold', default=True, description="Add support for LTO with the gold linker plugin")
+
+
+ # Build dependency
+ depends_on('cmake @2.8.12.2:')
+
+ # Universal dependency
+ depends_on('python@2.7:')
+
+ # lldb dependencies
+ depends_on('ncurses', when='+lldb')
+ depends_on('swig', when='+lldb')
+ depends_on('libedit', when='+lldb')
+
+ # gold support
+ depends_on('binutils+gold', when='+gold')
+
+ # polly plugin
+ depends_on('gmp', when='@:3.6.999 +polly')
+ depends_on('isl', when='@:3.6.999 +polly')
+
+ base_url = 'http://llvm.org/releases/%%(version)s/%(pkg)s-%%(version)s.src.tar.xz'
+ llvm_url = base_url % { 'pkg' : 'llvm'}
+
+ resources = {
+ 'compiler-rt' : {
+ 'url' : base_url % { 'pkg' : 'compiler-rt'},
+ 'destination' : 'projects',
+ 'placement' : 'compiler-rt',
+ },
+ 'openmp' : {
+ 'url' : base_url % { 'pkg' : 'openmp'},
+ 'destination' : 'projects',
+ 'placement' : 'openmp',
+ },
+ 'libcxx' : {
+ 'url' : base_url % { 'pkg' : 'libcxx'},
+ 'destination' : 'projects',
+ 'placement' : 'libcxx',
+ },
+ 'libcxxabi' : {
+ 'url' : base_url % { 'pkg' : 'libcxxabi'},
+ 'destination' : 'projects',
+ 'placement' : 'libcxxabi',
+ },
+ 'clang' : {
+ 'url' : base_url % { 'pkg' : 'cfe'},
+ 'destination' : 'tools',
+ 'placement' : 'clang',
+ },
+ 'clang-tools-extra' : {
+ 'url' : base_url % { 'pkg' : 'clang-tools-extra'},
+ 'destination' : 'tools/clang/tools',
+ 'placement' : 'extra',
+ },
+ 'lldb' : {
+ 'url' : base_url % { 'pkg' : 'lldb'},
+ 'destination' : 'tools',
+ 'placement' : 'lldb',
+ },
+ 'polly' : {
+ 'url' : base_url % { 'pkg' : 'polly'},
+ 'destination' : 'tools',
+ 'placement' : 'polly',
+ },
+ 'llvm-libunwind' : {
+ 'url' : base_url % { 'pkg' : 'libunwind'},
+ 'destination' : 'projects',
+ 'placement' : 'libunwind',
+ },
+ }
+ releases = [
+ {
+ 'version' : '3.7.0',
+ 'md5':'b98b9495e5655a672d6cb83e1a180f8e',
+ 'resources' : {
+ 'compiler-rt' : '383c10affd513026f08936b5525523f5',
+ 'openmp' : 'f482c86fdead50ba246a1a2b0bbf206f',
+ 'polly' : '32f93ffc9cc7e042df22089761558f8b',
+ 'libcxx' : '46aa5175cbe1ad42d6e9c995968e56dd',
+ 'libcxxabi' : '5aa769e2fca79fa5335cfae8f6258772',
+ 'clang' : '8f9d27335e7331cf0a4711e952f21f01',
+ 'clang-tools-extra' : 'd5a87dacb65d981a427a536f6964642e',
+ 'lldb' : 'e5931740400d1dc3e7db4c7ba2ceff68',
+ 'llvm-libunwind' : '9a75392eb7eb8ed5c0840007e212baf5',
+ }
+ },
+ {
+ 'version' : '3.6.2',
+ 'md5':'0c1ee3597d75280dee603bae9cbf5cc2',
+ 'resources' : {
+ 'compiler-rt' : 'e3bc4eb7ba8c39a6fe90d6c988927f3c',
+ 'openmp' : '65dd5863b9b270960a96817e9152b123',
+ 'libcxx' : '22214c90697636ef960a49aef7c1823a',
+ 'libcxxabi' : '17518e361e4e228f193dd91e8ef54ba2',
+ 'clang' : 'ff862793682f714bb7862325b9c06e20',
+ 'clang-tools-extra' : '3ebc1dc41659fcec3db1b47d81575e06',
+ 'lldb' : '51e5eb552f777b950bb0ff326e60d5f0',
+ }
+ },
+ {
+ 'version' : '3.5.1',
+ 'md5':'2d3d8004f38852aa679e5945b8ce0b14',
+ 'resources' : {
+ 'compiler-rt' : 'd626cfb8a9712cb92b820798ab5bc1f8',
+ 'openmp' : '121ddb10167d7fc38b1f7e4b029cf059',
+ 'libcxx' : '406f09b1dab529f3f7879f4d548329d2',
+ 'libcxxabi' : 'b22c707e8d474a99865ad3c521c3d464',
+ 'clang' : '93f9532f8f7e6f1d8e5c1116907051cb',
+ 'clang-tools-extra' : 'f13f31ed3038acadc6fa63fef812a246',
+ 'lldb' : 'cc5ea8a414c62c33e760517f8929a204',
+ }
+ },
+ ]
+
+ for release in releases:
+ version(release['version'], release['md5'], url=llvm_url % release)
+
+ for name, md5 in release['resources'].items():
+ resource(name=name,
+ url=resources[name]['url'] % release,
+ md5=md5,
+ destination=resources[name]['destination'],
+ when='@%(version)s' % release,
+ placement=resources[name].get('placement', None))
+
+ def install(self, spec, prefix):
+ env['CXXFLAGS'] = self.compiler.cxx11_flag
+ cmake_args = [ arg for arg in std_cmake_args if 'BUILD_TYPE' not in arg ]
+
+ build_type = 'RelWithDebInfo' if '+debug' in spec else 'Release'
+ cmake_args.extend([
+ '..',
+ '-DCMAKE_BUILD_TYPE=' + build_type,
+ '-DLLVM_REQUIRES_RTTI:BOOL=ON',
+ '-DCLANG_DEFAULT_OPENMP_RUNTIME:STRING=libomp',
+ '-DPYTHON_EXECUTABLE:PATH=%s/bin/python' % spec['python'].prefix ])
+
+ if '+gold' in spec:
+ cmake_args.append('-DLLVM_BINUTILS_INCDIR=' + os.path.join( spec['binutils'].prefix, 'include'))
+ if '+polly' in spec:
+ cmake_args.append('-DLINK_POLLY_INTO_TOOLS:Bool=ON')
+ else:
+ cmake_args.append('-DLLVM_EXTERNAL_POLLY_BUILD:Bool=OFF')
+
+ if '+clang' not in spec:
+ cmake_args.append('-DLLVM_EXTERNAL_CLANG_BUILD:Bool=OFF')
+ if '+lldb' not in spec:
+ cmake_args.append('-DLLVM_EXTERNAL_LLDB_BUILD:Bool=OFF')
+ if '+internal_unwind' not in spec:
+ cmake_args.append('-DLLVM_EXTERNAL_LIBUNWIND_BUILD:Bool=OFF')
+ if '+libcxx' not in spec:
+ cmake_args.append('-DLLVM_EXTERNAL_LIBCXX_BUILD:Bool=OFF')
+ cmake_args.append('-DLLVM_EXTERNAL_LIBCXXABI_BUILD:Bool=OFF')
+ if '+compiler-rt' not in spec:
+ cmake_args.append('-DLLVM_EXTERNAL_COMPILER_RT_BUILD:Bool=OFF')
+
+ if '+clang' not in spec:
+ if '+clang_extra' in spec:
+ raise SpackException('The clang_extra variant requires the clang variant to be selected')
+ if '+lldb' in spec:
+ raise SpackException('The lldb variant requires the clang variant to be selected')
+
+ with working_dir('spack-build', create=True):
+ cmake(*cmake_args)
+ make()
+ make("install")
+ query_path = os.path.join('bin', 'clang-query')
+ # Manually install clang-query, because llvm doesn't...
+ if os.path.exists(query_path):
+ shutil.copy(query_path, os.path.join(prefix, 'bin'))
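The doubled %% in base_url above is what lets one template be expanded twice:
the first substitution fills in the package name and turns %%(version)s into
%(version)s, and the per-release loop fills in the version later. Standalone:

    base_url = 'http://llvm.org/releases/%%(version)s/%(pkg)s-%%(version)s.src.tar.xz'
    llvm_url = base_url % {'pkg': 'llvm'}  # still contains %(version)s
    print(llvm_url % {'version': '3.7.0'})
    # http://llvm.org/releases/3.7.0/llvm-3.7.0.src.tar.xz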
diff --git a/var/spack/packages/lmdb/package.py b/var/spack/repos/builtin/packages/lmdb/package.py
index 875b8100c5..875b8100c5 100644
--- a/var/spack/packages/lmdb/package.py
+++ b/var/spack/repos/builtin/packages/lmdb/package.py
diff --git a/var/spack/repos/builtin/packages/lmod/package.py b/var/spack/repos/builtin/packages/lmod/package.py
new file mode 100644
index 0000000000..d642594f92
--- /dev/null
+++ b/var/spack/repos/builtin/packages/lmod/package.py
@@ -0,0 +1,26 @@
+from spack import *
+import os
+
+class Lmod(Package):
+ """
+ Lmod is a Lua-based module system that easily handles the hierarchical
+ MODULEPATH problem. Environment Modules provide a convenient way to
+ dynamically change the users' environment through modulefiles. This
+ includes easily adding or removing directories to the PATH environment
+ variable. Modulefiles for Library packages provide environment variables
+ that specify where the library and header files can be found.
+ """
+ homepage = "https://www.tacc.utexas.edu/research-development/tacc-projects/lmod"
+ url = "http://sourceforge.net/projects/lmod/files/Lmod-6.0.1.tar.bz2/download"
+
+ version('6.0.1', '91abf52fe5033bd419ffe2842ebe7af9')
+
+ depends_on("lua@5.2:")
+
+ def install(self, spec, prefix):
+ # Add our lua to PATH
+ os.environ['PATH'] = spec['lua'].prefix.bin + os.pathsep + os.environ['PATH']
+
+ configure('--prefix=%s' % prefix)
+ make()
+ make("install")
diff --git a/var/spack/packages/lua/package.py b/var/spack/repos/builtin/packages/lua/package.py
index 57c443cc2d..ca8cfc5365 100644
--- a/var/spack/packages/lua/package.py
+++ b/var/spack/repos/builtin/packages/lua/package.py
@@ -6,6 +6,7 @@ class Lua(Package):
homepage = "http://www.lua.org"
url = "http://www.lua.org/ftp/lua-5.1.5.tar.gz"
+ version('5.3.2', '33278c2ab5ee3c1a875be8d55c1ca2a1')
version('5.3.1', '797adacada8d85761c079390ff1d9961')
version('5.3.0', 'a1b0a7e92d0c85bbff7a8d27bf29f8af')
version('5.2.4', '913fdb32207046b273fdb17aad70be13')
@@ -18,9 +19,16 @@ class Lua(Package):
version('5.1.3', 'a70a8dfaa150e047866dc01a46272599')
depends_on('ncurses')
+ depends_on('readline')
def install(self, spec, prefix):
+ if spec.satisfies("=darwin-i686") or spec.satisfies("=darwin-x86_64"):
+ target = 'macosx'
+ else:
+ target = 'linux'
make('INSTALL_TOP=%s' % prefix,
- 'MYLDFLAGS=-L%s/lib' % spec['ncurses'].prefix,
- 'linux',
+ 'MYLDFLAGS=-L%s -lncurses' % spec['ncurses'].prefix.lib,
+ target)
+ make('INSTALL_TOP=%s' % prefix,
+ 'MYLDFLAGS=-L%s -lncurses' % spec['ncurses'].prefix.lib,
'install')
diff --git a/var/spack/packages/lwgrp/package.py b/var/spack/repos/builtin/packages/lwgrp/package.py
index 5963382b92..5963382b92 100644
--- a/var/spack/packages/lwgrp/package.py
+++ b/var/spack/repos/builtin/packages/lwgrp/package.py
diff --git a/var/spack/packages/lwm2/package.py b/var/spack/repos/builtin/packages/lwm2/package.py
index 31afff8816..31afff8816 100644
--- a/var/spack/packages/lwm2/package.py
+++ b/var/spack/repos/builtin/packages/lwm2/package.py
diff --git a/var/spack/repos/builtin/packages/matio/package.py b/var/spack/repos/builtin/packages/matio/package.py
new file mode 100644
index 0000000000..12cfb80926
--- /dev/null
+++ b/var/spack/repos/builtin/packages/matio/package.py
@@ -0,0 +1,15 @@
+from spack import *
+
+
+class Matio(Package):
+ """matio is an C library for reading and writing Matlab MAT files"""
+ homepage = "http://sourceforge.net/projects/matio/"
+ url = "http://downloads.sourceforge.net/project/matio/matio/1.5.2/matio-1.5.2.tar.gz"
+
+ version('1.5.2', '85b007b99916c63791f28398f6a4c6f1')
+
+ def install(self, spec, prefix):
+ configure('--prefix=%s' % prefix)
+
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/mbedtls/package.py b/var/spack/repos/builtin/packages/mbedtls/package.py
new file mode 100644
index 0000000000..3da00cf417
--- /dev/null
+++ b/var/spack/repos/builtin/packages/mbedtls/package.py
@@ -0,0 +1,22 @@
+from spack import *
+
+class Mbedtls(Package):
+ """
+ mbed TLS (formerly known as PolarSSL) makes it trivially easy for developers to include cryptographic and SSL/TLS capabilities in their (embedded) products, facilitating this functionality with a minimal coding footprint.
+ """
+ homepage = "https://tls.mbed.org"
+ url = "https://github.com/ARMmbed/mbedtls/archive/mbedtls-2.2.1.tar.gz"
+
+ version('2.2.1' , '73a38f96898d6d03e32f55dd9f9a67be')
+ version('2.2.0' , 'eaf4586c1ef93ae872e606b6c1203942')
+ version('2.1.4' , '40cdf67b6c6d92c9cbcfd552d39ea3ae')
+ version('2.1.3' , '7eb4cf1dfa68578a2c8dbd0b6fa752dd')
+ version('1.3.16', '4144d7320c691f721aeb9e67a1bc38e0')
+
+ depends_on('cmake')
+
+ def install(self, spec, prefix):
+ cmake('.', *std_cmake_args)
+
+ make()
+ make("install")
diff --git a/var/spack/packages/memaxes/package.py b/var/spack/repos/builtin/packages/memaxes/package.py
index 76d5d3f831..4b1da558a2 100644
--- a/var/spack/packages/memaxes/package.py
+++ b/var/spack/repos/builtin/packages/memaxes/package.py
@@ -3,10 +3,10 @@ from spack import *
class Memaxes(Package):
"""MemAxes is a visualizer for sampled memory trace data."""
- homepage = "https://github.com/scalability-llnl/MemAxes"
+ homepage = "https://github.com/llnl/MemAxes"
version('0.5', '5874f3fda9fd2d313c0ff9684f915ab5',
- url='https://github.com/scalability-llnl/MemAxes/archive/v0.5.tar.gz')
+ url='https://github.com/llnl/MemAxes/archive/v0.5.tar.gz')
depends_on("cmake@2.8.9:")
depends_on("qt@5:")
@@ -16,4 +16,3 @@ class Memaxes(Package):
cmake('..', *std_cmake_args)
make()
make("install")
-
diff --git a/var/spack/packages/mesa/package.py b/var/spack/repos/builtin/packages/mesa/package.py
index 2a04a8fd51..62da8c993b 100644
--- a/var/spack/packages/mesa/package.py
+++ b/var/spack/repos/builtin/packages/mesa/package.py
@@ -14,7 +14,7 @@ class Mesa(Package):
# mesa 7.x, 8.x, 9.x
depends_on("libdrm@2.4.33")
depends_on("llvm@3.0")
- depends_on("libxml2")
+ depends_on("libxml2+python")
# patch("llvm-fixes.patch") # using newer llvm
diff --git a/var/spack/repos/builtin/packages/metis/package.py b/var/spack/repos/builtin/packages/metis/package.py
new file mode 100644
index 0000000000..bbfc4de7d1
--- /dev/null
+++ b/var/spack/repos/builtin/packages/metis/package.py
@@ -0,0 +1,83 @@
+##############################################################################
+# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Metis(Package):
+ """
+ METIS is a set of serial programs for partitioning graphs, partitioning finite element meshes, and producing fill
+ reducing orderings for sparse matrices. The algorithms implemented in METIS are based on the multilevel
+ recursive-bisection, multilevel k-way, and multi-constraint partitioning schemes.
+ """
+
+ homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/metis/overview'
+ url = "http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis/metis-5.1.0.tar.gz"
+
+ version('5.1.0', '5465e67079419a69e0116de24fce58fe')
+
+ variant('shared', default=True, description='Enables the build of shared libraries')
+ variant('debug', default=False, description='Builds the library in debug mode')
+ variant('gdb', default=False, description='Enables gdb support')
+
+ variant('idx64', default=False, description='Use int64_t as default index type')
+ variant('double', default=False, description='Use double precision floating point types')
+
+ depends_on('cmake @2.8:') # build-time dependency
+
+ depends_on('gdb', when='+gdb')
+
+ def install(self, spec, prefix):
+
+ options = []
+ options.extend(std_cmake_args)
+
+ build_directory = join_path(self.stage.path, 'spack-build')
+ source_directory = self.stage.source_path
+
+ options.append('-DGKLIB_PATH:PATH={metis_source}/GKlib'.format(metis_source=source_directory))
+
+ if '+shared' in spec:
+ options.append('-DSHARED:BOOL=ON')
+
+ if '+debug' in spec:
+ options.extend(['-DDEBUG:BOOL=ON',
+ '-DCMAKE_BUILD_TYPE:STRING=Debug'])
+
+ if '+gdb' in spec:
+ options.append('-DGDB:BOOL=ON')
+
+ metis_header = join_path(source_directory, 'include', 'metis.h')
+
+ if '+idx64' in spec:
+ filter_file('IDXTYPEWIDTH 32', 'IDXTYPEWIDTH 64', metis_header)
+
+ if '+double' in spec:
+ filter_file('REALTYPEWIDTH 32', 'REALTYPEWIDTH 64', metis_header)
+
+ with working_dir(build_directory, create=True):
+ cmake(source_directory, *options)
+ make()
+ make("install") \ No newline at end of file
diff --git a/var/spack/packages/mpc/package.py b/var/spack/repos/builtin/packages/mpc/package.py
index 6fbfca3007..50477a0ccb 100644
--- a/var/spack/packages/mpc/package.py
+++ b/var/spack/repos/builtin/packages/mpc/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -31,6 +31,7 @@ class Mpc(Package):
homepage = "http://www.multiprecision.org"
url = "ftp://ftp.gnu.org/gnu/mpc/mpc-1.0.2.tar.gz"
+ version('1.0.3', 'd6a1d5f8ddea3abd2cc3e98f58352d26')
version('1.0.2', '68fadff3358fb3e7976c7a398a0af4c3')
depends_on("gmp")
diff --git a/var/spack/packages/mpe2/mpe2.patch b/var/spack/repos/builtin/packages/mpe2/mpe2.patch
index 3ade1f04f4..3ade1f04f4 100644
--- a/var/spack/packages/mpe2/mpe2.patch
+++ b/var/spack/repos/builtin/packages/mpe2/mpe2.patch
diff --git a/var/spack/packages/mpe2/package.py b/var/spack/repos/builtin/packages/mpe2/package.py
index 27295172cc..27295172cc 100644
--- a/var/spack/packages/mpe2/package.py
+++ b/var/spack/repos/builtin/packages/mpe2/package.py
diff --git a/var/spack/packages/mpfr/package.py b/var/spack/repos/builtin/packages/mpfr/package.py
index 9c744a22df..a1bd7529cf 100644
--- a/var/spack/packages/mpfr/package.py
+++ b/var/spack/repos/builtin/packages/mpfr/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -31,7 +31,7 @@ class Mpfr(Package):
url = "http://www.mpfr.org/mpfr-current/mpfr-3.1.3.tar.bz2"
version('3.1.3', '5fdfa3cfa5c86514ee4a241a1affa138')
- # version('3.1.2', 'ee2c3ac63bf0c2359bf08fc3ee094c19')
+ version('3.1.2', 'ee2c3ac63bf0c2359bf08fc3ee094c19')
depends_on('gmp')
diff --git a/var/spack/packages/mpibash/mpibash-4.3.patch b/var/spack/repos/builtin/packages/mpibash/mpibash-4.3.patch
index 17e285b0bf..17e285b0bf 100644
--- a/var/spack/packages/mpibash/mpibash-4.3.patch
+++ b/var/spack/repos/builtin/packages/mpibash/mpibash-4.3.patch
diff --git a/var/spack/packages/mpibash/package.py b/var/spack/repos/builtin/packages/mpibash/package.py
index d0f6dafed6..d0f6dafed6 100644
--- a/var/spack/packages/mpibash/package.py
+++ b/var/spack/repos/builtin/packages/mpibash/package.py
diff --git a/var/spack/packages/mpich/package.py b/var/spack/repos/builtin/packages/mpich/package.py
index dfff22152d..405e223b5a 100644
--- a/var/spack/packages/mpich/package.py
+++ b/var/spack/repos/builtin/packages/mpich/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -33,10 +33,18 @@ class Mpich(Package):
list_url = "http://www.mpich.org/static/downloads/"
list_depth = 2
+ version('3.2', 'f414cfa77099cd1fa1a5ae4e22db508a')
+ version('3.1.4', '2ab544607986486562e076b83937bba2')
+ version('3.1.3', '93cb17f91ac758cbf9174ecb03563778')
+ version('3.1.2', '7fbf4b81dcb74b07ae85939d1ceee7f1')
+ version('3.1.1', '40dc408b1e03cc36d80209baaa2d32b7')
+ version('3.1', '5643dd176499bfb7d25079aaff25f2ec')
version('3.0.4', '9c5d5d4fe1e17dd12153f40bc5b6dbc0')
- provides('mpi@:3', when='@3:')
- provides('mpi@:1', when='@1:')
+ variant('verbs', default=False, description='Build support for OpenFabrics verbs.')
+
+ provides('mpi@:3.0', when='@3:')
+ provides('mpi@:1.3', when='@1:')
def setup_dependent_environment(self, module, spec, dep_spec):
"""For dependencies, make mpicc's use spack wrapper."""
@@ -51,6 +59,12 @@ class Mpich(Package):
config_args = ["--prefix=" + prefix,
"--enable-shared"]
+ # Variants
+ if '+verbs' in spec:
+ config_args.append("--with-ibverbs")
+ else:
+ config_args.append("--without-ibverbs")
+
# TODO: Spack should make it so that you can't actually find
# these compilers if they're "disabled" for the current
# compiler configuration.
@@ -81,8 +95,13 @@ class Mpich(Package):
mpif77 = os.path.join(bin, 'mpif77')
mpif90 = os.path.join(bin, 'mpif90')
+ spack_cc = os.environ['CC']
+ spack_cxx = os.environ['CXX']
+ spack_f77 = os.environ['F77']
+ spack_fc = os.environ['FC']
+
kwargs = { 'ignore_absent' : True, 'backup' : False, 'string' : True }
- filter_file('CC="cc"', 'CC="%s"' % self.compiler.cc, mpicc, **kwargs)
- filter_file('CXX="c++"', 'CXX="%s"' % self.compiler.cxx, mpicxx, **kwargs)
- filter_file('F77="f77"', 'F77="%s"' % self.compiler.f77, mpif77, **kwargs)
- filter_file('FC="f90"', 'FC="%s"' % self.compiler.fc, mpif90, **kwargs)
+ filter_file('CC="%s"' % spack_cc , 'CC="%s"' % self.compiler.cc, mpicc, **kwargs)
+ filter_file('CXX="%s"'% spack_cxx, 'CXX="%s"' % self.compiler.cxx, mpicxx, **kwargs)
+ filter_file('F77="%s"'% spack_f77, 'F77="%s"' % self.compiler.f77, mpif77, **kwargs)
+ filter_file('FC="%s"' % spack_fc , 'FC="%s"' % self.compiler.fc, mpif90, **kwargs)
diff --git a/var/spack/packages/mpileaks/package.py b/var/spack/repos/builtin/packages/mpileaks/package.py
index 4ef866588c..661d9d66bf 100644
--- a/var/spack/packages/mpileaks/package.py
+++ b/var/spack/repos/builtin/packages/mpileaks/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/var/spack/repos/builtin/packages/mrnet/package.py b/var/spack/repos/builtin/packages/mrnet/package.py
new file mode 100644
index 0000000000..fed944e45f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/mrnet/package.py
@@ -0,0 +1,26 @@
+from spack import *
+
+class Mrnet(Package):
+ """The MRNet Multi-Cast Reduction Network."""
+ homepage = "http://paradyn.org/mrnet"
+ url = "ftp://ftp.cs.wisc.edu/paradyn/mrnet/mrnet_4.0.0.tar.gz"
+
+ version('4.0.0', 'd00301c078cba57ef68613be32ceea2f')
+ version('4.1.0', '5a248298b395b329e2371bf25366115c')
+ version('5.0.1', '17f65738cf1b9f9b95647ff85f69ecdd')
+
+ variant('lwthreads', default=False, description="Also build the MRNet LW threadsafe libraries")
+ parallel = False
+
+ depends_on("boost")
+
+ def install(self, spec, prefix):
+ # Build the MRNet LW thread-safe libraries when the lwthreads variant is present
+ if '+lwthreads' in spec:
+ configure("--prefix=%s" % prefix, "--enable-shared", "--enable-ltwt-threadsafe")
+ else:
+ configure("--prefix=%s" % prefix, "--enable-shared")
+
+ make()
+ make("install")
+
diff --git a/var/spack/repos/builtin/packages/mumps/Makefile.inc b/var/spack/repos/builtin/packages/mumps/Makefile.inc
new file mode 100644
index 0000000000..2e6a041878
--- /dev/null
+++ b/var/spack/repos/builtin/packages/mumps/Makefile.inc
@@ -0,0 +1,38 @@
+LPORDDIR = $(topdir)/PORD/lib/
+IPORD = -I$(topdir)/PORD/include/
+LPORD = -L$(LPORDDIR) -lpord
+
+ORDERINGSC = $(ORDERINGSF)
+LORDERINGS = $(LMETIS) $(LPORD) $(LSCOTCH)
+IORDERINGSF = $(ISCOTCH)
+IORDERINGSC = $(IMETIS) $(IPORD) $(ISCOTCH)
+
+PLAT =
+LIBEXT = .a
+OUTC = -o
+OUTF = -o
+RM = /bin/rm -f
+AR = ar vr
+RANLIB = ranlib
+
+INCSEQ = -I$(topdir)/libseq
+LIBSEQ = -L$(topdir)/libseq -lmpiseq
+
+INCPAR =
+LIBPAR = $(SCALAP)
+
+LIBOTHERS = -lpthread
+
+#Sequential:
+ifeq ($(MUMPS_TYPE),seq)
+INCS = $(INCSEQ)
+LIBS = $(LIBSEQ)
+LIBSEQNEEDED = libseqneeded
+endif
+
+#Parallel:
+ifeq ($(MUMPS_TYPE),par)
+INCS = $(INCPAR)
+LIBS = $(LIBPAR)
+LIBSEQNEEDED =
+endif
diff --git a/var/spack/repos/builtin/packages/mumps/package.py b/var/spack/repos/builtin/packages/mumps/package.py
new file mode 100644
index 0000000000..44a37903cc
--- /dev/null
+++ b/var/spack/repos/builtin/packages/mumps/package.py
@@ -0,0 +1,139 @@
+from spack import *
+import os
+
+
+class Mumps(Package):
+ """MUMPS: a MUltifrontal Massively Parallel sparse direct Solver"""
+
+ homepage = "http://mumps.enseeiht.fr"
+ url = "http://mumps.enseeiht.fr/MUMPS_5.0.1.tar.gz"
+
+ version('5.0.1', 'b477573fdcc87babe861f62316833db0')
+
+ variant('mpi', default=True, description='Activate the compilation of MUMPS with the MPI support')
+ variant('scotch', default=False, description='Activate Scotch as a possible ordering library')
+ variant('ptscotch', default=False, description='Activate PT-Scotch as a possible ordering library')
+ variant('metis', default=False, description='Activate Metis as a possible ordering library')
+ variant('parmetis', default=False, description='Activate Parmetis as a possible ordering library')
+ variant('double', default=True, description='Activate the compilation of dmumps')
+ variant('float', default=True, description='Activate the compilation of smumps')
+ variant('complex', default=True, description='Activate the compilation of cmumps and/or zmumps')
+ variant('idx64', default=False, description='Use int64_t/integer*8 as default index type')
+
+
+ depends_on('scotch + esmumps', when='~ptscotch+scotch')
+ depends_on('scotch + esmumps + mpi', when='+ptscotch')
+ depends_on('metis', when='~parmetis+metis')
+ depends_on('parmetis', when="+parmetis")
+ depends_on('blas')
+ depends_on('lapack')
+ depends_on('scalapack', when='+mpi')
+ depends_on('mpi', when='+mpi')
+
+ # This function is not a patch function because, when scalapack is
+ # needed, it uses self.spec['scalapack'].fc_link, which is set by
+ # setup_dependent_environment in scalapack. That happens after patch
+ # and before install.
+ # def patch(self):
+ def write_makefile_inc(self):
+ if ('+parmetis' in self.spec or '+ptscotch' in self.spec) and '+mpi' not in self.spec:
+ raise RuntimeError('You cannot use the variants parmetis or ptscotch without mpi')
+
+ makefile_conf = ["LIBBLAS = -L%s -lblas" % self.spec['blas'].prefix.lib]
+
+ orderings = ['-Dpord']
+
+ if '+ptscotch' in self.spec or '+scotch' in self.spec:
+ join_lib = ' -l%s' % ('pt' if '+ptscotch' in self.spec else '')
+ makefile_conf.extend(
+ ["ISCOTCH = -I%s" % self.spec['scotch'].prefix.include,
+ "LSCOTCH = -L%s %s%s" % (self.spec['scotch'].prefix.lib,
+ join_lib,
+ join_lib.join(['esmumps', 'scotch', 'scotcherr']))])
+ orderings.append('-Dscotch')
+ if '+ptscotch' in self.spec:
+ orderings.append('-Dptscotch')
+
+ if '+parmetis' in self.spec or '+metis' in self.spec:
+ libname = 'parmetis' if '+parmetis' in self.spec else 'metis'
+ makefile_conf.extend(
+ ["IMETIS = -I%s" % self.spec[libname].prefix.include,
+ "LMETIS = -L%s -l%s" % (self.spec[libname].prefix.lib, libname)])
+
+ orderings.append('-Dmetis')
+ if '+parmetis' in self.spec:
+ orderings.append('-Dparmetis')
+
+ makefile_conf.append("ORDERINGSF = %s" % (' '.join(orderings)))
+
+ # TODO: test this part; it needs a full blas, scalapack and
+ # partitioning environment with 64-bit integers
+ if '+idx64' in self.spec:
+ makefile_conf.extend(
+ # The Fortran flag below most probably works only for the Intel
+ # and GNU compilers; this is perhaps something the compiler
+ # wrappers should provide.
+ ['OPTF = -O -DALLOW_NON_INIT %s' % ('-fdefault-integer-8' if self.compiler.name == "gcc" else '-i8'),
+ 'OPTL = -O ',
+ 'OPTC = -O -DINTSIZE64'])
+ else:
+ makefile_conf.extend(
+ ['OPTF = -O -DALLOW_NON_INIT',
+ 'OPTL = -O ',
+ 'OPTC = -O '])
+
+
+ if '+mpi' in self.spec:
+ makefile_conf.extend(
+ ["CC = %s" % join_path(self.spec['mpi'].prefix.bin, 'mpicc'),
+ "FC = %s" % join_path(self.spec['mpi'].prefix.bin, 'mpif90'),
+ "FL = %s" % join_path(self.spec['mpi'].prefix.bin, 'mpif90'),
+ "SCALAP = %s" % self.spec['scalapack'].fc_link,
+ "MUMPS_TYPE = par"])
+ else:
+ makefile_conf.extend(
+ ["CC = cc",
+ "FC = fc",
+ "FL = fc",
+ "MUMPS_TYPE = seq"])
+
+ # TODO: change the value to the correct one according to the
+ # compiler possible values are -DAdd_, -DAdd__ and/or -DUPPER
+ makefile_conf.append("CDEFS = -DAdd_")
+
+
+ makefile_inc_template = join_path(os.path.dirname(self.module.__file__),
+ 'Makefile.inc')
+ with open(makefile_inc_template, "r") as fh:
+ makefile_conf.extend(fh.read().split('\n'))
+
+ with working_dir('.'):
+ with open("Makefile.inc", "w") as fh:
+ makefile_inc = '\n'.join(makefile_conf)
+ fh.write(makefile_inc)
+
+
+
+ def install(self, spec, prefix):
+ make_libs = []
+
+ # The choice to compile the ?examples targets is to have a kind of
+ # sanity check on the generated libraries.
+ if '+float' in spec:
+ make_libs.append('sexamples')
+ if '+complex' in spec:
+ make_libs.append('cexamples')
+
+ if '+double' in spec:
+ make_libs.append('dexamples')
+ if '+complex' in spec:
+ make_libs.append('zexamples')
+
+ self.write_makefile_inc()
+
+ make(*make_libs)
+
+ install_tree('lib', prefix.lib)
+ install_tree('include', prefix.include)
+ if '~mpi' in spec:
+ install('libseq/libmpiseq.a', prefix.lib)
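The OPTF line in write_makefile_inc above is one place where Python operator precedence matters: '%' binds tighter than a conditional expression, so the string formatting needs parentheses around the ternary. A quick stdlib check (the compiler name is a stand-in):

    compiler_name = "intel"
    flag = '-fdefault-integer-8' if compiler_name == "gcc" else '-i8'

    # Parenthesized form, as used above: the flag lands inside the line.
    optf = 'OPTF = -O -DALLOW_NON_INIT %s' % flag
    assert optf == 'OPTF = -O -DALLOW_NON_INIT -i8'

    # Without parentheses the ternary's false branch replaces the whole string.
    optf = 'OPTF = -O -DALLOW_NON_INIT %s' % '-fdefault-integer-8' if compiler_name == "gcc" else '-i8'
    assert optf == '-i8'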
diff --git a/var/spack/packages/munge/package.py b/var/spack/repos/builtin/packages/munge/package.py
index c737ca0354..c737ca0354 100644
--- a/var/spack/packages/munge/package.py
+++ b/var/spack/repos/builtin/packages/munge/package.py
diff --git a/var/spack/packages/muster/package.py b/var/spack/repos/builtin/packages/muster/package.py
index 722daf3d7f..0dc2e5e086 100644
--- a/var/spack/packages/muster/package.py
+++ b/var/spack/repos/builtin/packages/muster/package.py
@@ -7,8 +7,8 @@ class Muster(Package):
for performance data analysis on systems with very large
numbers of processes.
"""
- homepage = "https://github.com/scalability-llnl/muster"
- url = "https://github.com/scalability-llnl/muster/archive/v1.0.tar.gz"
+ homepage = "https://github.com/llnl/muster"
+ url = "https://github.com/llnl/muster/archive/v1.0.tar.gz"
version('1.0.1', 'd709787db7e080447afb6571ac17723c')
version('1.0', '2eec6979a4a36d3a65a792d12969be16')
diff --git a/var/spack/packages/mvapich2/ad_lustre_rwcontig_open_source.patch b/var/spack/repos/builtin/packages/mvapich2/ad_lustre_rwcontig_open_source.patch
index ff85845cf8..ff85845cf8 100644
--- a/var/spack/packages/mvapich2/ad_lustre_rwcontig_open_source.patch
+++ b/var/spack/repos/builtin/packages/mvapich2/ad_lustre_rwcontig_open_source.patch
diff --git a/var/spack/repos/builtin/packages/mvapich2/package.py b/var/spack/repos/builtin/packages/mvapich2/package.py
new file mode 100644
index 0000000000..af5ed1b088
--- /dev/null
+++ b/var/spack/repos/builtin/packages/mvapich2/package.py
@@ -0,0 +1,200 @@
+from spack import *
+import os
+
+class Mvapich2(Package):
+ """MVAPICH2 is an MPI implementation for Infiniband networks."""
+ homepage = "http://mvapich.cse.ohio-state.edu/"
+ url = "http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.2b.tar.gz"
+
+ version('2.2b', '5651e8b7a72d7c77ca68da48f3a5d108')
+ version('2.2a', 'b8ceb4fc5f5a97add9b3ff1b9cbe39d2')
+ version('2.0', '9fbb68a4111a8b6338e476dc657388b4')
+ version('1.9', '5dc58ed08fd3142c260b70fe297e127c')
+
+ patch('ad_lustre_rwcontig_open_source.patch', when='@1.9')
+
+ provides('mpi@:2.2', when='@1.9') # MVAPICH2-1.9 supports MPI 2.2
+ provides('mpi@:3.0', when='@2.0:') # MVAPICH2-2.0 supports MPI 3.0
+
+ variant('debug', default=False, description='Enables debug information and error messages at run-time')
+
+ ##########
+ # TODO : Process managers should be grouped into the same variant, as soon as variant capabilities are extended
+ # See https://groups.google.com/forum/#!topic/spack/F8-f8B4_0so
+ SLURM = 'slurm'
+ HYDRA = 'hydra'
+ GFORKER = 'gforker'
+ REMSHELL = 'remshell'
+ SLURM_INCOMPATIBLE_PMS = (HYDRA, GFORKER, REMSHELL)
+ variant(SLURM, default=False, description='Sets slurm as the only process manager')
+ variant(HYDRA, default=False, description='Sets hydra as one of the process managers')
+ variant(GFORKER, default=False, description='Sets gforker as one of the process managers')
+ variant(REMSHELL, default=False, description='Sets remshell as one of the process managers')
+ ##########
+
+ ##########
+ # TODO : Network types should be grouped into the same variant, as soon as variant capabilities are extended
+ PSM = 'psm'
+ SOCK = 'sock'
+ NEMESISIBTCP = 'nemesisibtcp'
+ NEMESISIB = 'nemesisib'
+ NEMESIS = 'nemesis'
+ MRAIL = 'mrail'
+ SUPPORTED_NETWORKS = (PSM, SOCK, NEMESIS, NEMESISIB, NEMESISIBTCP)
+ variant(PSM, default=False, description='Configures a build for QLogic PSM-CH3')
+ variant(SOCK, default=False, description='Configures a build for TCP/IP-CH3')
+ variant(NEMESISIBTCP, default=False, description='Configures a build for both OFA-IB-Nemesis and TCP/IP-Nemesis')
+ variant(NEMESISIB, default=False, description='Configures a build for OFA-IB-Nemesis')
+ variant(NEMESIS, default=False, description='Configures a build for TCP/IP-Nemesis')
+ variant(MRAIL, default=False, description='Configures a build for OFA-IB-CH3')
+ ##########
+
+ # FIXME : CUDA support is missing
+
+ def url_for_version(self, version):
+ base_url = "http://mvapich.cse.ohio-state.edu/download"
+ if version < Version('2.0'):
+ return "%s/mvapich2/mv2/mvapich2-%s.tar.gz" % (base_url, version)
+ else:
+ return "%s/mvapich/mv2/mvapich2-%s.tar.gz" % (base_url, version)
+
+ @staticmethod
+ def enabled(x):
+ """
+ Given a variant name returns the string that means the variant is enabled
+
+ :param x: variant name
+ :return:
+ """
+ return '+' + x
+
+ def set_build_type(self, spec, configure_args):
+ """
+ Appends to configure_args the flags that depend only on the build type (i.e. release or debug)
+
+ :param spec: spec
+ :param configure_args: list of current configure arguments
+ """
+ if '+debug' in spec:
+ build_type_options = [
+ "--disable-fast",
+ "--enable-error-checking=runtime",
+ "--enable-error-messages=all",
+ "--enable-g=dbg", "--enable-debuginfo" # Permits debugging with TotalView
+ ]
+ else:
+ build_type_options = ["--enable-fast=all"]
+
+ configure_args.extend(build_type_options)
+
+ def set_process_manager(self, spec, configure_args):
+ """
+ Appends to configure_args the flags that will enable the appropriate process managers
+
+ :param spec: spec
+ :param configure_args: list of current configure arguments
+ """
+ # Check that the slurm variant is not activated together with other process manager variants
+ has_slurm_incompatible_variants = any(self.enabled(x) in spec for x in Mvapich2.SLURM_INCOMPATIBLE_PMS)
+ if self.enabled(Mvapich2.SLURM) in spec and has_slurm_incompatible_variants:
+ raise RuntimeError(" %s : 'slurm' cannot be activated together with other process managers" % self.name)
+
+ process_manager_options = []
+ if self.enabled(Mvapich2.SLURM) in spec:
+ process_manager_options = [
+ "--with-pm=slurm"
+ ]
+ elif has_slurm_incompatible_variants:
+ pms = []
+ # The variant name is equal to the process manager name in the configuration options
+ for x in Mvapich2.SLURM_INCOMPATIBLE_PMS:
+ if self.enabled(x) in spec:
+ pms.append(x)
+ process_manager_options = [
+ "--with-pm=%s" % ':'.join(pms)
+ ]
+ configure_args.extend(process_manager_options)
+
+ def set_network_type(self, spec, configure_args):
+ # Check that at most one variant has been activated
+ count = 0
+ for x in Mvapich2.SUPPORTED_NETWORKS:
+ if self.enabled(x) in spec:
+ count += 1
+ if count > 1:
+ raise RuntimeError('network variants are mutually exclusive (only one can be selected at a time)')
+
+ # From here on, at most one network variant has been selected
+ if self.enabled(Mvapich2.PSM) in spec:
+ network_options = ["--with-device=ch3:psm"]
+ elif self.enabled(Mvapich2.SOCK) in spec:
+ network_options = ["--with-device=ch3:sock"]
+ elif self.enabled(Mvapich2.NEMESISIBTCP) in spec:
+ network_options = ["--with-device=ch3:nemesis:ib,tcp"]
+ elif self.enabled(Mvapich2.NEMESISIB) in spec:
+ network_options = ["--with-device=ch3:nemesis:ib"]
+ elif self.enabled(Mvapich2.NEMESIS) in spec:
+ network_options = ["--with-device=ch3:nemesis"]
+ elif self.enabled(Mvapich2.MRAIL) in spec:
+ network_options = ["--with-device=ch3:mrail", "--with-rdma=gen2"]
+ else:
+ network_options = [] # no network variant selected: fall back to configure's default device
+
+ configure_args.extend(network_options)
+
+ def install(self, spec, prefix):
+ # we'll set different configure flags depending on our environment
+ configure_args = [
+ "--prefix=%s" % prefix,
+ "--enable-shared",
+ "--enable-romio",
+ "--disable-silent-rules",
+ ]
+
+ if self.compiler.f77 and self.compiler.fc:
+ configure_args.append("--enable-fortran=all")
+ elif self.compiler.f77:
+ configure_args.append("--enable-fortran=f77")
+ elif self.compiler.fc:
+ configure_args.append("--enable-fortran=fc")
+ else:
+ configure_args.append("--enable-fortran=none")
+
+ # Set the type of the build (debug, release)
+ self.set_build_type(spec, configure_args)
+ # Set the process manager
+ self.set_process_manager(spec, configure_args)
+ # Determine network type by variant
+ self.set_network_type(spec, configure_args)
+
+ configure(*configure_args)
+ make()
+ make("install")
+
+ self.filter_compilers()
+
+
+ def filter_compilers(self):
+ """Run after install to make the MPI compilers use the
+ compilers that Spack built the package with.
+
+ If this isn't done, they'll have CC, CXX, F77, and FC set
+ to Spack's generic cc, c++, f77, and f90. We want them to
+ be bound to whatever compiler they were built with.
+ """
+ bin = self.prefix.bin
+ mpicc = os.path.join(bin, 'mpicc')
+ mpicxx = os.path.join(bin, 'mpicxx')
+ mpif77 = os.path.join(bin, 'mpif77')
+ mpif90 = os.path.join(bin, 'mpif90')
+
+ spack_cc = os.environ['CC']
+ spack_cxx = os.environ['CXX']
+ spack_f77 = os.environ['F77']
+ spack_fc = os.environ['FC']
+
+ kwargs = { 'ignore_absent' : True, 'backup' : False, 'string' : True }
+ filter_file('CC="%s"' % spack_cc , 'CC="%s"' % self.compiler.cc, mpicc, **kwargs)
+ filter_file('CXX="%s"'% spack_cxx, 'CXX="%s"' % self.compiler.cxx, mpicxx, **kwargs)
+ filter_file('F77="%s"'% spack_f77, 'F77="%s"' % self.compiler.f77, mpif77, **kwargs)
+ filter_file('FC="%s"' % spack_fc , 'FC="%s"' % self.compiler.fc, mpif90, **kwargs)
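A minimal sketch of the checks behind enabled() above, with plain substring tests on spec strings standing in for Spack's richer Spec matching (the spec strings are invented):

    SLURM_INCOMPATIBLE_PMS = ('hydra', 'gforker', 'remshell')

    def enabled(x):
        return '+' + x

    def slurm_conflicts(spec_string):
        """True if slurm is requested together with another process manager."""
        has_slurm = enabled('slurm') in spec_string
        has_other = any(enabled(x) in spec_string for x in SLURM_INCOMPATIBLE_PMS)
        return has_slurm and has_other

    assert not slurm_conflicts('mvapich2@2.2b+slurm')
    assert slurm_conflicts('mvapich2@2.2b+slurm+hydra')  # would raise in the package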
diff --git a/var/spack/packages/nasm/package.py b/var/spack/repos/builtin/packages/nasm/package.py
index 933b6a62c5..933b6a62c5 100644
--- a/var/spack/packages/nasm/package.py
+++ b/var/spack/repos/builtin/packages/nasm/package.py
diff --git a/var/spack/repos/builtin/packages/ncdu/package.py b/var/spack/repos/builtin/packages/ncdu/package.py
new file mode 100644
index 0000000000..234f9730d6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/ncdu/package.py
@@ -0,0 +1,28 @@
+from spack import *
+
+class Ncdu(Package):
+ """
+ Ncdu is a disk usage analyzer with an ncurses interface. It is designed
+ to find space hogs on a remote server where you don't have an entire
+ graphical setup available, but it is a useful tool even on regular desktop
+ systems. Ncdu aims to be fast, simple and easy to use, and should be able
+ to run in any minimal POSIX-like environment with ncurses installed.
+ """
+
+ homepage = "http://dev.yorhel.nl/ncdu"
+ url = "http://dev.yorhel.nl/download/ncdu-1.11.tar.gz"
+
+ version('1.11', '9e44240a5356b029f05f0e70a63c4d12')
+ version('1.10', '7535decc8d54eca811493e82d4bfab2d')
+ version('1.9' , '93258079db897d28bb8890e2db89b1fb')
+ version('1.8' , '94d7a821f8a0d7ba8ef3dd926226f7d5')
+ version('1.7' , '172047c29d232724cc62e773e82e592a')
+
+ depends_on("ncurses")
+
+ def install(self, spec, prefix):
+ configure('--prefix=%s' % prefix,
+ '--with-ncurses=%s' % spec['ncurses'].prefix)
+
+ make()
+ make("install")
diff --git a/var/spack/packages/ncurses/package.py b/var/spack/repos/builtin/packages/ncurses/package.py
index 8f5763bfdd..8dc808caac 100644
--- a/var/spack/packages/ncurses/package.py
+++ b/var/spack/repos/builtin/packages/ncurses/package.py
@@ -11,21 +11,20 @@ class Ncurses(Package):
version('5.9', '8cb9c412e5f2d96bc6f459aa8c6282a1',
url='http://ftp.gnu.org/pub/gnu/ncurses/ncurses-5.9.tar.gz')
+ version('6.0', 'ee13d052e1ead260d7c28071f46eefb1',
+ url='http://ftp.gnu.org/pub/gnu/ncurses/ncurses-6.0.tar.gz')
- def install(self, spec, prefix):
- configure("--prefix=%s" % prefix,
- "--with-shared",
- "--enable-widec",
- "--disable-pc-files",
- "--without-ada")
- make()
- make("install")
+ patch('patch_gcc_5.txt', when='%gcc@5.0:')
- configure("--prefix=%s" % prefix,
- "--with-shared",
- "--disable-widec",
- "--disable-pc-files",
- "--without-ada")
+ def install(self, spec, prefix):
+ opts = [
+ "--prefix=%s" % prefix,
+ "--with-shared",
+ "--with-cxx-shared",
+ "--enable-widec",
+ "--enable-overwrite",
+ "--disable-lib-suffixes",
+ "--without-ada"]
+ configure(*opts)
make()
make("install")
-
diff --git a/var/spack/repos/builtin/packages/ncurses/patch_gcc_5.txt b/var/spack/repos/builtin/packages/ncurses/patch_gcc_5.txt
new file mode 100644
index 0000000000..f85e07cb8d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/ncurses/patch_gcc_5.txt
@@ -0,0 +1,12 @@
+diff -Naur ncurses-6.0/ncurses/Makefile.in ncurses-6.0-patched/ncurses/Makefile.in
+--- ncurses-6.0/ncurses/Makefile.in 2015-08-06 01:15:41.000000000 +0200
++++ ncurses-6.0-patched/ncurses/Makefile.in 2015-12-15 14:58:52.710199407 +0100
+@@ -219,7 +219,7 @@
+ $(SHELL) -e $(tinfo)/MKfallback.sh $(TERMINFO) $(TERMINFO_SRC) $(TIC_PATH) $(FALLBACK_LIST) >$@
+
+ ./lib_gen.c : $(base)/MKlib_gen.sh ../include/curses.h
+- $(SHELL) -e $(base)/MKlib_gen.sh "$(CPP) $(CPPFLAGS)" "$(AWK)" generated <../include/curses.h >$@
++ $(SHELL) -e $(base)/MKlib_gen.sh "$(CPP) $(CPPFLAGS) -P" "$(AWK)" generated <../include/curses.h >$@
+
+ init_keytry.h: make_keys$(BUILD_EXEEXT) keys.list
+ ./make_keys$(BUILD_EXEEXT) keys.list > $@
diff --git a/var/spack/repos/builtin/packages/netcdf/netcdf-4.3.3-mpi.patch b/var/spack/repos/builtin/packages/netcdf/netcdf-4.3.3-mpi.patch
new file mode 100644
index 0000000000..46dda5fc9d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/netcdf/netcdf-4.3.3-mpi.patch
@@ -0,0 +1,25 @@
+diff -Nur netcdf-4.3.3/CMakeLists.txt netcdf-4.3.3.mpi/CMakeLists.txt
+--- netcdf-4.3.3/CMakeLists.txt 2015-02-12 16:44:35.000000000 -0500
++++ netcdf-4.3.3.mpi/CMakeLists.txt 2015-10-14 16:44:41.176300658 -0400
+@@ -753,6 +753,7 @@
+ SET(USE_PARALLEL OFF CACHE BOOL "")
+ MESSAGE(STATUS "Cannot find HDF5 library built with parallel support. Disabling parallel build.")
+ ELSE()
++ FIND_PACKAGE(MPI REQUIRED)
+ SET(USE_PARALLEL ON CACHE BOOL "")
+ SET(STATUS_PARALLEL "ON")
+ ENDIF()
+diff -Nur netcdf-4.3.3/liblib/CMakeLists.txt netcdf-4.3.3.mpi/liblib/CMakeLists.txt
+--- netcdf-4.3.3/liblib/CMakeLists.txt 2015-02-12 16:44:35.000000000 -0500
++++ netcdf-4.3.3.mpi/liblib/CMakeLists.txt 2015-10-14 16:44:57.757793634 -0400
+@@ -71,6 +71,10 @@
+ SET(TLL_LIBS ${TLL_LIBS} ${CURL_LIBRARY})
+ ENDIF()
+
++IF(USE_PARALLEL)
++ SET(TLL_LIBS ${TLL_LIBS} ${MPI_C_LIBRARIES})
++ENDIF()
++
+ IF(USE_HDF4)
+ SET(TLL_LIBS ${TLL_LIBS} ${HDF4_LIBRARIES})
+ ENDIF()
diff --git a/var/spack/repos/builtin/packages/netcdf/package.py b/var/spack/repos/builtin/packages/netcdf/package.py
new file mode 100644
index 0000000000..239644d894
--- /dev/null
+++ b/var/spack/repos/builtin/packages/netcdf/package.py
@@ -0,0 +1,28 @@
+from spack import *
+
+class Netcdf(Package):
+ """NetCDF is a set of software libraries and self-describing, machine-independent
+ data formats that support the creation, access, and sharing of array-oriented
+ scientific data."""
+
+ homepage = "http://www.unidata.ucar.edu/software/netcdf/"
+ url = "ftp://ftp.unidata.ucar.edu/pub/netcdf/netcdf-4.3.3.tar.gz"
+
+ version('4.3.3', '5fbd0e108a54bd82cb5702a73f56d2ae')
+
+ patch('netcdf-4.3.3-mpi.patch')
+
+ # Dependencies:
+ depends_on("cmake @2.8.12:")
+ # HDF5
+ depends_on("hdf5")
+
+ def install(self, spec, prefix):
+ with working_dir('spack-build', create=True):
+ cmake('..',
+ "-DCMAKE_INSTALL_PREFIX:PATH=%s" % prefix,
+ "-DENABLE_DAP:BOOL=OFF", # Disable DAP.
+ "-DBUILD_SHARED_LIBS:BOOL=OFF") # Don't build shared libraries (use static libs).
+
+ make()
+ make("install")
diff --git a/var/spack/packages/netgauge/package.py b/var/spack/repos/builtin/packages/netgauge/package.py
index c2378b0718..0ea42175c6 100644
--- a/var/spack/packages/netgauge/package.py
+++ b/var/spack/repos/builtin/packages/netgauge/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/var/spack/packages/netlib-blas/package.py b/var/spack/repos/builtin/packages/netlib-blas/package.py
index 85e97323d3..85e97323d3 100644
--- a/var/spack/packages/netlib-blas/package.py
+++ b/var/spack/repos/builtin/packages/netlib-blas/package.py
diff --git a/var/spack/packages/netlib-lapack/package.py b/var/spack/repos/builtin/packages/netlib-lapack/package.py
index fb6b99e27c..fb6b99e27c 100644
--- a/var/spack/packages/netlib-lapack/package.py
+++ b/var/spack/repos/builtin/packages/netlib-lapack/package.py
diff --git a/var/spack/repos/builtin/packages/netlib-scalapack/package.py b/var/spack/repos/builtin/packages/netlib-scalapack/package.py
new file mode 100644
index 0000000000..5be91c4a40
--- /dev/null
+++ b/var/spack/repos/builtin/packages/netlib-scalapack/package.py
@@ -0,0 +1,50 @@
+from spack import *
+
+class NetlibScalapack(Package):
+ """ScaLAPACK is a library of high-performance linear algebra routines for parallel distributed memory machines"""
+
+ homepage = "http://www.netlib.org/scalapack/"
+ url = "http://www.netlib.org/scalapack/scalapack-2.0.2.tgz"
+
+ version('2.0.2', '2f75e600a2ba155ed9ce974a1c4b536f')
+ version('2.0.1', '17b8cde589ea0423afe1ec43e7499161')
+ version('2.0.0', '9e76ae7b291be27faaad47cfc256cbfe')
+ # versions before 2.0.0 do not use cmake and require blacs as
+ # a separate package
+
+ variant('shared', default=True, description='Build the shared library version')
+ variant('fpic', default=False, description="Build with -fpic compiler option")
+
+ provides('scalapack')
+
+ depends_on('mpi')
+ depends_on('lapack')
+
+ def install(self, spec, prefix):
+ options = [
+ "-DBUILD_SHARED_LIBS:BOOL=%s" % 'ON' if '+shared' in spec else 'OFF',
+ "-DBUILD_STATIC_LIBS:BOOL=%s" % 'OFF' if '+shared' in spec else 'ON',
+ "-DUSE_OPTIMIZED_LAPACK_BLAS:BOOL=ON", # forces scalapack to use find_package(LAPACK)
+ ]
+
+ if '+fpic' in spec:
+ options.extend([
+ "-DCMAKE_C_FLAGS=-fPIC",
+ "-DCMAKE_Fortran_FLAGS=-fPIC"
+ ])
+
+ options.extend(std_cmake_args)
+
+ with working_dir('spack-build', create=True):
+ cmake('..', *options)
+ make()
+ make("install")
+
+ def setup_dependent_environment(self, module, spec, dependent_spec):
+ # TODO: handle operating systems other than Linux
+ lib_suffix = '.so' if '+shared' in spec['scalapack'] else '.a'
+
+ spec['scalapack'].fc_link = '-L%s -lscalapack' % spec['scalapack'].prefix.lib
+ spec['scalapack'].cc_link = spec['scalapack'].fc_link
+ spec['scalapack'].libraries = [join_path(spec['scalapack'].prefix.lib,
+ 'libscalapack%s' % lib_suffix)]
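setup_dependent_environment above attaches fc_link to the scalapack spec, and the MUMPS package earlier in this diff reads it back as the SCALAP variable in its Makefile.inc. A minimal sketch of that handshake, with a plain object standing in for a Spec (the install prefix is hypothetical):

    class FakeSpec(object):
        """Stand-in for a Spack Spec carrying extra link attributes."""
        pass

    scalapack = FakeSpec()
    scalapack.fc_link = '-L/opt/scalapack-2.0.2/lib -lscalapack'

    # What mumps' write_makefile_inc appends to its Makefile.inc:
    line = "SCALAP = %s" % scalapack.fc_link
    assert line == "SCALAP = -L/opt/scalapack-2.0.2/lib -lscalapack"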
diff --git a/var/spack/packages/nettle/package.py b/var/spack/repos/builtin/packages/nettle/package.py
index cd600b0b87..cd600b0b87 100644
--- a/var/spack/packages/nettle/package.py
+++ b/var/spack/repos/builtin/packages/nettle/package.py
diff --git a/var/spack/repos/builtin/packages/ninja/package.py b/var/spack/repos/builtin/packages/ninja/package.py
new file mode 100644
index 0000000000..9e6bf4e358
--- /dev/null
+++ b/var/spack/repos/builtin/packages/ninja/package.py
@@ -0,0 +1,21 @@
+from spack import *
+import os
+
+class Ninja(Package):
+ """ A small, fast Make alternative """
+ homepage = "https://martine.github.io/ninja/"
+ url = "https://github.com/martine/ninja/archive/v1.6.0.tar.gz"
+
+ version('1.6.0', '254133059f2da79d8727f654d7198f43')
+
+ extends('python')
+
+ def install(self, spec, prefix):
+ python('configure.py', '--bootstrap')
+
+ cp = which('cp')
+
+ bindir = os.path.join(prefix, 'bin')
+ mkdir(bindir)
+ cp('-a', '-t', bindir, 'ninja')
+ cp('-ra', 'misc', prefix)
diff --git a/var/spack/packages/ompss/package.py b/var/spack/repos/builtin/packages/ompss/package.py
index e09e0a624f..e09e0a624f 100644
--- a/var/spack/packages/ompss/package.py
+++ b/var/spack/repos/builtin/packages/ompss/package.py
diff --git a/var/spack/repos/builtin/packages/ompt-openmp/package.py b/var/spack/repos/builtin/packages/ompt-openmp/package.py
new file mode 100644
index 0000000000..e5bcfb51f0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/ompt-openmp/package.py
@@ -0,0 +1,23 @@
+from spack import *
+
+class OmptOpenmp(Package):
+ """LLVM/Clang OpenMP runtime with OMPT support. This is a fork of the OpenMPToolsInterface/LLVM-openmp fork of the official LLVM OpenMP mirror. This library provides a drop-in replacement of the OpenMP runtimes for GCC, Intel and LLVM/Clang."""
+ homepage = "https://github.com/OpenMPToolsInterface/LLVM-openmp"
+ url = "http://github.com/khuck/LLVM-openmp/archive/v0.1.tar.gz"
+
+ version('0.1', '2334e6a84b52da41b27afd9831ed5370')
+
+ # depends_on("foo")
+
+ def install(self, spec, prefix):
+ with working_dir("runtime/build", create=True):
+
+ # FIXME: Modify the configure line to suit your build system here.
+ cmake('-DCMAKE_C_COMPILER=%s' % self.compiler.cc,
+ '-DCMAKE_CXX_COMPILER=%s' % self.compiler.cxx,
+ '-DCMAKE_INSTALL_PREFIX=%s' % prefix,
+ '..', *std_cmake_args)
+
+ # FIXME: Add logic to build and install here
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/opari2/package.py b/var/spack/repos/builtin/packages/opari2/package.py
new file mode 100644
index 0000000000..c68978f5c0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/opari2/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+class Opari2(Package):
+ """
+ OPARI2 is a source-to-source instrumentation tool for OpenMP and hybrid codes. It surrounds OpenMP directives and
+ runtime library calls with calls to the POMP2 measurement interface. OPARI2 will provide you with a new
+ initialization method that allows for multi-directory and parallel builds as well as the usage of pre-instrumented
+ libraries. Furthermore, an efficient way of tracking parent-child relationships was added. Additionally, we extended
+ OPARI2 to support instrumentation of OpenMP 3.0 tied tasks.
+ """
+
+ homepage = "http://www.vi-hps.org/projects/score-p"
+ url = "http://www.vi-hps.org/upload/packages/opari2/opari2-1.1.2.tar.gz"
+
+ version('1.1.4', '245d3d11147a06de77909b0805f530c0',
+ url='http://www.vi-hps.org/upload/packages/opari2/opari2-1.1.4.tar.gz')
+ version('1.1.2', '9a262c7ca05ff0ab5f7775ae96f3539e')
+
+ def install(self, spec, prefix):
+ configure("--prefix=%s" % prefix,
+ "--enable-shared")
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/openblas/package.py b/var/spack/repos/builtin/packages/openblas/package.py
new file mode 100644
index 0000000000..9c8fa1c694
--- /dev/null
+++ b/var/spack/repos/builtin/packages/openblas/package.py
@@ -0,0 +1,25 @@
+from spack import *
+
+class Openblas(Package):
+ """OpenBLAS: An optimized BLAS library"""
+ homepage = "http://www.openblas.net"
+ url = "http://github.com/xianyi/OpenBLAS/archive/v0.2.15.tar.gz"
+
+ version('0.2.15', 'b1190f3d3471685f17cfd1ec1d252ac9')
+
+ # virtual dependency
+ provides('blas')
+ provides('lapack')
+
+ def install(self, spec, prefix):
+ make('libs', 'netlib', 'shared', 'CC=cc', 'FC=f77')
+ make('install', "PREFIX='%s'" % prefix)
+
+ # Blas virtual package should provide blas.a and libblas.a
+ with working_dir(prefix.lib):
+ symlink('libopenblas.a', 'blas.a')
+ symlink('libopenblas.a', 'libblas.a')
+
+ # Lapack virtual package should provide liblapack.a
+ with working_dir(prefix.lib):
+ symlink('libopenblas.a', 'liblapack.a')
diff --git a/var/spack/packages/openmpi/ad_lustre_rwcontig_open_source.patch b/var/spack/repos/builtin/packages/openmpi/ad_lustre_rwcontig_open_source.patch
index daa825ccbe..daa825ccbe 100644
--- a/var/spack/packages/openmpi/ad_lustre_rwcontig_open_source.patch
+++ b/var/spack/repos/builtin/packages/openmpi/ad_lustre_rwcontig_open_source.patch
diff --git a/var/spack/repos/builtin/packages/openmpi/configure.patch b/var/spack/repos/builtin/packages/openmpi/configure.patch
new file mode 100644
index 0000000000..18fb42c1d1
--- /dev/null
+++ b/var/spack/repos/builtin/packages/openmpi/configure.patch
@@ -0,0 +1,31 @@
+This patch addresses <https://github.com/open-mpi/ompi/issues/576>.
+--- a/configure
++++ b/configure
+@@ -301130,10 +301130,11 @@
+ case ${prev}${p} in
+
+ -L* | -R* | -l*)
+- # Some compilers place space between "-{L,R}" and the path.
++ # Some compilers place space between "-{L,R,l}" and the path.
+ # Remove the space.
+ if test $p = "-L" ||
+- test $p = "-R"; then
++ test $p = "-R" ||
++ test $p = "-l"; then
+ prev=$p
+ continue
+ fi
+@@ -303036,10 +303037,11 @@
+ case ${prev}${p} in
+
+ -L* | -R* | -l*)
+- # Some compilers place space between "-{L,R}" and the path.
++ # Some compilers place space between "-{L,R,l}" and the path.
+ # Remove the space.
+ if test $p = "-L" ||
+- test $p = "-R"; then
++ test $p = "-R" ||
++ test $p = "-l"; then
+ prev=$p
+ continue
+ fi
diff --git a/var/spack/packages/openmpi/llnl-platforms.patch b/var/spack/repos/builtin/packages/openmpi/llnl-platforms.patch
index f515743c4d..f515743c4d 100644
--- a/var/spack/packages/openmpi/llnl-platforms.patch
+++ b/var/spack/repos/builtin/packages/openmpi/llnl-platforms.patch
diff --git a/var/spack/repos/builtin/packages/openmpi/package.py b/var/spack/repos/builtin/packages/openmpi/package.py
new file mode 100644
index 0000000000..e4484af8c5
--- /dev/null
+++ b/var/spack/repos/builtin/packages/openmpi/package.py
@@ -0,0 +1,135 @@
+import os
+
+from spack import *
+
+
+class Openmpi(Package):
+ """Open MPI is a project combining technologies and resources from
+ several other projects (FT-MPI, LA-MPI, LAM/MPI, and PACX-MPI)
+ in order to build the best MPI library available. A completely
+ new MPI-2 compliant implementation, Open MPI offers advantages
+ for system and software vendors, application developers and
+ computer science researchers.
+ """
+
+ homepage = "http://www.open-mpi.org"
+ url = "http://www.open-mpi.org/software/ompi/v1.10/downloads/openmpi-1.10.1.tar.bz2"
+ list_url = "http://www.open-mpi.org/software/ompi/"
+ list_depth = 3
+
+ version('1.10.2', 'b2f43d9635d2d52826e5ef9feb97fd4c')
+ version('1.10.1', 'f0fcd77ed345b7eafb431968124ba16e')
+ version('1.10.0', '280cf952de68369cebaca886c5ce0304')
+ version('1.8.8', '0dab8e602372da1425e9242ae37faf8c')
+ version('1.6.5', '03aed2a4aa4d0b27196962a2a65fc475')
+
+ patch('ad_lustre_rwcontig_open_source.patch', when="@1.6.5")
+ patch('llnl-platforms.patch', when="@1.6.5")
+ patch('configure.patch', when="@1.10.0:1.10.1")
+
+ variant('psm', default=False, description='Build support for the PSM library.')
+ variant('verbs', default=False, description='Build support for OpenFabrics verbs.')
+
+ # TODO : variant support for other schedulers is missing
+ variant('tm', default=False, description='Build TM (Torque, PBSPro, and compatible) support')
+
+ provides('mpi@:2.2', when='@1.6.5')
+ provides('mpi@:3.0', when='@1.7.5:')
+
+ depends_on('hwloc')
+
+ def url_for_version(self, version):
+ return "http://www.open-mpi.org/software/ompi/v%s/downloads/openmpi-%s.tar.bz2" % (version.up_to(2), version)
+
+ def setup_dependent_environment(self, module, spec, dep_spec):
+ """For dependencies, make mpicc's use spack wrapper."""
+ os.environ['OMPI_CC'] = 'cc'
+ os.environ['OMPI_CXX'] = 'c++'
+ os.environ['OMPI_FC'] = 'f90'
+ os.environ['OMPI_F77'] = 'f77'
+
+ def install(self, spec, prefix):
+ config_args = ["--prefix=%s" % prefix,
+ "--with-hwloc=%s" % spec['hwloc'].prefix,
+ "--enable-shared",
+ "--enable-static"]
+
+ # Variants
+ if '+tm' in spec:
+ config_args.append("--with-tm") # necessary for Torque support
+
+ if '+psm' in spec:
+ config_args.append("--with-psm")
+
+ if '+verbs' in spec:
+ # Up through version 1.6, this option was named --with-openib
+ if spec.satisfies('@:1.6'):
+ config_args.append("--with-openib")
+ # In version 1.7, it was renamed --with-verbs
+ elif spec.satisfies('@1.7:'):
+ config_args.append("--with-verbs")
+
+ # TODO: use variants for this, e.g. +lanl, +llnl, etc.
+ # use this for LANL builds, but for LLNL builds, we need:
+ # "--with-platform=contrib/platform/llnl/optimized"
+ if self.version == ver("1.6.5") and '+lanl' in spec:
+ config_args.append("--with-platform=contrib/platform/lanl/tlcc2/optimized-nopanasas")
+
+ # TODO: Spack should make it so that you can't actually find
+ # these compilers if they're "disabled" for the current
+ # compiler configuration.
+ if not self.compiler.f77 and not self.compiler.fc:
+ config_args.append("--enable-mpi-fortran=no")
+
+ configure(*config_args)
+ make()
+ make("install")
+
+ self.filter_compilers()
+
+ def filter_compilers(self):
+ """Run after install to make the MPI compilers use the
+ compilers that Spack built the package with.
+
+ If this isn't done, they'll have CC, CXX and FC set
+ to Spack's generic cc, c++ and f90. We want them to
+ be bound to whatever compiler they were built with.
+ """
+ kwargs = {'ignore_absent': True, 'backup': False, 'string': False}
+ dir = os.path.join(self.prefix, 'share/openmpi/')
+
+ cc_wrappers = ['mpicc-vt-wrapper-data.txt', 'mpicc-wrapper-data.txt',
+ 'ortecc-wrapper-data.txt', 'shmemcc-wrapper-data.txt']
+
+ cxx_wrappers = ['mpic++-vt-wrapper-data.txt', 'mpic++-wrapper-data.txt',
+ 'ortec++-wrapper-data.txt']
+
+ fc_wrappers = ['mpifort-vt-wrapper-data.txt',
+ 'mpifort-wrapper-data.txt', 'shmemfort-wrapper-data.txt']
+
+ for wrapper in cc_wrappers:
+ filter_file('compiler=.*', 'compiler=%s' % self.compiler.cc,
+ os.path.join(dir, wrapper), **kwargs)
+
+ for wrapper in cxx_wrappers:
+ filter_file('compiler=.*', 'compiler=%s' % self.compiler.cxx,
+ os.path.join(dir, wrapper), **kwargs)
+
+ for wrapper in fc_wrappers:
+ filter_file('compiler=.*', 'compiler=%s' % self.compiler.fc,
+ os.path.join(dir, wrapper), **kwargs)
+
+ # These are symlinks in newer versions, so check that here
+ f77_wrappers = ['mpif77-vt-wrapper-data.txt', 'mpif77-wrapper-data.txt']
+ f90_wrappers = ['mpif90-vt-wrapper-data.txt', 'mpif90-wrapper-data.txt']
+
+ for wrapper in f77_wrappers:
+ path = os.path.join(dir, wrapper)
+ if not os.path.islink(path):
+ filter_file('compiler=.*', 'compiler=%s' % self.compiler.f77,
+ path, **kwargs)
+ for wrapper in f90_wrappers:
+ path = os.path.join(dir, wrapper)
+ if not os.path.islink(path):
+ filter_file('compiler=.*', 'compiler=%s' % self.compiler.fc,
+ path, **kwargs)
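filter_compilers above rewrites whole compiler= lines with a regex (its kwargs set string to False), unlike the literal-string filtering used for MPICH and MVAPICH2. A stdlib sketch of the same substitution, with re.sub standing in for filter_file (the paths are made up):

    import re

    wrapper_line = 'compiler=/spack/lib/spack/env/gcc/gcc'
    wrapper_line = re.sub(r'compiler=.*', 'compiler=/usr/bin/gcc-4.9', wrapper_line)
    assert wrapper_line == 'compiler=/usr/bin/gcc-4.9'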
diff --git a/var/spack/repos/builtin/packages/openspeedshop/package.py b/var/spack/repos/builtin/packages/openspeedshop/package.py
new file mode 100644
index 0000000000..8c71bcb7c3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/openspeedshop/package.py
@@ -0,0 +1,216 @@
+################################################################################
+# Copyright (c) 2015 Krell Institute. All Rights Reserved.
+#
+# This program is free software; you can redistribute it and/or modify it under
+# the terms of the GNU General Public License as published by the Free Software
+# Foundation; either version 2 of the License, or (at your option) any later
+# version.
+#
+# This program is distributed in the hope that it will be useful, but WITHOUT
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
+# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
+# details.
+#
+# You should have received a copy of the GNU General Public License along with
+# this program; if not, write to the Free Software Foundation, Inc., 59 Temple
+# Place, Suite 330, Boston, MA 02111-1307 USA
+################################################################################
+
+from spack import *
+
+class Openspeedshop(Package):
+ """OpenSpeedShop is a community effort by The Krell Institute with current direct funding from DOEs NNSA.
+ It builds on top of a broad list of community infrastructures, most notably Dyninst and MRNet from UW,
+ libmonitor from Rice, and PAPI from UTK. OpenSpeedShop is an open source multi platform Linux performance
+ tool which is targeted to support performance analysis of applications running on both single node and
+ large scale IA64, IA32, EM64T, AMD64, PPC, ARM, Blue Gene and Cray platforms. OpenSpeedShop development
+ is hosted by the Krell Institute. The infrastructure and base components of OpenSpeedShop are released
+ as open source code primarily under LGPL.
+ """
+
+
+ homepage = "http://www.openspeedshop.org"
+ url = "http://sourceforge.net/projects/openss/files/openss/openspeedshop-2.2/openspeedshop-2.2.tar.gz/download"
+ version('2.2', '16cb051179c2038de4e8a845edf1d573')
+
+ #homepage = "http://www.openspeedshop.org"
+ #url = "http://sourceforge.net/projects/openss/files/openss/openspeedshop-2.1/openspeedshop-2.1.tar.gz/download"
+ #version('2.1', 'bdaa57c1a0db9d0c3e0303fd8496c507')
+
+ # optional mirror template
+ #url = "file:/g/g24/jeg/openspeedshop-2.1.tar.gz"
+ #version('2.1', '64ee17166519838c7b94a1adc138e94f')
+
+
+
+ parallel = False
+
+ variant('offline', default=True, description="build with offline instrumentor enabled.")
+ variant('cbtf', default=False, description="build with cbtf instrumentor enabled.")
+ variant('runtime', default=False, description="build only the runtime libraries and collectors.")
+ variant('frontend', default=False, description="build only the front-end tool using the runtime_dir to point to the target build.")
+ variant('cuda', default=False, description="build with cuda packages included.")
+ variant('ptgf', default=False, description="build with the PTGF based gui package enabled.")
+ variant('intelmic', default=False, description="build for the Intel MIC platform.")
+ variant('cray', default=False, description="build for Cray platforms.")
+ variant('bluegene', default=False, description="build for Blue Gene platforms.")
+ variant('rtfe', default=False, description="build for generic cluster platforms that have different processors on the fe and be nodes.")
+
+ # Dependencies for openspeedshop that are common to all the variants of the OpenSpeedShop build
+ depends_on("bison")
+ depends_on("flex")
+ depends_on("binutils@2.24+krellpatch")
+ depends_on("libelf")
+ depends_on("libdwarf")
+ depends_on("sqlite")
+ depends_on("boost@1.50.0")
+ depends_on("dyninst@8.2.1")
+ depends_on("python")
+ depends_on("qt@3.3.8b+krellpatch")
+
+ # Dependencies only for the openspeedshop offline package.
+ depends_on("libunwind", when='+offline')
+ depends_on("papi", when='+offline')
+ depends_on("libmonitor+krellpatch", when='+offline')
+ #depends_on("openmpi+krelloptions", when='+offline')
+ #depends_on("openmpi", when='+offline')
+ #depends_on("mpich", when='+offline')
+
+ # Dependencies only for the openspeedshop cbtf package.
+ depends_on("cbtf", when='+cbtf')
+ depends_on("cbtf-krell", when='+cbtf')
+ depends_on("cbtf-argonavis", when='+cbtf')
+ depends_on("mrnet@4.1.0:+lwthreads", when='+cbtf')
+
+ def install(self, spec, prefix):
+
+ #openmpi_prefix_path = "/opt/openmpi-1.8.2"
+ #mvapich_prefix_path = "/usr/local/tools/mvapich-gnu"
+ #'-DOPENMPI_DIR=%s' % spec['openmpi'].prefix,
+ #'-DOPENMPI_DIR=%s' % openmpi_prefix_path,
+ #'-DMVAPICH_DIR=%s' % mvapich_prefix_path,
+
+ # FIXME: How do we make this dynamic in spack? That is, can we specify the paths to cuda dynamically?
+ # WAITING for external package support.
+ #if '+cuda' in spec:
+ # cuda_prefix_path = "/usr/local/cuda-6.0"
+ # cupti_prefix_path = "/usr/local/cuda-6.0/extras/CUPTI"
+
+ if '+offline' in spec:
+ instrumentor_setting = "offline"
+ if '+runtime' in spec:
+ with working_dir('build_runtime', create=True):
+ cmake('..',
+ '-DCMAKE_INSTALL_PREFIX=%s' % prefix,
+ '-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64,
+ '-DINSTRUMENTOR=%s' % instrumentor_setting,
+ '-DLIBMONITOR_DIR=%s' % spec['libmonitor'].prefix,
+ '-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix,
+ '-DPAPI_DIR=%s' % spec['papi'].prefix,
+ *std_cmake_args)
+ make("clean")
+ make()
+ make("install")
+ else:
+ cmake_prefix_path = join_path(spec['dyninst'].prefix)
+ with working_dir('build', create=True):
+ #python_vers=join_path(spec['python'].version[:2])
+ #'-DOPENMPI_DIR=%s' % openmpi_prefix_path,
+ #'-DMVAPICH_DIR=%s' % mvapich_prefix_path,
+ python_vers='%d.%d' % spec['python'].version[:2]
+ cmake('..',
+ '-DCMAKE_INSTALL_PREFIX=%s' % prefix,
+ '-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64,
+ '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path,
+ '-DINSTRUMENTOR=%s' % instrumentor_setting,
+ '-DBINUTILS_DIR=%s' % spec['binutils'].prefix,
+ '-DLIBELF_DIR=%s' % spec['libelf'].prefix,
+ '-DLIBDWARF_DIR=%s' % spec['libdwarf'].prefix,
+ '-DLIBMONITOR_DIR=%s' % spec['libmonitor'].prefix,
+ '-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix,
+ '-DPAPI_DIR=%s' % spec['papi'].prefix,
+ '-DSQLITE3_DIR=%s' % spec['sqlite'].prefix,
+ '-DQTLIB_DIR=%s' % spec['qt'].prefix,
+ '-DPYTHON_EXECUTABLE=%s' % join_path(spec['python'].prefix + '/bin/python'),
+ '-DPYTHON_INCLUDE_DIR=%s' % join_path(spec['python'].prefix.include) + '/python' + python_vers,
+ '-DPYTHON_LIBRARY=%s' % join_path(spec['python'].prefix.lib) + '/libpython' + python_vers + '.so',
+ '-DBoost_NO_SYSTEM_PATHS=TRUE',
+ '-DBOOST_ROOT=%s' % spec['boost'].prefix,
+ '-DDYNINST_DIR=%s' % spec['dyninst'].prefix,
+ *std_cmake_args)
+ make("clean")
+ make()
+ make("install")
+
+ elif '+cbtf' in spec:
+ instrumentor_setting = "cbtf"
+ cmake_prefix_path = join_path(spec['cbtf'].prefix) + ':' + join_path(spec['cbtf-krell'].prefix) + ':' + join_path(spec['dyninst'].prefix)
+ if '+runtime' in spec:
+ with working_dir('build_cbtf_runtime', create=True):
+ python_vers='%d.%d' % spec['python'].version[:2]
+ cmake('..',
+ '-DCMAKE_INSTALL_PREFIX=%s' % prefix,
+ '-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64,
+ '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path,
+ '-DINSTRUMENTOR=%s' % instrumentor_setting,
+ '-DBINUTILS_DIR=%s' % spec['binutils'].prefix,
+ '-DLIBELF_DIR=%s' % spec['libelf'].prefix,
+ '-DLIBDWARF_DIR=%s' % spec['libdwarf'].prefix,
+ '-DCBTF_DIR=%s' % spec['cbtf'].prefix,
+ '-DCBTF_KRELL_DIR=%s' % spec['cbtf-krell'].prefix,
+ '-DPYTHON_EXECUTABLE=%s' % join_path(spec['python'].prefix + '/bin/python'),
+ '-DPYTHON_INCLUDE_DIR=%s' % join_path(spec['python'].prefix.include) + '/python' + python_vers,
+ '-DPYTHON_LIBRARY=%s' % join_path(spec['python'].prefix.lib) + '/libpython' + python_vers + '.so',
+ '-DBoost_NO_SYSTEM_PATHS=TRUE',
+ '-DBOOST_ROOT=%s' % spec['boost'].prefix,
+ '-DDYNINST_DIR=%s' % spec['dyninst'].prefix,
+ '-DMRNET_DIR=%s' % spec['mrnet'].prefix,
+ *std_cmake_args)
+ make("clean")
+ make()
+ make("install")
+
+ else:
+ with working_dir('build_cbtf', create=True):
+ python_vers='%d.%d' % spec['python'].version[:2]
+ #python_vers=join_path(spec['python'].version[:2])
+ cmake('..',
+ '-DCMAKE_INSTALL_PREFIX=%s' % prefix,
+ '-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64,
+ '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path,
+ '-DINSTRUMENTOR=%s' % instrumentor_setting,
+ '-DBINUTILS_DIR=%s' % spec['binutils'].prefix,
+ '-DLIBELF_DIR=%s' % spec['libelf'].prefix,
+ '-DLIBDWARF_DIR=%s' % spec['libdwarf'].prefix,
+ '-DSQLITE3_DIR=%s' % spec['sqlite'].prefix,
+ '-DCBTF_DIR=%s' % spec['cbtf'].prefix,
+ '-DCBTF_KRELL_DIR=%s' % spec['cbtf-krell'].prefix,
+ '-DQTLIB_DIR=%s' % spec['qt'].prefix,
+ '-DPYTHON_EXECUTABLE=%s' % join_path(spec['python'].prefix + '/bin/python'),
+ '-DPYTHON_INCLUDE_DIR=%s' % join_path(spec['python'].prefix.include) + '/python' + python_vers,
+ '-DPYTHON_LIBRARY=%s' % join_path(spec['python'].prefix.lib) + '/libpython' + python_vers + '.so',
+ '-DBoost_NO_SYSTEM_PATHS=TRUE',
+ '-DBOOST_ROOT=%s' % spec['boost'].prefix,
+ '-DDYNINST_DIR=%s' % spec['dyninst'].prefix,
+ '-DMRNET_DIR=%s' % spec['mrnet'].prefix,
+ *std_cmake_args)
+ make("clean")
+ make()
+ make("install")
+
+ #if '+frontend' in spec:
+ # with working_dir('build_frontend', create=True):
+ # tbd
+
+
+ #if '+intelmic' in spec:
+ # with working_dir('build_intelmic_compute', create=True):
+ # tbd
+ # with working_dir('build_intelmic_frontend', create=True):
+ # tbd
+
+ #if '+cray' in spec:
+ # with working_dir('build_cray_compute', create=True):
+ # tbd
+ # with working_dir('build_cray_frontend', create=True):
+ # tbd
diff --git a/var/spack/repos/builtin/packages/openssl/package.py b/var/spack/repos/builtin/packages/openssl/package.py
new file mode 100644
index 0000000000..bbb169ec6b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/openssl/package.py
@@ -0,0 +1,40 @@
+from spack import *
+
+class Openssl(Package):
+ """The OpenSSL Project is a collaborative effort to develop a
+ robust, commercial-grade, full-featured, and Open Source
+ toolkit implementing the Secure Sockets Layer (SSL v2/v3) and
+ Transport Layer Security (TLS v1) protocols as well as a
+ full-strength general purpose cryptography library."""
+ homepage = "http://www.openssl.org"
+ url = "http://www.openssl.org/source/openssl-1.0.1h.tar.gz"
+
+ version('1.0.1h', '8d6d684a9430d5cc98a62a5d8fbda8cf')
+ version('1.0.2d', '38dd619b2e77cbac69b99f52a053d25a')
+ version('1.0.2e', '5262bfa25b60ed9de9f28d5d52d77fc5')
+
+ depends_on("zlib")
+ parallel = False
+
+ def install(self, spec, prefix):
+ # OpenSSL uses a variable APPS in its Makefile. If it happens to be set
+ # in the environment, then this will override what is set in the
+ # Makefile, leading to build errors.
+ env.pop('APPS', None)
+ if spec.satisfies("=darwin-x86_64") or spec.satisfies("=ppc64"):
+ # This needs to be done for all 64-bit architectures (except Linux,
+ # where it happens automatically?)
+ env['KERNEL_BITS'] = '64'
+ config = Executable("./config")
+ config("--prefix=%s" % prefix,
+ "--openssldir=%s" % join_path(prefix, 'etc', 'openssl'),
+ "zlib",
+ "no-krb5",
+ "shared")
+ # Remove non-standard compiler options if present. These options are
+ # present e.g. on Darwin. They are non-standard, i.e. most compilers
+ # (e.g. gcc) will not accept them.
+ filter_file(r'-arch x86_64', '', 'Makefile')
+
+ make()
+ make("install")
diff --git a/var/spack/packages/otf/package.py b/var/spack/repos/builtin/packages/otf/package.py
index 52893dd265..52893dd265 100644
--- a/var/spack/packages/otf/package.py
+++ b/var/spack/repos/builtin/packages/otf/package.py
diff --git a/var/spack/repos/builtin/packages/otf2/package.py b/var/spack/repos/builtin/packages/otf2/package.py
new file mode 100644
index 0000000000..c3d61bc228
--- /dev/null
+++ b/var/spack/repos/builtin/packages/otf2/package.py
@@ -0,0 +1,55 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Otf2(Package):
+ """
+ The Open Trace Format 2 is a highly scalable, memory-efficient event trace data format plus support library.
+ """
+
+ homepage = "http://www.vi-hps.org/score-p"
+ url = "http://www.vi-hps.org/upload/packages/otf2/otf2-1.4.tar.gz"
+
+ version('2.0', '5b546188b25bc1c4e285e06dddf75dfc',
+ url="http://www.vi-hps.org/upload/packages/otf2/otf2-2.0.tar.gz")
+ version('1.5.1', '16a9df46e0da78e374f5d12c8cdc1109',
+ url='http://www.vi-hps.org/upload/packages/otf2/otf2-1.5.1.tar.gz')
+ version('1.4', 'a23c42e936eb9209c4e08b61c3cf5092',
+ url="http://www.vi-hps.org/upload/packages/otf2/otf2-1.4.tar.gz")
+ version('1.3.1', 'd0ffc4e858455ace4f596f910e68c9f2',
+ url="http://www.vi-hps.org/upload/packages/otf2/otf2-1.3.1.tar.gz")
+ version('1.2.1', '8fb3e11fb7489896596ae2c7c83d7fc8',
+ url="http://www.vi-hps.org/upload/packages/otf2/otf2-1.2.1.tar.gz")
+
+ def install(self, spec, prefix):
+ configure_args = ["--prefix=%s" % prefix,
+ "--enable-shared",
+ "CFLAGS=-fPIC",
+ "CXXFLAGS=-fPIC"]
+ configure(*configure_args)
+ make()
+ make("install")
diff --git a/var/spack/packages/pango/package.py b/var/spack/repos/builtin/packages/pango/package.py
index df43625bf5..df43625bf5 100644
--- a/var/spack/packages/pango/package.py
+++ b/var/spack/repos/builtin/packages/pango/package.py
diff --git a/var/spack/packages/papi/package.py b/var/spack/repos/builtin/packages/papi/package.py
index 596f7114d6..910e0aa9f9 100644
--- a/var/spack/packages/papi/package.py
+++ b/var/spack/repos/builtin/packages/papi/package.py
@@ -11,8 +11,9 @@ class Papi(Package):
components that expose performance measurement opportunities
across the hardware and software stack."""
homepage = "http://icl.cs.utk.edu/papi/index.html"
- url = "http://icl.cs.utk.edu/projects/papi/downloads/papi-5.3.0.tar.gz"
+ url = "http://icl.cs.utk.edu/projects/papi/downloads/papi-5.4.1.tar.gz"
+ version('5.4.1', '9134a99219c79767a11463a76b0b01a2')
version('5.3.0', '367961dd0ab426e5ae367c2713924ffb')
def install(self, spec, prefix):
@@ -20,13 +21,10 @@ class Papi(Package):
configure_args=["--prefix=%s" % prefix]
- # need to force consistency in the use of compilers
- if spec.satisfies('%gcc'):
- configure_args.append('CC=gcc')
- configure_args.append('MPICH_CC=gcc')
- if spec.satisfies('%intel'):
- configure_args.append('CC=icc')
- configure_args.append('MPICH_CC=icc')
+ # PAPI uses MPI if MPI is present; since we don't require an
+ # MPI package, we ensure that all attempts to use MPI fail, so
+ # that PAPI does not get confused
+ configure_args.append('MPICC=:')
configure(*configure_args)
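The replaced hunk illustrates the usual way configure arguments accumulate from spec queries before the single configure(*configure_args) call; a minimal sketch of the pattern, assuming an install() body where spec, prefix, and configure are in scope:

    # Hypothetical sketch of spec-driven configure arguments.
    configure_args = ['--prefix=%s' % prefix]
    if spec.satisfies('%gcc'):        # compiler test, as in the removed hunk
        configure_args.append('CC=gcc')
    configure_args.append('MPICC=:')  # ':' is a no-op, so every MPI probe fails
    configure(*configure_args)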
diff --git a/var/spack/packages/paraver/package.py b/var/spack/repos/builtin/packages/paraver/package.py
index 5f8a153d4c..5f8a153d4c 100644
--- a/var/spack/packages/paraver/package.py
+++ b/var/spack/repos/builtin/packages/paraver/package.py
diff --git a/var/spack/repos/builtin/packages/paraview/package.py b/var/spack/repos/builtin/packages/paraview/package.py
new file mode 100644
index 0000000000..aaab352e66
--- /dev/null
+++ b/var/spack/repos/builtin/packages/paraview/package.py
@@ -0,0 +1,79 @@
+from spack import *
+
+class Paraview(Package):
+ homepage = 'http://www.paraview.org'
+ url = 'http://www.paraview.org/files/v4.4/ParaView-v4.4.0-source.tar.gz'
+
+ version('4.4.0', 'fa1569857dd680ebb4d7ff89c2227378', url='http://www.paraview.org/files/v4.4/ParaView-v4.4.0-source.tar.gz')
+
+ variant('python', default=False, description='Enable Python support')
+ variant('tcl', default=False, description='Enable TCL support')
+ variant('mpi', default=False, description='Enable MPI support')
+ variant('osmesa', default=False, description='Enable OSMesa support')
+ variant('qt', default=False, description='Enable Qt support')
+
+ depends_on('python', when='+python')
+ depends_on('py-numpy', when='+python')
+ depends_on('py-matplotlib', when='+python')
+ depends_on('tcl', when='+tcl')
+ depends_on('mpi', when='+mpi')
+ depends_on('qt@:4', when='+qt')
+
+ depends_on('bzip2')
+ depends_on('freetype')
+ depends_on('hdf5')
+ depends_on('hdf5+mpi', when='+mpi')
+ depends_on('jpeg')
+ depends_on('libpng')
+ depends_on('libtiff')
+ depends_on('libxml2')
+ depends_on('netcdf')
+ #depends_on('protobuf') # version mismatches?
+ #depends_on('sqlite') # external version not supported
+ depends_on('zlib')
+
+ def install(self, spec, prefix):
+ with working_dir('spack-build', create=True):
+ def feature_to_bool(feature, on='ON', off='OFF'):
+ if feature in spec:
+ return on
+ return off
+
+ def nfeature_to_bool(feature):
+ return feature_to_bool(feature, on='OFF', off='ON')
+
+ feature_args = std_cmake_args[:]
+ feature_args.append('-DPARAVIEW_BUILD_QT_GUI:BOOL=%s' % feature_to_bool('+qt'))
+ feature_args.append('-DPARAVIEW_ENABLE_PYTHON:BOOL=%s' % feature_to_bool('+python'))
+ if '+python' in spec:
+ feature_args.append('-DPYTHON_EXECUTABLE:FILEPATH=%s/bin/python' % spec['python'].prefix)
+ feature_args.append('-DPARAVIEW_USE_MPI:BOOL=%s' % feature_to_bool('+mpi'))
+ if '+mpi' in spec:
+ feature_args.append('-DMPIEXEC:FILEPATH=%s/bin/mpiexec' % spec['mpi'].prefix)
+ feature_args.append('-DVTK_ENABLE_TCL_WRAPPING:BOOL=%s' % feature_to_bool('+tcl'))
+ feature_args.append('-DVTK_OPENGL_HAS_OSMESA:BOOL=%s' % feature_to_bool('+osmesa'))
+ feature_args.append('-DVTK_USE_X:BOOL=%s' % nfeature_to_bool('+osmesa'))
+ feature_args.append('-DVTK_RENDERING_BACKEND:STRING=%s' % feature_to_bool('+opengl2', 'OpenGL2', 'OpenGL'))
+
+ if 'darwin' in self.spec.architecture:
+ feature_args.append('-DVTK_USE_X:BOOL=OFF')
+ feature_args.append('-DPARAVIEW_DO_UNIX_STYLE_INSTALLS:BOOL=ON')
+
+ cmake('..',
+ '-DCMAKE_INSTALL_PREFIX:PATH=%s' % prefix,
+ '-DBUILD_TESTING:BOOL=OFF',
+ '-DVTK_USE_SYSTEM_FREETYPE:BOOL=ON',
+ '-DVTK_USE_SYSTEM_HDF5:BOOL=ON',
+ '-DVTK_USE_SYSTEM_JPEG:BOOL=ON',
+ '-DVTK_USE_SYSTEM_LIBXML2:BOOL=ON',
+ '-DVTK_USE_SYSTEM_NETCDF:BOOL=ON',
+ '-DVTK_USE_SYSTEM_TIFF:BOOL=ON',
+ '-DVTK_USE_SYSTEM_ZLIB:BOOL=ON',
+ *feature_args)
+ make()
+ make('install')
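feature_to_bool and nfeature_to_bool above translate variant membership into CMake ON/OFF strings; the same idea as a standalone helper, a sketch assuming any object that supports the in operator the way a concretized spec does:

    # Hypothetical standalone form of the variant-to-CMake-flag pattern.
    def variant_flag(spec, variant, cmake_var, invert=False):
        enabled = (variant in spec) != invert
        return '-D%s:BOOL=%s' % (cmake_var, 'ON' if enabled else 'OFF')

    # variant_flag(spec, '+qt', 'PARAVIEW_BUILD_QT_GUI')
    #   -> '-DPARAVIEW_BUILD_QT_GUI:BOOL=ON' when the qt variant is enabled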
diff --git a/var/spack/repos/builtin/packages/parmetis/package.py b/var/spack/repos/builtin/packages/parmetis/package.py
new file mode 100644
index 0000000000..c897dec7e4
--- /dev/null
+++ b/var/spack/repos/builtin/packages/parmetis/package.py
@@ -0,0 +1,95 @@
+##############################################################################
+# Copyright (c) 2013-2015, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+# FIXME : A lot of code is duplicated from packages/metis/package.py. Inheriting from it may reduce
+# FIXME : the installation rules to just a few lines.
+
+
+class Parmetis(Package):
+ """
+ ParMETIS is an MPI-based parallel library that implements a variety of algorithms for partitioning unstructured
+ graphs and meshes, and for computing fill-reducing orderings of sparse matrices.
+ """
+ homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/parmetis/overview'
+ url = 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis/parmetis-4.0.3.tar.gz'
+
+ version('4.0.3', 'f69c479586bf6bb7aff6a9bc0c739628')
+
+ variant('shared', default=True, description='Enables the build of shared libraries')
+ variant('debug', default=False, description='Builds the library in debug mode')
+ variant('gdb', default=False, description='Enables gdb support')
+
+ variant('idx64', default=False, description='Use int64_t as default index type')
+ variant('double', default=False, description='Use double precision floating point types')
+
+ depends_on('cmake @2.8:') # build dependency
+ depends_on('mpi')
+
+ # FIXME : this should conflict with metis as it builds its own version internally
+
+ depends_on('gdb', when='+gdb')
+
+ def install(self, spec, prefix):
+ options = []
+ options.extend(std_cmake_args)
+
+ build_directory = join_path(self.stage.path, 'spack-build')
+ source_directory = self.stage.source_path
+ metis_source = join_path(source_directory, 'metis')
+
+ # FIXME : Once a contract is defined, MPI compilers should be retrieved indirectly via spec['mpi'] in case
+ # FIXME : they use a non-standard name
+ options.extend(['-DGKLIB_PATH:PATH={metis_source}/GKlib'.format(metis_source=metis_source),
+ '-DMETIS_PATH:PATH={metis_source}'.format(metis_source=metis_source),
+ '-DCMAKE_C_COMPILER:STRING=mpicc',
+ '-DCMAKE_CXX_COMPILER:STRING=mpicxx'])
+
+ if '+shared' in spec:
+ options.append('-DSHARED:BOOL=ON')
+
+ if '+debug' in spec:
+ options.extend(['-DDEBUG:BOOL=ON',
+ '-DCMAKE_BUILD_TYPE:STRING=Debug'])
+
+ if '+gdb' in spec:
+ options.append('-DGDB:BOOL=ON')
+
+ metis_header = join_path(metis_source, 'include', 'metis.h')
+
+ if '+idx64' in spec:
+ filter_file('IDXTYPEWIDTH 32', 'IDXTYPEWIDTH 64', metis_header)
+
+ if '+double' in spec:
+ filter_file('REALTYPEWIDTH 32', 'REALTYPEWIDTH 64', metis_header)
+
+ with working_dir(build_directory, create=True):
+ cmake(source_directory, *options)
+ make()
+ make("install")
+ # The ParMETIS build system doesn't allow an external METIS to be used, and it
+ # doesn't copy the required METIS header either.
+ install(metis_header, self.prefix.include)
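The install above follows Spack's common out-of-source CMake shape: options are accumulated from variants, then cmake runs from a scratch directory against the unpacked sources. Distilled to its skeleton, assuming the helpers used in this file:

    # Skeleton of the out-of-source CMake pattern used above.
    options = []
    options.extend(std_cmake_args)       # Spack's standard CMake arguments
    # ... append variant-driven '-D...' options here ...
    with working_dir(join_path(self.stage.path, 'spack-build'), create=True):
        cmake(self.stage.source_path, *options)
        make()
        make('install')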
diff --git a/var/spack/packages/parpack/package.py b/var/spack/repos/builtin/packages/parpack/package.py
index 622aceca04..622aceca04 100644
--- a/var/spack/packages/parpack/package.py
+++ b/var/spack/repos/builtin/packages/parpack/package.py
diff --git a/var/spack/repos/builtin/packages/patchelf/package.py b/var/spack/repos/builtin/packages/patchelf/package.py
new file mode 100644
index 0000000000..036dc6bd17
--- /dev/null
+++ b/var/spack/repos/builtin/packages/patchelf/package.py
@@ -0,0 +1,16 @@
+from spack import *
+
+class Patchelf(Package):
+ """PatchELF is a small utility to modify the dynamic linker and RPATH of ELF executables."""
+
+ homepage = "https://nixos.org/patchelf.html"
+ url = "http://nixos.org/releases/patchelf/patchelf-0.8/patchelf-0.8.tar.gz"
+ list_url = "http://nixos.org/releases/patchelf/"
+ list_depth = 2
+
+ version('0.8', '407b229e6a681ffb0e2cdd5915cb2d01')
+
+ def install(self, spec, prefix):
+ configure('--prefix=%s' % prefix)
+ make()
+ make("install")
diff --git a/var/spack/packages/pcre/package.py b/var/spack/repos/builtin/packages/pcre/package.py
index 3424048a6c..e38d337e3d 100644
--- a/var/spack/packages/pcre/package.py
+++ b/var/spack/repos/builtin/packages/pcre/package.py
@@ -8,6 +8,7 @@ class Pcre(Package):
url = "ftp://ftp.csx.cam.ac.uk/pub/software/programming/pcre/pcre-8.36.tar.bz2"
version('8.36', 'b767bc9af0c20bc9c1fe403b0d41ad97')
+ version('8.38', '00aabbfe56d5a48b270f999b508c5ad2')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
diff --git a/var/spack/repos/builtin/packages/pcre2/package.py b/var/spack/repos/builtin/packages/pcre2/package.py
new file mode 100644
index 0000000000..6a0244a15e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/pcre2/package.py
@@ -0,0 +1,15 @@
+from spack import *
+
+class Pcre2(Package):
+ """The PCRE2 package contains Perl Compatible Regular Expression
+ libraries. These are useful for implementing regular expression
+ pattern matching using the same syntax and semantics as Perl 5."""
+ homepage = "http://www.pcre.org"""
+ url = "ftp://ftp.csx.cam.ac.uk/pub/software/programming/pcre/pcre2-10.20.tar.bz2"
+
+ version('10.20', 'dcd027c57ecfdc8a6c3af9d0acf5e3f7')
+
+ def install(self, spec, prefix):
+ configure("--prefix=%s" % prefix)
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/pdt/package.py b/var/spack/repos/builtin/packages/pdt/package.py
new file mode 100644
index 0000000000..ce3b793e30
--- /dev/null
+++ b/var/spack/repos/builtin/packages/pdt/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Pdt(Package):
+ """
+ Program Database Toolkit (PDT) is a framework for analyzing source code written in several programming languages
+ and for making rich program knowledge accessible to developers of static and dynamic analysis tools. PDT implements
+ a standard program representation, the program database (PDB), that can be accessed in a uniform way through a
+ class library supporting common PDB operations.
+ """
+ homepage = "https://www.cs.uoregon.edu/research/pdt/home.php"
+ url = "https://www.cs.uoregon.edu/research/tau/pdt_releases/pdt-3.21.tar.gz"
+
+ version('3.21', '8df94298b71703decf680709a4ddf68f')
+ version('3.19', 'ba5591994998771fdab216699e362228')
+
+ def install(self, spec, prefix):
+ configure('-prefix=%s' % prefix)
+ make()
+ make("install")
diff --git a/var/spack/packages/petsc/package.py b/var/spack/repos/builtin/packages/petsc/package.py
index 4864e39bf1..87f700629d 100644
--- a/var/spack/packages/petsc/package.py
+++ b/var/spack/repos/builtin/packages/petsc/package.py
@@ -12,20 +12,19 @@ class Petsc(Package):
version('3.5.2', 'ad170802b3b058b5deb9cd1f968e7e13')
version('3.5.1', 'a557e029711ebf425544e117ffa44d8f')
+ depends_on("python @2.6:2.9") # requires Python for building
+
depends_on("boost")
depends_on("blas")
depends_on("lapack")
depends_on("hypre")
depends_on("parmetis")
depends_on("metis")
- depends_on("hdf5")
+ depends_on("hdf5+mpi")
depends_on("mpi")
def install(self, spec, prefix):
configure("--prefix=%s" % prefix,
- "CC=cc",
- "CXX=c++",
- "FC=f90",
"--with-blas-lib=%s/libblas.a" % spec['blas'].prefix.lib,
"--with-lapack-lib=%s/liblapack.a" % spec['lapack'].prefix.lib,
"--with-boost-dir=%s" % spec['boost'].prefix,
@@ -33,6 +32,7 @@ class Petsc(Package):
"--with-parmetis-dir=%s" % spec['parmetis'].prefix,
"--with-metis-dir=%s" % spec['metis'].prefix,
"--with-hdf5-dir=%s" % spec['hdf5'].prefix,
+ "--with-mpi-dir=%s" % spec['mpi'].prefix,
"--with-shared-libraries=0")
# PETSc has its own way of doing parallel make.
diff --git a/var/spack/packages/pidx/package.py b/var/spack/repos/builtin/packages/pidx/package.py
index 81aed62fb1..81aed62fb1 100644
--- a/var/spack/packages/pidx/package.py
+++ b/var/spack/repos/builtin/packages/pidx/package.py
diff --git a/var/spack/packages/pixman/package.py b/var/spack/repos/builtin/packages/pixman/package.py
index 895cbdbca5..895cbdbca5 100644
--- a/var/spack/packages/pixman/package.py
+++ b/var/spack/repos/builtin/packages/pixman/package.py
diff --git a/var/spack/packages/pkg-config/package.py b/var/spack/repos/builtin/packages/pkg-config/package.py
index 9964c6ce34..9964c6ce34 100644
--- a/var/spack/packages/pkg-config/package.py
+++ b/var/spack/repos/builtin/packages/pkg-config/package.py
diff --git a/var/spack/packages/pmgr_collective/package.py b/var/spack/repos/builtin/packages/pmgr_collective/package.py
index 5d9b02acc3..1fc47c658f 100644
--- a/var/spack/packages/pmgr_collective/package.py
+++ b/var/spack/repos/builtin/packages/pmgr_collective/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/var/spack/packages/postgresql/package.py b/var/spack/repos/builtin/packages/postgresql/package.py
index 46922b7b71..46922b7b71 100644
--- a/var/spack/packages/postgresql/package.py
+++ b/var/spack/repos/builtin/packages/postgresql/package.py
diff --git a/var/spack/packages/ppl/package.py b/var/spack/repos/builtin/packages/ppl/package.py
index 018d5c523d..018d5c523d 100644
--- a/var/spack/packages/ppl/package.py
+++ b/var/spack/repos/builtin/packages/ppl/package.py
diff --git a/var/spack/packages/protobuf/package.py b/var/spack/repos/builtin/packages/protobuf/package.py
index 34085c7ce9..34085c7ce9 100644
--- a/var/spack/packages/protobuf/package.py
+++ b/var/spack/repos/builtin/packages/protobuf/package.py
diff --git a/var/spack/repos/builtin/packages/py-astropy/package.py b/var/spack/repos/builtin/packages/py-astropy/package.py
new file mode 100644
index 0000000000..d138a514f6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-astropy/package.py
@@ -0,0 +1,28 @@
+from spack import *
+
+class PyAstropy(Package):
+ """
+ The Astropy Project is a community effort to develop a single core
+ package for Astronomy in Python and foster interoperability between
+ Python astronomy packages.
+ """
+ homepage = 'http://www.astropy.org/'
+
+ version('1.1.post1', 'b52919f657a37d45cc45f5cb0f58c44d')
+
+ def url_for_version(self, version):
+ return 'https://pypi.python.org/packages/source/a/astropy/astropy-{0}.tar.gz'.format(version)
+
+ extends('python')
+
+ depends_on('cfitsio')
+ depends_on('expat')
+ depends_on('py-h5py')
+ depends_on('py-numpy')
+ depends_on('py-scipy')
+
+ def install(self, spec, prefix):
+ python('setup.py', 'build', '--use-system-cfitsio',
+ '--use-system-expat')
+ python('setup.py', 'install', '--prefix=' + prefix)
+
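py-astropy supplies no static url for its versions; instead url_for_version derives one, and Spack calls it when fetching. A runnable sketch of the same formatting, with the hook written as a plain function:

    # Hypothetical standalone version of the url_for_version hook above.
    def url_for_version(version):
        return ('https://pypi.python.org/packages/source/a/astropy/'
                'astropy-{0}.tar.gz'.format(version))

    print(url_for_version('1.1.post1'))
    # -> https://pypi.python.org/packages/source/a/astropy/astropy-1.1.post1.tar.gz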
diff --git a/var/spack/packages/py-basemap/package.py b/var/spack/repos/builtin/packages/py-basemap/package.py
index 45f1085ba1..8dfc99b28d 100644
--- a/var/spack/packages/py-basemap/package.py
+++ b/var/spack/repos/builtin/packages/py-basemap/package.py
@@ -11,8 +11,8 @@ class PyBasemap(Package):
extends('python')
depends_on('py-setuptools')
depends_on('py-numpy')
- depends_on('py-matplotlib')
- depends_on('py-pil')
+ depends_on('py-matplotlib+gui')
+ depends_on('py-pillow')
depends_on("geos")
def install(self, spec, prefix):
diff --git a/var/spack/packages/py-biopython/package.py b/var/spack/repos/builtin/packages/py-biopython/package.py
index 8ecaf48626..8ecaf48626 100644
--- a/var/spack/packages/py-biopython/package.py
+++ b/var/spack/repos/builtin/packages/py-biopython/package.py
diff --git a/var/spack/repos/builtin/packages/py-blessings/package.py b/var/spack/repos/builtin/packages/py-blessings/package.py
new file mode 100644
index 0000000000..f2475a0efd
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-blessings/package.py
@@ -0,0 +1,15 @@
+from spack import *
+
+class PyBlessings(Package):
+ """A nicer, kinder way to write to the terminal """
+ homepage = "https://github.com/erikrose/blessings"
+ url = "https://pypi.python.org/packages/source/b/blessings/blessings-1.6.tar.gz"
+
+ version('1.6', '4f552a8ebcd4982693c92571beb99394')
+
+ depends_on('py-setuptools')
+
+ extends("python")
+
+ def install(self, spec, prefix):
+ python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/packages/py-cffi/package.py b/var/spack/repos/builtin/packages/py-cffi/package.py
index a4d37483fe..909049a67c 100644
--- a/var/spack/packages/py-cffi/package.py
+++ b/var/spack/repos/builtin/packages/py-cffi/package.py
@@ -4,7 +4,7 @@ class PyCffi(Package):
"""Foreign Function Interface for Python calling C code"""
homepage = "http://cffi.readthedocs.org/en/latest/"
# base https://pypi.python.org/pypi/cffi
- url = "https://pypi.python.org/packages/source/c/cffi/cffi-1.1.2.tar.gz#md5="
+ url = "https://pypi.python.org/packages/source/c/cffi/cffi-1.1.2.tar.gz"
version('1.1.2', 'ca6e6c45b45caa87aee9adc7c796eaea')
diff --git a/var/spack/repos/builtin/packages/py-coverage/package.py b/var/spack/repos/builtin/packages/py-coverage/package.py
new file mode 100644
index 0000000000..39b2ac3b01
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-coverage/package.py
@@ -0,0 +1,16 @@
+from spack import *
+
+class PyCoverage(Package):
+ """ Testing coverage checker for python """
+ # FIXME: add a proper url for your package's homepage here.
+ homepage = "http://nedbatchelder.com/code/coverage/"
+ url = "https://pypi.python.org/packages/source/c/coverage/coverage-4.0a6.tar.gz"
+
+ version('4.0a6', '1bb4058062646148965bef0796b61efc')
+
+ depends_on('py-setuptools')
+
+ extends('python')
+
+ def install(self, spec, prefix):
+ python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/packages/py-cython/package.py b/var/spack/repos/builtin/packages/py-cython/package.py
index 68eb735ad9..68eb735ad9 100644
--- a/var/spack/packages/py-cython/package.py
+++ b/var/spack/repos/builtin/packages/py-cython/package.py
diff --git a/var/spack/packages/py-dateutil/package.py b/var/spack/repos/builtin/packages/py-dateutil/package.py
index 0a17f2f2d2..0a17f2f2d2 100644
--- a/var/spack/packages/py-dateutil/package.py
+++ b/var/spack/repos/builtin/packages/py-dateutil/package.py
diff --git a/var/spack/packages/py-epydoc/package.py b/var/spack/repos/builtin/packages/py-epydoc/package.py
index af05510504..af05510504 100644
--- a/var/spack/packages/py-epydoc/package.py
+++ b/var/spack/repos/builtin/packages/py-epydoc/package.py
diff --git a/var/spack/repos/builtin/packages/py-funcsigs/package.py b/var/spack/repos/builtin/packages/py-funcsigs/package.py
new file mode 100644
index 0000000000..a428890288
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-funcsigs/package.py
@@ -0,0 +1,19 @@
+from spack import *
+import os
+
+class PyFuncsigs(Package):
+ """Python function signatures from PEP362 for Python 2.6, 2.7 and 3.2."""
+ homepage = "https://pypi.python.org/pypi/funcsigs"
+ url = "https://pypi.python.org/packages/source/f/funcsigs/funcsigs-0.4.tar.gz"
+
+ version('0.4', 'fb1d031f284233e09701f6db1281c2a5')
+
+ extends('python')
+
+ depends_on('py-setuptools')
+
+ def install(self, spec, prefix):
+ python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/packages/py-genders/package.py b/var/spack/repos/builtin/packages/py-genders/package.py
index c49c8fd5b2..c49c8fd5b2 100644
--- a/var/spack/packages/py-genders/package.py
+++ b/var/spack/repos/builtin/packages/py-genders/package.py
diff --git a/var/spack/packages/py-gnuplot/package.py b/var/spack/repos/builtin/packages/py-gnuplot/package.py
index ede4472c03..ede4472c03 100644
--- a/var/spack/packages/py-gnuplot/package.py
+++ b/var/spack/repos/builtin/packages/py-gnuplot/package.py
diff --git a/var/spack/packages/py-h5py/package.py b/var/spack/repos/builtin/packages/py-h5py/package.py
index 6293da5407..6293da5407 100644
--- a/var/spack/packages/py-h5py/package.py
+++ b/var/spack/repos/builtin/packages/py-h5py/package.py
diff --git a/var/spack/packages/py-ipython/package.py b/var/spack/repos/builtin/packages/py-ipython/package.py
index 8d0e64a07f..8d0e64a07f 100644
--- a/var/spack/packages/py-ipython/package.py
+++ b/var/spack/repos/builtin/packages/py-ipython/package.py
diff --git a/var/spack/packages/py-libxml2/package.py b/var/spack/repos/builtin/packages/py-libxml2/package.py
index 59005428e4..59005428e4 100644
--- a/var/spack/packages/py-libxml2/package.py
+++ b/var/spack/repos/builtin/packages/py-libxml2/package.py
diff --git a/var/spack/packages/py-lockfile/package.py b/var/spack/repos/builtin/packages/py-lockfile/package.py
index 8722914d94..8722914d94 100644
--- a/var/spack/packages/py-lockfile/package.py
+++ b/var/spack/repos/builtin/packages/py-lockfile/package.py
diff --git a/var/spack/packages/py-mako/package.py b/var/spack/repos/builtin/packages/py-mako/package.py
index 3e91ffd8e5..3e91ffd8e5 100644
--- a/var/spack/packages/py-mako/package.py
+++ b/var/spack/repos/builtin/packages/py-mako/package.py
diff --git a/var/spack/packages/py-matplotlib/package.py b/var/spack/repos/builtin/packages/py-matplotlib/package.py
index e7ce3dfd24..2167735fb8 100644
--- a/var/spack/packages/py-matplotlib/package.py
+++ b/var/spack/repos/builtin/packages/py-matplotlib/package.py
@@ -9,21 +9,28 @@ class PyMatplotlib(Package):
version('1.4.2', '7d22efb6cce475025733c50487bd8898')
version('1.4.3', '86af2e3e3c61849ac7576a6f5ca44267')
- extends('python', ignore=r'bin/nosetests.*$')
+ variant('gui', default=False, description='Enable GUI')
+ variant('ipython', default=False, description='Enable ipython support')
- depends_on('py-pyside')
- depends_on('py-ipython')
+ extends('python', ignore=r'bin/nosetests.*$|bin/pbr$')
+
+ depends_on('py-pyside', when='+gui')
+ depends_on('py-ipython', when='+ipython')
depends_on('py-pyparsing')
depends_on('py-six')
depends_on('py-dateutil')
depends_on('py-pytz')
depends_on('py-nose')
depends_on('py-numpy')
+ depends_on('py-mock')
+ depends_on('py-pbr')
+ depends_on('py-funcsigs')
- depends_on('qt')
+ depends_on('freetype')
+ depends_on('qt', when='+gui')
depends_on('bzip2')
- depends_on('tcl')
- depends_on('tk')
+ depends_on('tcl', when='+gui')
+ depends_on('tk', when='+gui')
depends_on('qhull')
def install(self, spec, prefix):
diff --git a/var/spack/packages/py-mock/package.py b/var/spack/repos/builtin/packages/py-mock/package.py
index 3b08428ba0..e89af8802a 100644
--- a/var/spack/packages/py-mock/package.py
+++ b/var/spack/repos/builtin/packages/py-mock/package.py
@@ -11,6 +11,7 @@ class PyMock(Package):
version('1.3.0', '73ee8a4afb3ff4da1b4afa287f39fdeb')
extends('python')
+ depends_on('py-pbr')
depends_on('py-setuptools@17.1:')
def install(self, spec, prefix):
diff --git a/var/spack/packages/py-mpi4py/package.py b/var/spack/repos/builtin/packages/py-mpi4py/package.py
index 8001689a18..8001689a18 100644
--- a/var/spack/packages/py-mpi4py/package.py
+++ b/var/spack/repos/builtin/packages/py-mpi4py/package.py
diff --git a/var/spack/packages/py-mx/package.py b/var/spack/repos/builtin/packages/py-mx/package.py
index 717ee0562b..717ee0562b 100644
--- a/var/spack/packages/py-mx/package.py
+++ b/var/spack/repos/builtin/packages/py-mx/package.py
diff --git a/var/spack/repos/builtin/packages/py-mysqldb1/package.py b/var/spack/repos/builtin/packages/py-mysqldb1/package.py
new file mode 100644
index 0000000000..fda02b4982
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-mysqldb1/package.py
@@ -0,0 +1,15 @@
+from spack import *
+
+class PyMysqldb1(Package):
+ """Legacy mysql bindings for python"""
+ homepage = "https://github.com/farcepest/MySQLdb1"
+ url = "https://github.com/farcepest/MySQLdb1/archive/MySQLdb-1.2.5.tar.gz"
+
+ version('1.2.5', '332c8f4955b6bc0c79ea15170bf7321b')
+
+ extends('python')
+ depends_on('py-setuptools')
+
+ def install(self, spec, prefix):
+ python('setup.py', 'install', '--prefix=%s' % prefix)
+
diff --git a/var/spack/packages/py-nose/package.py b/var/spack/repos/builtin/packages/py-nose/package.py
index e7c6cf0264..e7c6cf0264 100644
--- a/var/spack/packages/py-nose/package.py
+++ b/var/spack/repos/builtin/packages/py-nose/package.py
diff --git a/var/spack/repos/builtin/packages/py-numexpr/package.py b/var/spack/repos/builtin/packages/py-numexpr/package.py
new file mode 100644
index 0000000000..89f8a525b1
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-numexpr/package.py
@@ -0,0 +1,15 @@
+from spack import *
+import re
+
+class PyNumexpr(Package):
+ """Fast numerical expression evaluator for NumPy"""
+ homepage = "https://pypi.python.org/pypi/numexpr"
+ url = "https://pypi.python.org/packages/source/n/numexpr/numexpr-2.4.6.tar.gz"
+
+ version('2.4.6', '17ac6fafc9ea1ce3eb970b9abccb4fbd')
+
+ extends('python')
+ depends_on('py-numpy')
+
+ def install(self, spec, prefix):
+ python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/py-numpy/package.py b/var/spack/repos/builtin/packages/py-numpy/package.py
new file mode 100644
index 0000000000..0354811186
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-numpy/package.py
@@ -0,0 +1,24 @@
+from spack import *
+
+class PyNumpy(Package):
+ """array processing for numbers, strings, records, and objects."""
+ homepage = "https://pypi.python.org/pypi/numpy"
+ url = "https://pypi.python.org/packages/source/n/numpy/numpy-1.9.1.tar.gz"
+
+ version('1.9.2', 'a1ed53432dbcd256398898d35bc8e645')
+ version('1.9.1', '78842b73560ec378142665e712ae4ad9')
+
+ variant('blas', default=True)
+
+ extends('python')
+ depends_on('py-nose')
+ depends_on('netlib-blas+fpic', when='+blas')
+ depends_on('netlib-lapack+shared', when='+blas')
+
+ def install(self, spec, prefix):
+ if '+blas' in spec:
+ with open('site.cfg', 'w') as f:
+ f.write('[DEFAULT]\n')
+ f.write('libraries=lapack,blas\n')
+ f.write('library_dirs=%s/lib:%s/lib\n' % (spec['blas'].prefix, spec['lapack'].prefix))
+ python('setup.py', 'install', '--prefix=%s' % prefix)
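With hypothetical install prefixes /opt/blas and /opt/lapack, the site.cfg written above would contain the following, which numpy's build reads to locate the Spack-provided BLAS and LAPACK:

    [DEFAULT]
    libraries=lapack,blas
    library_dirs=/opt/blas/lib:/opt/lapack/lib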
diff --git a/var/spack/packages/py-pandas/package.py b/var/spack/repos/builtin/packages/py-pandas/package.py
index 5b9997faa9..5b9997faa9 100644
--- a/var/spack/packages/py-pandas/package.py
+++ b/var/spack/repos/builtin/packages/py-pandas/package.py
diff --git a/var/spack/repos/builtin/packages/py-pbr/package.py b/var/spack/repos/builtin/packages/py-pbr/package.py
new file mode 100644
index 0000000000..02957483d4
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pbr/package.py
@@ -0,0 +1,18 @@
+from spack import *
+import os
+
+class PyPbr(Package):
+ """PBR is a library that injects some useful and sensible default behaviors into your setuptools run."""
+ homepage = "https://pypi.python.org/pypi/pbr"
+ url = "https://pypi.python.org/packages/source/p/pbr/pbr-1.8.1.tar.gz"
+
+ version('1.8.1', 'c8f9285e1a4ca6f9654c529b158baa3a')
+
+ extends('python')
+
+ depends_on('py-setuptools')
+
+ def install(self, spec, prefix):
+ python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/py-periodictable/package.py b/var/spack/repos/builtin/packages/py-periodictable/package.py
new file mode 100644
index 0000000000..6a495a1cc8
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-periodictable/package.py
@@ -0,0 +1,17 @@
+from spack import *
+
+class PyPeriodictable(Package):
+ """nose extends the test loading and running features of unittest,
+ making it easier to write, find and run tests."""
+
+ homepage = "https://pypi.python.org/pypi/periodictable"
+ url = "https://pypi.python.org/packages/source/p/periodictable/periodictable-1.4.1.tar.gz"
+
+ version('1.4.1', '7246b63cc0b6b1be6e86b6616f9e866e')
+
+ depends_on('py-numpy')
+ depends_on('py-pyparsing')
+ extends('python')
+
+ def install(self, spec, prefix):
+ python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/packages/py-pexpect/package.py b/var/spack/repos/builtin/packages/py-pexpect/package.py
index ff5fac84e0..ff5fac84e0 100644
--- a/var/spack/packages/py-pexpect/package.py
+++ b/var/spack/repos/builtin/packages/py-pexpect/package.py
diff --git a/var/spack/packages/py-pil/package.py b/var/spack/repos/builtin/packages/py-pil/package.py
index 743b761981..743b761981 100644
--- a/var/spack/packages/py-pil/package.py
+++ b/var/spack/repos/builtin/packages/py-pil/package.py
diff --git a/var/spack/repos/builtin/packages/py-pillow/package.py b/var/spack/repos/builtin/packages/py-pillow/package.py
new file mode 100644
index 0000000000..adc8507bd5
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pillow/package.py
@@ -0,0 +1,14 @@
+from spack import *
+
+class PyPillow(Package):
+ """Pillow is the friendly PIL fork by Alex Clark and Contributors. PIL is the Python Imaging Library by Fredrik Lundh and Contributors. The Python Imaging Library (PIL) adds image processing capabilities to your Python interpreter. This library supports many file formats, and provides powerful image processing and graphics capabilities."""
+
+ homepage = "https://python-pillow.github.io/"
+ url = "https://pypi.python.org/packages/source/P/Pillow/Pillow-3.0.0.tar.gz"
+
+ version('3.0.0', 'fc8ac44e93da09678eac7e30c9b7377d')
+ extends('python')
+ depends_on('py-setuptools')
+
+ def install(self, spec, prefix):
+ python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/packages/py-pmw/package.py b/var/spack/repos/builtin/packages/py-pmw/package.py
index 56131811e9..56131811e9 100644
--- a/var/spack/packages/py-pmw/package.py
+++ b/var/spack/repos/builtin/packages/py-pmw/package.py
diff --git a/var/spack/packages/py-pychecker/package.py b/var/spack/repos/builtin/packages/py-pychecker/package.py
index bda5a746aa..bda5a746aa 100644
--- a/var/spack/packages/py-pychecker/package.py
+++ b/var/spack/repos/builtin/packages/py-pychecker/package.py
diff --git a/var/spack/packages/py-pycparser/package.py b/var/spack/repos/builtin/packages/py-pycparser/package.py
index f2bb679d25..f2bb679d25 100644
--- a/var/spack/packages/py-pycparser/package.py
+++ b/var/spack/repos/builtin/packages/py-pycparser/package.py
diff --git a/var/spack/packages/py-pyelftools/package.py b/var/spack/repos/builtin/packages/py-pyelftools/package.py
index d5ad32e624..d5ad32e624 100644
--- a/var/spack/packages/py-pyelftools/package.py
+++ b/var/spack/repos/builtin/packages/py-pyelftools/package.py
diff --git a/var/spack/packages/py-pygments/package.py b/var/spack/repos/builtin/packages/py-pygments/package.py
index 7e07bf6869..7e07bf6869 100644
--- a/var/spack/packages/py-pygments/package.py
+++ b/var/spack/repos/builtin/packages/py-pygments/package.py
diff --git a/var/spack/packages/py-pylint/package.py b/var/spack/repos/builtin/packages/py-pylint/package.py
index 9579708c29..9579708c29 100644
--- a/var/spack/packages/py-pylint/package.py
+++ b/var/spack/repos/builtin/packages/py-pylint/package.py
diff --git a/var/spack/packages/py-pypar/package.py b/var/spack/repos/builtin/packages/py-pypar/package.py
index af9c76ccd8..af9c76ccd8 100644
--- a/var/spack/packages/py-pypar/package.py
+++ b/var/spack/repos/builtin/packages/py-pypar/package.py
diff --git a/var/spack/packages/py-pyparsing/package.py b/var/spack/repos/builtin/packages/py-pyparsing/package.py
index a6e50ad139..a6e50ad139 100644
--- a/var/spack/packages/py-pyparsing/package.py
+++ b/var/spack/repos/builtin/packages/py-pyparsing/package.py
diff --git a/var/spack/packages/py-pyqt/package.py b/var/spack/repos/builtin/packages/py-pyqt/package.py
index 8edca105bb..8edca105bb 100644
--- a/var/spack/packages/py-pyqt/package.py
+++ b/var/spack/repos/builtin/packages/py-pyqt/package.py
diff --git a/var/spack/packages/py-pyside/package.py b/var/spack/repos/builtin/packages/py-pyside/package.py
index bb5da44d02..ffa433e18e 100644
--- a/var/spack/packages/py-pyside/package.py
+++ b/var/spack/repos/builtin/packages/py-pyside/package.py
@@ -2,7 +2,7 @@ from spack import *
import os
class PyPyside(Package):
- """array processing for numbers, strings, records, and objects."""
+ """Python bindings for Qt."""
homepage = "https://pypi.python.org/pypi/pyside"
url = "https://pypi.python.org/packages/source/P/PySide/PySide-1.2.2.tar.gz"
diff --git a/var/spack/repos/builtin/packages/py-pytables/package.py b/var/spack/repos/builtin/packages/py-pytables/package.py
new file mode 100644
index 0000000000..a5b1e78ab3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pytables/package.py
@@ -0,0 +1,19 @@
+from spack import *
+import re
+
+class PyPytables(Package):
+ """PyTables is a package for managing hierarchical datasets and designed to efficiently and easily cope with extremely large amounts of data."""
+ homepage = "http://www.pytables.org/"
+ url = "https://github.com/PyTables/PyTables/archive/v.3.2.2.tar.gz"
+
+ version('3.2.2', '7cbb0972e4d6580f629996a5bed92441')
+
+ extends('python')
+ depends_on('hdf5')
+ depends_on('py-numpy')
+ depends_on('py-numexpr')
+ depends_on('py-cython')
+
+ def install(self, spec, prefix):
+ env["HDF5_DIR"] = spec['hdf5'].prefix
+ python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/packages/py-python-daemon/package.py b/var/spack/repos/builtin/packages/py-python-daemon/package.py
index 12cbe9101c..12cbe9101c 100644
--- a/var/spack/packages/py-python-daemon/package.py
+++ b/var/spack/repos/builtin/packages/py-python-daemon/package.py
diff --git a/var/spack/packages/py-pytz/package.py b/var/spack/repos/builtin/packages/py-pytz/package.py
index da6311a784..da6311a784 100644
--- a/var/spack/packages/py-pytz/package.py
+++ b/var/spack/repos/builtin/packages/py-pytz/package.py
diff --git a/var/spack/packages/py-rpy2/package.py b/var/spack/repos/builtin/packages/py-rpy2/package.py
index a0b03d03e3..a0b03d03e3 100644
--- a/var/spack/packages/py-rpy2/package.py
+++ b/var/spack/repos/builtin/packages/py-rpy2/package.py
diff --git a/var/spack/packages/py-scientificpython/package.py b/var/spack/repos/builtin/packages/py-scientificpython/package.py
index df2c86caac..df2c86caac 100644
--- a/var/spack/packages/py-scientificpython/package.py
+++ b/var/spack/repos/builtin/packages/py-scientificpython/package.py
diff --git a/var/spack/packages/py-scikit-learn/package.py b/var/spack/repos/builtin/packages/py-scikit-learn/package.py
index 5b078ce901..5b078ce901 100644
--- a/var/spack/packages/py-scikit-learn/package.py
+++ b/var/spack/repos/builtin/packages/py-scikit-learn/package.py
diff --git a/var/spack/packages/py-scipy/package.py b/var/spack/repos/builtin/packages/py-scipy/package.py
index 3a1124cc15..3a1124cc15 100644
--- a/var/spack/packages/py-scipy/package.py
+++ b/var/spack/repos/builtin/packages/py-scipy/package.py
diff --git a/var/spack/packages/py-setuptools/package.py b/var/spack/repos/builtin/packages/py-setuptools/package.py
index 760ad4d6db..26c048bfd4 100644
--- a/var/spack/packages/py-setuptools/package.py
+++ b/var/spack/repos/builtin/packages/py-setuptools/package.py
@@ -8,6 +8,7 @@ class PySetuptools(Package):
version('11.3.1', '01f69212e019a2420c1693fb43593930')
version('16.0', '0ace0b96233516fc5f7c857d086aa3ad')
version('18.1', 'f72e87f34fbf07f299f6cb46256a0b06')
+ version('19.2', '78353b1f80375ca5e088f4b4627ffe03')
extends('python')
diff --git a/var/spack/packages/py-shiboken/package.py b/var/spack/repos/builtin/packages/py-shiboken/package.py
index e4bf4ce07e..e4bf4ce07e 100644
--- a/var/spack/packages/py-shiboken/package.py
+++ b/var/spack/repos/builtin/packages/py-shiboken/package.py
diff --git a/var/spack/packages/py-sip/package.py b/var/spack/repos/builtin/packages/py-sip/package.py
index e4a6fb6961..e4a6fb6961 100644
--- a/var/spack/packages/py-sip/package.py
+++ b/var/spack/repos/builtin/packages/py-sip/package.py
diff --git a/var/spack/packages/py-six/package.py b/var/spack/repos/builtin/packages/py-six/package.py
index 05c5bd00a9..05c5bd00a9 100644
--- a/var/spack/packages/py-six/package.py
+++ b/var/spack/repos/builtin/packages/py-six/package.py
diff --git a/var/spack/repos/builtin/packages/py-sphinx/package.py b/var/spack/repos/builtin/packages/py-sphinx/package.py
new file mode 100644
index 0000000000..ec2e89a098
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-sphinx/package.py
@@ -0,0 +1,13 @@
+from spack import *
+
+class PySphinx(Package):
+ """Sphinx Documentation Generator."""
+ homepage = "http://sphinx-doc.org"
+ url = "https://pypi.python.org/packages/source/S/Sphinx/Sphinx-1.3.1.tar.gz"
+
+ version('1.3.1', '8786a194acf9673464c5455b11fd4332')
+
+ extends('python')
+
+ def install(self, spec, prefix):
+ python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/packages/py-sympy/package.py b/var/spack/repos/builtin/packages/py-sympy/package.py
index c17e35b95f..c17e35b95f 100644
--- a/var/spack/packages/py-sympy/package.py
+++ b/var/spack/repos/builtin/packages/py-sympy/package.py
diff --git a/var/spack/repos/builtin/packages/py-tappy/package.py b/var/spack/repos/builtin/packages/py-tappy/package.py
new file mode 100644
index 0000000000..df61a909da
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-tappy/package.py
@@ -0,0 +1,15 @@
+from spack import *
+
+class PyTappy(Package):
+ """Python TAP interface module for unit tests"""
+ homepage = "https://github.com/mblayman/tappy"
+ url = "https://pypi.python.org/packages/source/t/tap.py/tap.py-1.6.tar.gz"
+
+ version('1.6', 'c8bdb93ad66e05f939905172a301bedf')
+
+ extends('python')
+ depends_on('py-setuptools')
+
+ def install(self, spec, prefix):
+ python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/py-twisted/package.py b/var/spack/repos/builtin/packages/py-twisted/package.py
new file mode 100644
index 0000000000..2fdebb6cb9
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-twisted/package.py
@@ -0,0 +1,16 @@
+from spack import *
+
+class PyTwisted(Package):
+ """An asynchronous networking framework written in Python"""
+ homepage = "https://twistedmatrix.com/"
+ url = "https://pypi.python.org/packages/source/T/Twisted/Twisted-15.3.0.tar.bz2"
+
+ version('15.4.0', '5337ffb6aeeff3790981a2cd56db9655')
+ version('15.3.0', 'b58e83da2f00b3352afad74d0c5c4599')
+
+ depends_on('py-setuptools')
+
+ extends('python')
+
+ def install(self, spec, prefix):
+ python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/py-urwid/package.py b/var/spack/repos/builtin/packages/py-urwid/package.py
new file mode 100644
index 0000000000..aaa11c681d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-urwid/package.py
@@ -0,0 +1,16 @@
+from spack import *
+
+class PyUrwid(Package):
+ """A full-featured console UI library"""
+ homepage = "http://urwid.org/"
+ url = "https://pypi.python.org/packages/source/u/urwid/urwid-1.3.0.tar.gz"
+
+ version('1.3.0', 'a989acd54f4ff1a554add464803a9175')
+
+ depends_on('py-setuptools')
+
+ extends("python")
+
+ def install(self, spec, prefix):
+ python('setup.py', 'install', '--prefix=%s' % prefix)
+
diff --git a/var/spack/packages/py-virtualenv/package.py b/var/spack/repos/builtin/packages/py-virtualenv/package.py
index 037a6fc59f..037a6fc59f 100644
--- a/var/spack/packages/py-virtualenv/package.py
+++ b/var/spack/repos/builtin/packages/py-virtualenv/package.py
diff --git a/var/spack/packages/py-yapf/package.py b/var/spack/repos/builtin/packages/py-yapf/package.py
index 12ef191515..12ef191515 100644
--- a/var/spack/packages/py-yapf/package.py
+++ b/var/spack/repos/builtin/packages/py-yapf/package.py
diff --git a/var/spack/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py
index 000881a846..a1ce06feb0 100644
--- a/var/spack/packages/python/package.py
+++ b/var/spack/repos/builtin/packages/python/package.py
@@ -2,6 +2,7 @@ import os
import re
from contextlib import closing
from llnl.util.lang import match_predicate
+from spack.util.environment import *
from spack import *
import spack
@@ -10,27 +11,47 @@ import spack
class Python(Package):
"""The Python programming language."""
homepage = "http://www.python.org"
- url = "http://www.python.org/ftp/python/2.7.8/Python-2.7.8.tar.xz"
+ url = "http://www.python.org/ftp/python/2.7.8/Python-2.7.8.tgz"
extendable = True
- version('2.7.8', 'd235bdfa75b8396942e360a70487ee00')
- version('2.7.10', 'c685ef0b8e9f27b5e3db5db12b268ac6')
+ version('3.5.1', 'be78e48cdfc1a7ad90efff146dce6cfe')
+ version('3.5.0', 'a56c0c0b45d75a0ec9c6dee933c41c36')
+ version('2.7.11', '6b6076ec9e93f05dd63e47eb9c15728b', preferred=True)
+ version('2.7.10', 'd7547558fd673bd9d38e2108c6b42521')
+ version('2.7.9', '5eebcaa0030dc4061156d3429657fb83')
+ version('2.7.8', 'd4bca0159acb0b44a781292b5231936f')
depends_on("openssl")
depends_on("bzip2")
depends_on("readline")
depends_on("ncurses")
depends_on("sqlite")
+ depends_on("zlib")
def install(self, spec, prefix):
# Need this to allow python build to find the Python installation.
env['PYTHONHOME'] = prefix
+ env['MACOSX_DEPLOYMENT_TARGET'] = '10.6'
- # Rest of install is pretty standard.
- configure("--prefix=%s" % prefix,
+ # The rest of the install is pretty standard, except that setup.py needs to see CPPFLAGS
+ # and LDFLAGS while it scans for the libraries and headers to build against.
+ configure_args = [
+ "--prefix=%s" % prefix,
"--with-threads",
- "--enable-shared")
+ "--enable-shared",
+ "CPPFLAGS=-I%s/include -I%s/include -I%s/include -I%s/include -I%s/include -I%s/include" % (
+ spec['openssl'].prefix, spec['bzip2'].prefix,
+ spec['readline'].prefix, spec['ncurses'].prefix,
+ spec['sqlite'].prefix, spec['zlib'].prefix),
+ "LDFLAGS=-L%s/lib -L%s/lib -L%s/lib -L%s/lib -L%s/lib -L%s/lib" % (
+ spec['openssl'].prefix, spec['bzip2'].prefix,
+ spec['readline'].prefix, spec['ncurses'].prefix,
+ spec['sqlite'].prefix, spec['zlib'].prefix)
+ ]
+ if spec.satisfies('@3:'):
+ configure_args.append('--without-ensurepip')
+ configure(*configure_args)
make()
make("install")
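The CPPFLAGS and LDFLAGS strings above spell out the same six dependency prefixes twice; one possible tightening, sketched under the same assumption that spec indexing is available, derives both from a single list:

    # Hypothetical refactor of the flag construction above.
    deps = ['openssl', 'bzip2', 'readline', 'ncurses', 'sqlite', 'zlib']
    prefixes = [spec[d].prefix for d in deps]
    configure_args.append(
        'CPPFLAGS=' + ' '.join('-I%s/include' % p for p in prefixes))
    configure_args.append(
        'LDFLAGS=' + ' '.join('-L%s/lib' % p for p in prefixes))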
@@ -62,7 +83,10 @@ class Python(Package):
python('setup.py', 'install', '--prefix=%s' % prefix)
"""
# Python extension builds can have a global python executable function
- module.python = Executable(join_path(spec.prefix.bin, 'python'))
+ if self.version >= Version("3.0.0") and self.version < Version("4.0.0"):
+ module.python = Executable(join_path(spec.prefix.bin, 'python3'))
+ else:
+ module.python = Executable(join_path(spec.prefix.bin, 'python'))
# Add variables for lib/pythonX.Y and lib/pythonX.Y/site-packages dirs.
module.python_lib_dir = os.path.join(ext_spec.prefix, self.python_lib_dir)
@@ -94,7 +118,7 @@ class Python(Package):
# Ignore pieces of setuptools installed by other packages.
if ext_pkg.name != 'py-setuptools':
- patterns.append(r'/site\.pyc?$')
+ patterns.append(r'/site[^/]*\.pyc?$')
patterns.append(r'setuptools\.pth')
patterns.append(r'bin/easy_install[^/]*$')
patterns.append(r'setuptools.*egg$')
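The broadened pattern r'/site[^/]*\.pyc?$' now ignores sitecustomize-style files as well as site.py and site.pyc, while still leaving site-packages contents alone; a quick check with the standard library:

    # Demonstration of the broadened ignore pattern.
    import re

    pattern = re.compile(r'/site[^/]*\.pyc?$')
    for path in ('lib/python2.7/site.py',
                 'lib/python2.7/site.pyc',
                 'lib/python2.7/sitecustomize.py',
                 'lib/python2.7/site-packages/foo.py'):
        print('%-40s %s' % (path, bool(pattern.search(path))))
    # the first three match; files under site-packages do not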
diff --git a/var/spack/packages/qhull/package.py b/var/spack/repos/builtin/packages/qhull/package.py
index 9da4078a70..f6712ced38 100644
--- a/var/spack/packages/qhull/package.py
+++ b/var/spack/repos/builtin/packages/qhull/package.py
@@ -20,6 +20,9 @@ class Qhull(Package):
version('1.0', 'd0f978c0d8dfb2e919caefa56ea2953c',
url="http://www.qhull.org/download/qhull-2012.1-src.tgz")
+ # https://github.com/qhull/qhull/pull/5
+ patch('qhull-iterator.patch')
+
def install(self, spec, prefix):
with working_dir('spack-build', create=True):
cmake('..', *std_cmake_args)
diff --git a/var/spack/repos/builtin/packages/qhull/qhull-iterator.patch b/var/spack/repos/builtin/packages/qhull/qhull-iterator.patch
new file mode 100644
index 0000000000..88e931d84f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/qhull/qhull-iterator.patch
@@ -0,0 +1,45 @@
+From 93f4b306c54bb5be7724dcc19c6e747b62ac76dd Mon Sep 17 00:00:00 2001
+From: Ben Boeckel <mathstuf@gmail.com>
+Date: Thu, 28 May 2015 11:12:25 -0400
+Subject: [PATCH] iterator: use the header
+
+Standard libraries are doing funky things with inline namespaces which
+make these declarations impossible to get right. Just include the
+header.
+---
+ src/libqhullcpp/QhullIterator.h | 3 +--
+ src/libqhullcpp/QhullLinkedList.h | 5 +----
+ 2 files changed, 2 insertions(+), 6 deletions(-)
+
+diff --git a/src/libqhullcpp/QhullIterator.h b/src/libqhullcpp/QhullIterator.h
+index 9dde894..49f3a3b 100644
+--- a/src/libqhullcpp/QhullIterator.h
++++ b/src/libqhullcpp/QhullIterator.h
+@@ -14,10 +14,9 @@ extern "C" {
+ }
+
+ #include <assert.h>
++#include <iterator>
+ #include <string>
+ #include <vector>
+-//! Avoid dependence on <iterator>
+-namespace std { struct bidirectional_iterator_tag; struct random_access_iterator_tag; }
+
+ namespace orgQhull {
+
+diff --git a/src/libqhullcpp/QhullLinkedList.h b/src/libqhullcpp/QhullLinkedList.h
+index d828ac6..00b9008 100644
+--- a/src/libqhullcpp/QhullLinkedList.h
++++ b/src/libqhullcpp/QhullLinkedList.h
+@@ -9,10 +9,7 @@
+ #ifndef QHULLLINKEDLIST_H
+ #define QHULLLINKEDLIST_H
+
+-namespace std {
+- struct bidirectional_iterator_tag;
+- struct random_access_iterator_tag;
+-}//std
++#include <iterator>
+
+ #include "QhullError.h"
+ extern "C" {
diff --git a/var/spack/packages/qt/package.py b/var/spack/repos/builtin/packages/qt/package.py
index 0e4abe3b1d..e8d843519d 100644
--- a/var/spack/packages/qt/package.py
+++ b/var/spack/repos/builtin/packages/qt/package.py
@@ -10,14 +10,23 @@ class Qt(Package):
version('5.4.0', 'e8654e4b37dd98039ba20da7a53877e6',
url='http://download.qt-project.org/official_releases/qt/5.4/5.4.0/single/qt-everywhere-opensource-src-5.4.0.tar.gz')
+
version('5.3.2', 'febb001129927a70174467ecb508a682',
url='http://download.qt.io/archive/qt/5.3/5.3.2/single/qt-everywhere-opensource-src-5.3.2.tar.gz')
version('5.2.1', 'a78408c887c04c34ce615da690e0b4c8',
url='http://download.qt.io/archive/qt/5.2/5.2.1/single/qt-everywhere-opensource-src-5.2.1.tar.gz')
+
version('4.8.6', '2edbe4d6c2eff33ef91732602f3518eb',
url="http://download.qt-project.org/official_releases/qt/4.8/4.8.6/qt-everywhere-opensource-src-4.8.6.tar.gz")
+ version('3.3.8b', '9f05b4125cfe477cc52c9742c3c09009',
+ url="http://download.qt.io/archive/qt/3/qt-x11-free-3.3.8b.tar.gz")
+
+ # Patch for Qt3 compile issues encountered when building it within the OpenSpeedShop project
+ variant('krellpatch', default=False, description="Build with the OpenSpeedShop-based patch.")
+ patch('qt3krell.patch', when='@3.3.8b+krellpatch')
+
# Use system openssl for security.
#depends_on("openssl")
@@ -25,7 +34,7 @@ class Qt(Package):
depends_on("gtkplus")
depends_on("libxml2")
depends_on("zlib")
- depends_on("dbus")
+ depends_on("dbus", when='@4:')
depends_on("libtiff")
depends_on("libpng")
depends_on("libmng")
@@ -39,7 +48,7 @@ class Qt(Package):
# depends_on("icu4c")
# OpenGL hardware acceleration
- depends_on("mesa")
+ depends_on("mesa", when='@4:')
depends_on("libxcb")
@@ -85,6 +94,15 @@ class Qt(Package):
# Don't disable all the database drivers, but should
# really get them into spack at some point.
+ @when('@3')
+ def configure(self):
+ configure('-prefix', self.prefix,
+ '-v',
+ '-thread',
+ '-shared',
+ '-release',
+ '-fast'
+ )
@when('@4')
def configure(self):
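The @when('@3') block above registers a version-specific configure; Spack's multimethod dispatch then selects the implementation whose predicate matches the concretized spec, falling through to @when('@4') and beyond for newer versions. The shape, reduced to an outline under the same decorators:

    # Outline of @when-based version dispatch (Spack multimethods).
    @when('@3')
    def configure(self):
        configure('-prefix', self.prefix, '-thread', '-shared', '-release')

    @when('@4')
    def configure(self):
        pass  # Qt4-specific arguments, defined elsewhere in this package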
diff --git a/var/spack/repos/builtin/packages/qt/qt3krell.patch b/var/spack/repos/builtin/packages/qt/qt3krell.patch
new file mode 100644
index 0000000000..3333eeacd4
--- /dev/null
+++ b/var/spack/repos/builtin/packages/qt/qt3krell.patch
@@ -0,0 +1,68 @@
+--- qt-x11-free-3.3.8b/src/tools/qmap.h 2008-01-15 13:09:13.000000000 -0600
++++ qt-x11-free-3.3.8b-fixes/src/tools/qmap.h 2015-07-08 15:47:34.757565247 -0500
+@@ -52,6 +52,7 @@
+ #ifndef QT_NO_STL
+ #include <iterator>
+ #include <map>
++#include <cstddef>
+ #endif
+
+ //#define QT_CHECK_MAP_RANGE
+--- qt-x11-free-3.3.8b/src/tools/qvaluelist.h 2008-01-15 13:09:13.000000000 -0600
++++ qt-x11-free-3.3.8b-fixes/src/tools/qvaluelist.h 2015-07-08 15:47:34.758565247 -0500
+@@ -50,6 +50,7 @@
+ #ifndef QT_NO_STL
+ #include <iterator>
+ #include <list>
++#include <cstddef>
+ #endif
+
+ //#define QT_CHECK_VALUELIST_RANGE
+--- qt-x11-free-3.3.8b/src/tools/qvaluevector.h 2008-01-15 13:09:13.000000000 -0600
++++ qt-x11-free-3.3.8b-fixes/src/tools/qvaluevector.h 2015-07-08 15:47:34.758565247 -0500
+@@ -47,6 +47,7 @@
+
+ #ifndef QT_NO_STL
+ #include <vector>
++#include <cstddef>
+ #endif
+
+ template <class T>
+--- qt-x11-free-3.3.8b/configure 2008-01-15 13:09:15.000000000 -0600
++++ qt-x11-free-3.3.8b-fixes/configure 2015-07-08 15:49:03.379560633 -0500
+@@ -2339,7 +2339,7 @@
+ else
+ echo "Do you accept the terms of the $TheLicense? \c"
+ fi
+- read acceptance
++ acceptance=yes
+ echo
+ if [ "$acceptance" = yes ]; then
+ break
+@@ -2397,7 +2397,7 @@
+ else
+ echo "Do you accept the terms of $affix license? \c"
+ fi
+- read acceptance
++ acceptance=yes
+ echo
+ if [ "$acceptance" = "yes" ]; then
+ break
+@@ -2443,7 +2443,7 @@
+ else
+ echo "Do you accept the terms of the license? \c"
+ fi
+- read acceptance
++ acceptance=yes
+ echo
+ if [ "$acceptance" = "yes" ]; then
+ break
+@@ -2524,7 +2524,7 @@
+ else
+ echo "Do you accept the terms of the $Platform License? \c"
+ fi
+- read acceptance
++ acceptance=yes
+ echo
+ if [ "$acceptance" = "yes" ]; then
+ break
diff --git a/var/spack/packages/qthreads/package.py b/var/spack/repos/builtin/packages/qthreads/package.py
index dacdb71524..dacdb71524 100644
--- a/var/spack/packages/qthreads/package.py
+++ b/var/spack/repos/builtin/packages/qthreads/package.py
diff --git a/var/spack/packages/ravel/package.py b/var/spack/repos/builtin/packages/ravel/package.py
index 01fa941cfe..d774a0ab86 100644
--- a/var/spack/packages/ravel/package.py
+++ b/var/spack/repos/builtin/packages/ravel/package.py
@@ -4,8 +4,8 @@ class Ravel(Package):
"""Ravel is a parallel communication trace visualization tool that
orders events according to logical time."""
- homepage = "https://github.com/scalability-llnl/ravel"
- url = 'https://github.com/scalability-llnl/ravel/archive/v1.0.0.tar.gz'
+ homepage = "https://github.com/llnl/ravel"
+ url = 'https://github.com/llnl/ravel/archive/v1.0.0.tar.gz'
version('1.0.0', 'b25fece58331c2adfcce76c5036485c2')
diff --git a/var/spack/packages/readline/package.py b/var/spack/repos/builtin/packages/readline/package.py
index 1b870e0e7f..1b870e0e7f 100644
--- a/var/spack/packages/readline/package.py
+++ b/var/spack/repos/builtin/packages/readline/package.py
diff --git a/var/spack/packages/rose/add_spack_compiler_recognition.patch b/var/spack/repos/builtin/packages/rose/add_spack_compiler_recognition.patch
index ce61ae4e4c..ce61ae4e4c 100644
--- a/var/spack/packages/rose/add_spack_compiler_recognition.patch
+++ b/var/spack/repos/builtin/packages/rose/add_spack_compiler_recognition.patch
diff --git a/var/spack/packages/rose/package.py b/var/spack/repos/builtin/packages/rose/package.py
index 1d7294acab..1d7294acab 100644
--- a/var/spack/packages/rose/package.py
+++ b/var/spack/repos/builtin/packages/rose/package.py
diff --git a/var/spack/repos/builtin/packages/rsync/package.py b/var/spack/repos/builtin/packages/rsync/package.py
new file mode 100644
index 0000000000..76aec3096d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/rsync/package.py
@@ -0,0 +1,15 @@
+from spack import *
+
+class Rsync(Package):
+ """rsync is an open source utility that provides fast incremental file transfer."""
+ homepage = "https://rsync.samba.org"
+ url = "https://download.samba.org/pub/rsync/rsync-3.1.1.tar.gz"
+
+ version('3.1.2', '0f758d7e000c0f7f7d3792610fad70cb')
+ version('3.1.1', '43bd6676f0b404326eee2d63be3cdcfe')
+
+ def install(self, spec, prefix):
+ configure('--prefix=%s' % prefix)
+
+ make()
+ make("install")
diff --git a/var/spack/packages/ruby/package.py b/var/spack/repos/builtin/packages/ruby/package.py
index 6b6242362c..6b6242362c 100644
--- a/var/spack/packages/ruby/package.py
+++ b/var/spack/repos/builtin/packages/ruby/package.py
diff --git a/var/spack/packages/samtools/package.py b/var/spack/repos/builtin/packages/samtools/package.py
index 72900398d8..72900398d8 100644
--- a/var/spack/packages/samtools/package.py
+++ b/var/spack/repos/builtin/packages/samtools/package.py
diff --git a/var/spack/packages/samtools/samtools1.2.patch b/var/spack/repos/builtin/packages/samtools/samtools1.2.patch
index ead3ab4e2c..ead3ab4e2c 100644
--- a/var/spack/packages/samtools/samtools1.2.patch
+++ b/var/spack/repos/builtin/packages/samtools/samtools1.2.patch
diff --git a/var/spack/repos/builtin/packages/scalasca/package.py b/var/spack/repos/builtin/packages/scalasca/package.py
new file mode 100644
index 0000000000..6de14564b2
--- /dev/null
+++ b/var/spack/repos/builtin/packages/scalasca/package.py
@@ -0,0 +1,63 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Scalasca(Package):
+ """
+ Scalasca is a software tool that supports the performance optimization of parallel programs by measuring and
+ analyzing their runtime behavior. The analysis identifies potential performance bottlenecks - in particular those
+ concerning communication and synchronization - and offers guidance in exploring their causes.
+ """
+
+ homepage = "http://www.scalasca.org"
+ url = "http://apps.fz-juelich.de/scalasca/releases/scalasca/2.1/dist/scalasca-2.1.tar.gz"
+
+ version('2.2.2', '2bafce988b0522d18072f7771e491ab9',
+ url='http://apps.fz-juelich.de/scalasca/releases/scalasca/2.2/dist/scalasca-2.2.2.tar.gz')
+
+ version('2.1', 'bab9c2b021e51e2ba187feec442b96e6',
+ url='http://apps.fz-juelich.de/scalasca/releases/scalasca/2.1/dist/scalasca-2.1.tar.gz')
+
+ depends_on("mpi")
+ ##########
+    # Hard-code dependencies for Scalasca according to what is stated on the release page.
+ # The OTF2 library path should be detected automatically from SCOREP
+ # SCALASCA 2.2.2
+ depends_on("scorep@1.4:", when='@2.2.2')
+ depends_on("cube@4.3:", when='@2.2.2')
+ # SCALASCA 2.1
+ depends_on("scorep@1.3", when='@2.1')
+ depends_on("cube@4.2:", when='@2.1')
+ ##########
+
+ def install(self, spec, prefix):
+ configure_args = ["--prefix=%s" % prefix,
+ "--with-cube=%s" % spec['cube'].prefix.bin,
+ "--enable-shared"]
+ configure(*configure_args)
+ make()
+ make("install") \ No newline at end of file
diff --git a/var/spack/repos/builtin/packages/scorep/package.py b/var/spack/repos/builtin/packages/scorep/package.py
new file mode 100644
index 0000000000..5127e814b6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/scorep/package.py
@@ -0,0 +1,72 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Scorep(Package):
+ """
+ The Score-P measurement infrastructure is a highly scalable and easy-to-use tool suite for profiling, event
+ tracing, and online analysis of HPC applications.
+ """
+
+ homepage = "http://www.vi-hps.org/projects/score-p"
+ url = "http://www.vi-hps.org/upload/packages/scorep/scorep-1.2.3.tar.gz"
+
+ version('1.4.2', '3b9a042b13bdd5836452354e6567f71e',
+ url='http://www.vi-hps.org/upload/packages/scorep/scorep-1.4.2.tar.gz')
+ version('1.3', '9db6f957b7f51fa01377a9537867a55c',
+ url='http://www.vi-hps.org/upload/packages/scorep/scorep-1.3.tar.gz')
+
+ ##########
+    # Dependencies for SCORE-P are tightly version-constrained. See the homepage for more information.
+ # SCOREP 1.4.2
+ depends_on('otf2@1.5:1.6', when='@1.4.2')
+ depends_on('opari2@1.1.4', when='@1.4.2')
+ depends_on('cube@4.3:4.4', when='@1.4.2')
+ # SCOREP 1.3
+ depends_on("otf2@1.4", when='@1.3')
+ depends_on("opari2@1.1.4", when='@1.3')
+ depends_on("cube@4.2.3", when='@1.3')
+ ##########
+
+ depends_on("mpi")
+ depends_on("papi")
+
+ def install(self, spec, prefix):
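+        # Build out of source: call the configure script from the staged
+        # source tree inside a fresh 'spack-build' directory.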
+        configure = Executable(join_path(self.stage.source_path, 'configure'))
+ with working_dir('spack-build', create=True):
+ configure_args = ["--prefix=%s" % prefix,
+ "--with-otf2=%s" % spec['otf2'].prefix.bin,
+ "--with-opari2=%s" % spec['opari2'].prefix.bin,
+ "--with-cube=%s" % spec['cube'].prefix.bin,
+ "--with-papi-header=%s" % spec['papi'].prefix.include,
+ "--with-papi-lib=%s" % spec['papi'].prefix.lib,
+ "--enable-shared",
+ "CFLAGS=-fPIC",
+ "CXXFLAGS=-fPIC"]
+ configure(*configure_args)
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/scotch/package.py b/var/spack/repos/builtin/packages/scotch/package.py
new file mode 100644
index 0000000000..8229ed8686
--- /dev/null
+++ b/var/spack/repos/builtin/packages/scotch/package.py
@@ -0,0 +1,126 @@
+from spack import *
+import os
+
+class Scotch(Package):
+ """Scotch is a software package for graph and mesh/hypergraph
+ partitioning, graph clustering, and sparse matrix ordering."""
+ homepage = "http://www.labri.fr/perso/pelegrin/scotch/"
+ url = "http://gforge.inria.fr/frs/download.php/file/34099/scotch_6.0.3.tar.gz"
+ list_url = "http://gforge.inria.fr/frs/?group_id=248"
+
+ version('6.0.3', '10b0cc0f184de2de99859eafaca83cfc')
+
+ variant('mpi', default=False, description='Activate the compilation of PT-Scotch')
+    variant('compression', default=True, description='Activate the possibility to use compressed files')
+ variant('esmumps', default=False, description='Activate the compilation of the lib esmumps needed by mumps')
+ variant('shared', default=True, description='Build shared libraries')
+
+ depends_on('mpi', when='+mpi')
+ depends_on('zlib', when='+compression')
+ depends_on('flex')
+ depends_on('bison')
+
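+    # Scotch's Makefile.inc expects three compiler variables: CCS (sequential
+    # code), CCP (parallel, PT-Scotch code) and CCD; without '+mpi', CCD
+    # falls back to the sequential compiler and CCP is only a placeholder.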
+ def compiler_specifics(self, makefile_inc, defines):
+ if self.compiler.name == 'gcc':
+ defines.append('-Drestrict=__restrict')
+ elif self.compiler.name == 'intel':
+ defines.append('-restrict')
+
+ makefile_inc.append('CCS = $(CC)')
+
+ if '+mpi' in self.spec:
+ makefile_inc.extend([
+ 'CCP = %s' % os.path.join(self.spec['mpi'].prefix.bin, 'mpicc'),
+ 'CCD = $(CCP)'
+ ])
+ else:
+ makefile_inc.extend([
+ 'CCP = mpicc', # It is set but not used
+ 'CCD = $(CCS)'
+ ])
+
+ def library_build_type(self, makefile_inc, defines):
+ makefile_inc.extend([
+ 'LIB = .a',
+ 'CLIBFLAGS = ',
+ 'RANLIB = ranlib',
+ 'AR = ar',
+ 'ARFLAGS = -ruv '
+ ])
+
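+    # Spack multimethod: when the spec enables '+shared', the decorated
+    # definition below replaces the static-library default above.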
+ @when('+shared')
+ def library_build_type(self, makefile_inc, defines):
+ makefile_inc.extend([
+ 'LIB = .so',
+ 'CLIBFLAGS = -shared -fPIC',
+ 'RANLIB = echo',
+ 'AR = $(CC)',
+ 'ARFLAGS = -shared $(LDFLAGS) -o'
+ ])
+
+ def extra_features(self, makefile_inc, defines):
+ ldflags = []
+
+ if '+compression' in self.spec:
+ defines.append('-DCOMMON_FILE_COMPRESS_GZ')
+ ldflags.append('-L%s -lz' % (self.spec['zlib'].prefix.lib))
+
+ defines.append('-DCOMMON_PTHREAD')
+ ldflags.append('-lm -lrt -pthread')
+
+ makefile_inc.append('LDFLAGS = %s' % ' '.join(ldflags))
+
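+    # Spack runs patch() after staging and before install(), so it is a
+    # convenient place to generate src/Makefile.inc from the spec.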
+ def patch(self):
+ makefile_inc = []
+ defines = [
+ '-DCOMMON_RANDOM_FIXED_SEED',
+ '-DSCOTCH_DETERMINISTIC',
+ '-DSCOTCH_RENAME',
+ '-DIDXSIZE64' ]
+
+ self.library_build_type(makefile_inc, defines)
+ self.compiler_specifics(makefile_inc, defines)
+ self.extra_features(makefile_inc, defines)
+
+ makefile_inc.extend([
+ 'EXE =',
+ 'OBJ = .o',
+ 'MAKE = make',
+ 'CAT = cat',
+ 'LN = ln',
+ 'MKDIR = mkdir',
+ 'MV = mv',
+ 'CP = cp',
+ 'CFLAGS = -O3 %s' % (' '.join(defines)),
+            'LEX = %s -Pscotchyy -olex.yy.c' % os.path.join(self.spec['flex'].prefix.bin, 'flex'),
+ 'YACC = %s -pscotchyy -y -b y' % os.path.join(self.spec['bison'].prefix.bin, 'bison'),
+ 'prefix = %s' % self.prefix,
+ ''
+ ])
+
+ with working_dir('src'):
+ with open('Makefile.inc', 'w') as fh:
+ fh.write('\n'.join(makefile_inc))
+
+ def install(self, spec, prefix):
+ targets = ['scotch']
+ if '+mpi' in self.spec:
+ targets.append('ptscotch')
+
+ if '+esmumps' in self.spec:
+ targets.append('esmumps')
+ if '+mpi' in self.spec:
+ targets.append('ptesmumps')
+
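+        # Build each target in src/; ptesmumps is built serially, since it
+        # does not appear to be parallel-make safe.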
+ with working_dir('src'):
+ for app in targets:
+                make(app, parallel=(app != 'ptesmumps'))
+
+ install_tree('bin', prefix.bin)
+ install_tree('lib', prefix.lib)
+ install_tree('include', prefix.include)
+ install_tree('man/man1', prefix.share_man1)
+
diff --git a/var/spack/packages/scr/package.py b/var/spack/repos/builtin/packages/scr/package.py
index 9fb758f072..1408dce678 100644
--- a/var/spack/packages/scr/package.py
+++ b/var/spack/repos/builtin/packages/scr/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/var/spack/packages/silo/package.py b/var/spack/repos/builtin/packages/silo/package.py
index 9eda11df15..9eda11df15 100644
--- a/var/spack/packages/silo/package.py
+++ b/var/spack/repos/builtin/packages/silo/package.py
diff --git a/var/spack/packages/snappy/package.py b/var/spack/repos/builtin/packages/snappy/package.py
index c8f9ceef7d..c8f9ceef7d 100644
--- a/var/spack/packages/snappy/package.py
+++ b/var/spack/repos/builtin/packages/snappy/package.py
diff --git a/var/spack/repos/builtin/packages/sparsehash/package.py b/var/spack/repos/builtin/packages/sparsehash/package.py
new file mode 100644
index 0000000000..7decaeb89b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/sparsehash/package.py
@@ -0,0 +1,14 @@
+from spack import *
+
+class Sparsehash(Package):
+ """Sparse and dense hash-tables for C++ by Google"""
+ homepage = "https://github.com/sparsehash/sparsehash"
+ url = "https://github.com/sparsehash/sparsehash/archive/sparsehash-2.0.3.tar.gz"
+
+ version('2.0.3', 'd8d5e2538c1c25577b3f066d7a55e99e')
+
+ def install(self, spec, prefix):
+ configure('--prefix=%s' % prefix)
+
+ make()
+ make("install")
diff --git a/var/spack/packages/spindle/package.py b/var/spack/repos/builtin/packages/spindle/package.py
index 06a1e14284..a20753458a 100644
--- a/var/spack/packages/spindle/package.py
+++ b/var/spack/repos/builtin/packages/spindle/package.py
@@ -6,7 +6,7 @@
# Written by Matthew LeGendre, legendre1@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/var/spack/repos/builtin/packages/spot/package.py b/var/spack/repos/builtin/packages/spot/package.py
new file mode 100644
index 0000000000..9e539277ae
--- /dev/null
+++ b/var/spack/repos/builtin/packages/spot/package.py
@@ -0,0 +1,18 @@
+from spack import *
+import os
+
+class Spot(Package):
+ """Spot is a C++11 library for omega-automata manipulation and model checking."""
+ homepage = "https://spot.lrde.epita.fr/index.html"
+ url = "http://www.lrde.epita.fr/dload/spot/spot-1.99.3.tar.gz"
+
+ version('1.99.3', 'd53adcb2d0fe7c69f45d4e595a58254e')
+
+ #depends_on("gcc@4.8:")
+ depends_on("python@3.2:")
+
+ def install(self, spec, prefix):
+ configure('--prefix=%s' % prefix)
+
+ make()
+ make("install")
diff --git a/var/spack/packages/sqlite/package.py b/var/spack/repos/builtin/packages/sqlite/package.py
index 734b0b6cb6..1cf2d30239 100644
--- a/var/spack/packages/sqlite/package.py
+++ b/var/spack/repos/builtin/packages/sqlite/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/var/spack/packages/stat/configure_mpicxx.patch b/var/spack/repos/builtin/packages/stat/configure_mpicxx.patch
index e09056d95c..e09056d95c 100644
--- a/var/spack/packages/stat/configure_mpicxx.patch
+++ b/var/spack/repos/builtin/packages/stat/configure_mpicxx.patch
diff --git a/var/spack/packages/stat/package.py b/var/spack/repos/builtin/packages/stat/package.py
index 5d81e62731..5d81e62731 100644
--- a/var/spack/packages/stat/package.py
+++ b/var/spack/repos/builtin/packages/stat/package.py
diff --git a/var/spack/packages/sundials/package.py b/var/spack/repos/builtin/packages/sundials/package.py
index 8b784c8c3c..7e025a8244 100644
--- a/var/spack/packages/sundials/package.py
+++ b/var/spack/repos/builtin/packages/sundials/package.py
@@ -6,7 +6,7 @@
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
diff --git a/var/spack/packages/swig/package.py b/var/spack/repos/builtin/packages/swig/package.py
index d7a3d815b9..8d46c4fe46 100644
--- a/var/spack/packages/swig/package.py
+++ b/var/spack/repos/builtin/packages/swig/package.py
@@ -6,7 +6,7 @@
# Written by Matthew LeGendre, legendre1@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
-# For details, see https://scalability-llnl.github.io/spack
+# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
@@ -38,6 +38,8 @@ class Swig(Package):
version('3.0.2', '62f9b0d010cef36a13a010dc530d0d41')
+ depends_on('pcre')
+
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
make()
diff --git a/var/spack/repos/builtin/packages/szip/package.py b/var/spack/repos/builtin/packages/szip/package.py
new file mode 100644
index 0000000000..c48c5b431e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/szip/package.py
@@ -0,0 +1,21 @@
+from spack import *
+
+class Szip(Package):
+ """Szip is an implementation of the extended-Rice lossless compression algorithm.
+ It provides lossless compression of scientific data, and is provided with HDF
+ software products."""
+
+ homepage = "https://www.hdfgroup.org/doc_resource/SZIP/"
+ url = "http://www.hdfgroup.org/ftp/lib-external/szip/2.1/src/szip-2.1.tar.gz"
+
+ version('2.1', '902f831bcefb69c6b635374424acbead')
+
+ def install(self, spec, prefix):
+ configure('--prefix=%s' % prefix,
+ '--enable-production',
+ '--enable-shared',
+ '--enable-static',
+ '--enable-encoding')
+
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/tar/package.py b/var/spack/repos/builtin/packages/tar/package.py
new file mode 100644
index 0000000000..539174017c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/tar/package.py
@@ -0,0 +1,13 @@
+from spack import *
+
+class Tar(Package):
+ """GNU Tar provides the ability to create tar archives, as well as various other kinds of manipulation."""
+ homepage = "https://www.gnu.org/software/tar/"
+ url = "http://ftp.gnu.org/gnu/tar/tar-1.28.tar.gz"
+
+ version('1.28', '6ea3dbea1f2b0409b234048e021a9fd7')
+
+ def install(self, spec, prefix):
+ configure("--prefix=%s" % prefix)
+ make()
+ make('install')
diff --git a/var/spack/packages/task/package.py b/var/spack/repos/builtin/packages/task/package.py
index 07f44cc45b..07f44cc45b 100644
--- a/var/spack/packages/task/package.py
+++ b/var/spack/repos/builtin/packages/task/package.py
diff --git a/var/spack/packages/taskd/package.py b/var/spack/repos/builtin/packages/taskd/package.py
index 66bc0cb484..66bc0cb484 100644
--- a/var/spack/packages/taskd/package.py
+++ b/var/spack/repos/builtin/packages/taskd/package.py
diff --git a/var/spack/repos/builtin/packages/tau/package.py b/var/spack/repos/builtin/packages/tau/package.py
new file mode 100644
index 0000000000..31492397d8
--- /dev/null
+++ b/var/spack/repos/builtin/packages/tau/package.py
@@ -0,0 +1,139 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+import os
+import os.path
+
+from llnl.util.filesystem import join_path
+
+class Tau(Package):
+ """
+ A portable profiling and tracing toolkit for performance
+ analysis of parallel programs written in Fortran, C, C++, UPC,
+    Java, and Python.
+ """
+ homepage = "http://www.cs.uoregon.edu/research/tau"
+ url = "https://www.cs.uoregon.edu/research/tau/tau_releases/tau-2.25.tar.gz"
+
+ version('2.25', '46cd48fa3f3c4ce0197017b3158a2b43')
+ version('2.24.1', '6635ece6d1f08215b02f5d0b3c1e971b')
+ version('2.24', '57ce33539c187f2e5ec68f0367c76db4')
+ version('2.23.1', '6593b47ae1e7a838e632652f0426fe72')
+
+ # TODO : shmem variant missing
+ variant('download', default=False, description='Downloads and builds various dependencies')
+ variant('scorep', default=False, description='Activates SCOREP support')
+ variant('openmp', default=True, description='Use OpenMP threads')
+ variant('mpi', default=True, description='Specify use of TAU MPI wrapper library')
+ variant('phase', default=True, description='Generate phase based profiles')
+    variant('comm', default=True, description='Generate profiles with MPI communicator info')
+
+    # TODO : Try to build direct OTF2 support? Some parts of the OTF support library in TAU are non-conformant
+    # TODO : and fail at compile time. Moreover, SCOREP is already compiled with OTF2 support.
+ depends_on('pdt') # Required for TAU instrumentation
+ depends_on('scorep', when='+scorep')
+ depends_on('binutils', when='~download')
+ depends_on('mpi', when='+mpi')
+
+ def set_compiler_options(self):
+
+ useropt = ["-O2", self.rpath_args]
+
+ ##########
+ # Selecting a compiler with TAU configure is quite tricky:
+ # 1 - compilers are mapped to a given set of strings (and spack cc, cxx, etc. wrappers are not among them)
+ # 2 - absolute paths are not allowed
+        # 3 - the usual environment variables seem not to be checked ('CC', 'CXX' and 'FC')
+        # 4 - if no -cc=<compiler> or -cxx=<compiler> is passed, TAU is silently built with the system compiler
+        #     (regardless of which %<compiler> is used in the spec)
+ #
+        # In the following we give TAU what it expects and put the compilers into PATH
+ compiler_path = os.path.dirname(self.compiler.cc)
+ os.environ['PATH'] = ':'.join([compiler_path, os.environ['PATH']])
+ compiler_options = ['-c++=%s' % self.compiler.cxx_names[0],
+ '-cc=%s' % self.compiler.cc_names[0]]
+ if self.compiler.fc:
+ compiler_options.append('-fortran=%s' % self.compiler.fc_names[0])
+ ##########
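+        # Illustrative example: with a gcc spec this typically yields
+        # ['-c++=g++', '-cc=gcc', '-fortran=gfortran'] (the exact names come
+        # from the compiler's cc_names/cxx_names/fc_names lists).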
+
+ # Construct the string of custom compiler flags and append it to compiler related options
+ useropt = ' '.join(useropt)
+ useropt = "-useropt=%s" % useropt
+ compiler_options.append(useropt)
+ return compiler_options
+
+ def install(self, spec, prefix):
+ # TAU isn't happy with directories that have '@' in the path. Sigh.
+ change_sed_delimiter('@', ';', 'configure')
+ change_sed_delimiter('@', ';', 'utils/FixMakefile')
+ change_sed_delimiter('@', ';', 'utils/FixMakefile.sed.default')
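+        # (change_sed_delimiter rewrites the sed expressions in these files
+        # to use ';' as the delimiter, so an '@' in the prefix is harmless.)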
+
+        # TAU's configure, despite the name, seems to be a hand-written script (nothing related to autotools).
+        # As such it has a few peculiarities that make this build quite hackish.
+ options = ["-prefix=%s" % prefix,
+ "-iowrapper",
+ "-pdt=%s" % spec['pdt'].prefix]
+ # If download is active, download and build suggested dependencies
+ if '+download' in spec:
+ options.extend(['-bfd=download',
+ '-unwind=download',
+ '-asmdex=download'])
+ else:
+ options.extend(["-bfd=%s" % spec['binutils'].prefix])
+ # TODO : unwind and asmdex are still missing
+
+ if '+scorep' in spec:
+ options.append("-scorep=%s" % spec['scorep'].prefix)
+
+ if '+openmp' in spec:
+ options.append('-openmp')
+
+ if '+mpi' in spec:
+ options.append('-mpi')
+
+ if '+phase' in spec:
+ options.append('-PROFILEPHASE')
+
+ if '+comm' in spec:
+ options.append('-PROFILECOMMUNICATORS')
+
+ compiler_specific_options = self.set_compiler_options()
+ options.extend(compiler_specific_options)
+ configure(*options)
+ make("install")
+
+ # Link arch-specific directories into prefix since there is
+ # only one arch per prefix the way spack installs.
+ self.link_tau_arch_dirs()
+
+ def link_tau_arch_dirs(self):
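+        # TAU installs under <prefix>/<arch>/{bin,lib}; expose those as
+        # <prefix>/bin and <prefix>/lib via relative symlinks.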
+ for subdir in os.listdir(self.prefix):
+ for d in ('bin', 'lib'):
+ src = join_path(self.prefix, subdir, d)
+ dest = join_path(self.prefix, d)
+ if os.path.isdir(src) and not os.path.exists(dest):
+ os.symlink(join_path(subdir, d), dest)
diff --git a/var/spack/packages/tcl/package.py b/var/spack/repos/builtin/packages/tcl/package.py
index 529adf7788..529adf7788 100644
--- a/var/spack/packages/tcl/package.py
+++ b/var/spack/repos/builtin/packages/tcl/package.py
diff --git a/var/spack/repos/builtin/packages/texinfo/package.py b/var/spack/repos/builtin/packages/texinfo/package.py
new file mode 100644
index 0000000000..a83c10c0c1
--- /dev/null
+++ b/var/spack/repos/builtin/packages/texinfo/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Texinfo(Package):
+ """
+ Texinfo is the official documentation format of the GNU project. It was invented by Richard Stallman and Bob
+ Chassell many years ago, loosely based on Brian Reid's Scribe and other formatting languages of the time. It is
+    used by many non-GNU projects as well.
+ """
+ homepage = "https://www.gnu.org/software/texinfo/"
+ url = "http://ftp.gnu.org/gnu/texinfo/texinfo-6.0.tar.gz"
+
+ version('6.0', 'e1a2ef5dce5018b53f0f6eed45b247a7')
+ version('5.2', '1b8f98b80a8e6c50422125e07522e8db')
+ version('5.1', '54e250014fe698fb4832016158747c03')
+ version('5.0', '918432285abe6fe96c98355594c5656a')
+
+ def install(self, spec, prefix):
+ configure('--prefix=%s' % prefix)
+ make()
+ make("install")
diff --git a/var/spack/packages/the_silver_searcher/package.py b/var/spack/repos/builtin/packages/the_silver_searcher/package.py
index e4020b6766..e4020b6766 100644
--- a/var/spack/packages/the_silver_searcher/package.py
+++ b/var/spack/repos/builtin/packages/the_silver_searcher/package.py
diff --git a/var/spack/packages/thrift/package.py b/var/spack/repos/builtin/packages/thrift/package.py
index 0e15052f64..0e15052f64 100644
--- a/var/spack/packages/thrift/package.py
+++ b/var/spack/repos/builtin/packages/thrift/package.py
diff --git a/var/spack/packages/tk/package.py b/var/spack/repos/builtin/packages/tk/package.py
index 96736f6f95..96736f6f95 100644
--- a/var/spack/packages/tk/package.py
+++ b/var/spack/repos/builtin/packages/tk/package.py
diff --git a/var/spack/packages/tmux/package.py b/var/spack/repos/builtin/packages/tmux/package.py
index 23d36db427..23d36db427 100644
--- a/var/spack/packages/tmux/package.py
+++ b/var/spack/repos/builtin/packages/tmux/package.py
diff --git a/var/spack/packages/tmuxinator/package.py b/var/spack/repos/builtin/packages/tmuxinator/package.py
index 26c061cbd6..77ae063e5d 100644
--- a/var/spack/packages/tmuxinator/package.py
+++ b/var/spack/repos/builtin/packages/tmuxinator/package.py
@@ -5,7 +5,7 @@ class Tmuxinator(Package):
homepage = "https://github.com/tmuxinator/tmuxinator"
url = "https://github.com/tmuxinator/tmuxinator"
- version('0.6.11',
+ version('0.6.11',
git='https://github.com/tmuxinator/tmuxinator',
tag='v0.6.11')
@@ -13,5 +13,4 @@ class Tmuxinator(Package):
def install(self, spec, prefix):
gem('build', 'tmuxinator.gemspec')
- gem('install', 'tmuxinator-{}.gem'.format(self.version))
-
+ gem('install', 'tmuxinator-{0}.gem'.format(self.version))
diff --git a/var/spack/repos/builtin/packages/trilinos/package.py b/var/spack/repos/builtin/packages/trilinos/package.py
new file mode 100644
index 0000000000..7c43f796a4
--- /dev/null
+++ b/var/spack/repos/builtin/packages/trilinos/package.py
@@ -0,0 +1,50 @@
+from spack import *
+
+
+class Trilinos(Package):
+ """
+ The Trilinos Project is an effort to develop algorithms and enabling technologies within an object-oriented
+ software framework for the solution of large-scale, complex multi-physics engineering and scientific problems.
+ A unique design feature of Trilinos is its focus on packages.
+ """
+ homepage = "https://trilinos.org/"
+ url = "http://trilinos.csbsju.edu/download/files/trilinos-12.2.1-Source.tar.gz"
+
+ version('12.2.1', '6161926ea247863c690e927687f83be9')
+ version('12.0.1', 'bd99741d047471e127b8296b2ec08017')
+ version('11.14.3', '2f4f83f8333e4233c57d0f01c4b57426')
+ version('11.14.2', 'a43590cf896c677890d75bfe75bc6254')
+ version('11.14.1', '40febc57f76668be8b6a77b7607bb67f')
+
+ variant('mpi', default=True, description='Add a dependency on MPI and enables MPI dependent packages')
+
+ # Everything should be compiled with -fpic
+ depends_on('blas')
+ depends_on('lapack')
+ depends_on('boost')
+ depends_on('netcdf')
+ depends_on('matio')
+ depends_on('glm')
+ depends_on('swig')
+ depends_on('mpi', when='+mpi')
+
+ def install(self, spec, prefix):
+
+ options = [
+ '-DTrilinos_ENABLE_ALL_PACKAGES:BOOL=ON',
+ '-DTrilinos_ENABLE_TESTS:BOOL=OFF',
+ '-DTrilinos_ENABLE_EXAMPLES:BOOL=OFF',
+ '-DBUILD_SHARED_LIBS:BOOL=ON',
+ '-DBLAS_LIBRARY_DIRS:PATH=%s' % spec['blas'].prefix,
+ '-DLAPACK_LIBRARY_DIRS:PATH=%s' % spec['lapack'].prefix
+ ]
+ if '+mpi' in spec:
+ mpi_options = ['-DTPL_ENABLE_MPI:BOOL=ON']
+ options.extend(mpi_options)
+
+        # std_cmake_args provides -DCMAKE_INSTALL_PREFIX and other common CMake settings
+ options.extend(std_cmake_args)
+ with working_dir('spack-build', create=True):
+ cmake('..', *options)
+ make()
+ make('install')
diff --git a/var/spack/packages/uncrustify/package.py b/var/spack/repos/builtin/packages/uncrustify/package.py
index d3f2d1b473..d3f2d1b473 100644
--- a/var/spack/packages/uncrustify/package.py
+++ b/var/spack/repos/builtin/packages/uncrustify/package.py
diff --git a/var/spack/packages/util-linux/package.py b/var/spack/repos/builtin/packages/util-linux/package.py
index cb7ceabf57..cb7ceabf57 100644
--- a/var/spack/packages/util-linux/package.py
+++ b/var/spack/repos/builtin/packages/util-linux/package.py
diff --git a/var/spack/repos/builtin/packages/valgrind/package.py b/var/spack/repos/builtin/packages/valgrind/package.py
new file mode 100644
index 0000000000..0b030d73e9
--- /dev/null
+++ b/var/spack/repos/builtin/packages/valgrind/package.py
@@ -0,0 +1,55 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Valgrind(Package):
+ """
+ Valgrind is an instrumentation framework for building dynamic analysis tools. There are Valgrind tools that can
+ automatically detect many memory management and threading bugs, and profile your programs in detail. You can also
+ use Valgrind to build new tools.
+
+ Valgrind is Open Source / Free Software, and is freely available under the GNU General Public License, version 2.
+ """
+ homepage = "http://valgrind.org/"
+ url = "http://valgrind.org/downloads/valgrind-3.11.0.tar.bz2"
+
+ version('3.11.0', '4ea62074da73ae82e0162d6550d3f129')
+ version('3.10.1', '60ddae962bc79e7c95cfc4667245707f')
+ version('3.10.0', '7c311a72a20388aceced1aa5573ce970')
+
+ variant('mpi', default=True, description='Activates MPI support for valgrind')
+ variant('boost', default=True, description='Activates boost support for valgrind')
+
+ depends_on('mpi', when='+mpi')
+ depends_on('boost', when='+boost')
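+    # Note that these variants only add the dependencies; configure is
+    # expected to pick them up from the build environment.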
+
+ def install(self, spec, prefix):
+ options = ['--prefix=%s' % prefix,
+ '--enable-ubsan']
+ configure(*options)
+ make()
+ make("install")
diff --git a/var/spack/packages/vim/package.py b/var/spack/repos/builtin/packages/vim/package.py
index 4099b3257f..4099b3257f 100644
--- a/var/spack/packages/vim/package.py
+++ b/var/spack/repos/builtin/packages/vim/package.py
diff --git a/var/spack/packages/vtk/package.py b/var/spack/repos/builtin/packages/vtk/package.py
index 4a27a8fedb..4a27a8fedb 100644
--- a/var/spack/packages/vtk/package.py
+++ b/var/spack/repos/builtin/packages/vtk/package.py
diff --git a/var/spack/packages/wget/package.py b/var/spack/repos/builtin/packages/wget/package.py
index c8fd025122..55728b0515 100644
--- a/var/spack/packages/wget/package.py
+++ b/var/spack/repos/builtin/packages/wget/package.py
@@ -8,9 +8,10 @@ class Wget(Package):
etc."""
homepage = "http://www.gnu.org/software/wget/"
- url = "http://ftp.gnu.org/gnu/wget/wget-1.16.tar.xz"
+ url = "http://ftp.gnu.org/gnu/wget/wget-1.16.tar.gz"
- version('1.16', 'fe102975ab3a6c049777883f1bb9ad07')
+ version('1.17', 'c4c4727766f24ac716936275014a0536')
+ version('1.16', '293a37977c41b5522f781d3a3a078426')
depends_on("openssl")
diff --git a/var/spack/packages/wx/package.py b/var/spack/repos/builtin/packages/wx/package.py
index 1813a8c8a5..206fde7775 100644
--- a/var/spack/packages/wx/package.py
+++ b/var/spack/repos/builtin/packages/wx/package.py
@@ -16,6 +16,8 @@ class Wx(Package):
version('3.0.1', 'dad1f1cd9d4c370cbc22700dc492da31',
url="https://sourceforge.net/projects/wxwindows/files/3.0.1/wxWidgets-3.0.1.tar.bz2")
+ depends_on('gtkplus')
+
def install(self, spec, prefix):
configure("--prefix=%s" % prefix, "--enable-unicode", "--disable-precomp-headers")
diff --git a/var/spack/packages/wxpropgrid/package.py b/var/spack/repos/builtin/packages/wxpropgrid/package.py
index 790cead517..790cead517 100644
--- a/var/spack/packages/wxpropgrid/package.py
+++ b/var/spack/repos/builtin/packages/wxpropgrid/package.py
diff --git a/var/spack/packages/xcb-proto/package.py b/var/spack/repos/builtin/packages/xcb-proto/package.py
index 17a94bd892..17a94bd892 100644
--- a/var/spack/packages/xcb-proto/package.py
+++ b/var/spack/repos/builtin/packages/xcb-proto/package.py
diff --git a/var/spack/repos/builtin/packages/xerces-c/package.py b/var/spack/repos/builtin/packages/xerces-c/package.py
new file mode 100644
index 0000000000..b59ab178ae
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xerces-c/package.py
@@ -0,0 +1,36 @@
+from spack import *
+
+class XercesC(Package):
+ """ Xerces-C++ is a validating XML parser written in a portable subset of C++.
+ Xerces-C++ makes it easy to give your application the ability to read and
+ write XML data. A shared library is provided for parsing, generating,
+ manipulating, and validating XML documents using the DOM, SAX, and SAX2 APIs.
+ """
+
+ homepage = "https://xerces.apache.org/xerces-c"
+ url = "https://www.apache.org/dist/xerces/c/3/sources/xerces-c-3.1.2.tar.gz"
+ version('3.1.2', '9eb1048939e88d6a7232c67569b23985')
+
+ def install(self, spec, prefix):
+ configure("--prefix=%s" % prefix,
+ "--disable-network")
+ make("clean")
+ make()
+ make("install")
+
diff --git a/var/spack/packages/xz/package.py b/var/spack/repos/builtin/packages/xz/package.py
index 88c5793018..ba6c9733a7 100644
--- a/var/spack/packages/xz/package.py
+++ b/var/spack/repos/builtin/packages/xz/package.py
@@ -8,9 +8,13 @@ class Xz(Package):
homepage = "http://tukaani.org/xz/"
url = "http://tukaani.org/xz/xz-5.2.0.tar.bz2"
- version('5.2.0', '867cc8611760240ebf3440bd6e170bb9')
-
+    version('5.2.2', 'f90c9a0c8b259aee2234c4e0d7fd70af',
+            url='http://tukaani.org/xz/xz-5.2.2.tar.bz2')
+    version('5.2.0', '867cc8611760240ebf3440bd6e170bb9',
+            url='http://tukaani.org/xz/xz-5.2.0.tar.bz2')
+
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
make()
make("install")
+
diff --git a/var/spack/packages/yasm/package.py b/var/spack/repos/builtin/packages/yasm/package.py
index d3a695b16d..d3a695b16d 100644
--- a/var/spack/packages/yasm/package.py
+++ b/var/spack/repos/builtin/packages/yasm/package.py
diff --git a/var/spack/packages/zeromq/package.py b/var/spack/repos/builtin/packages/zeromq/package.py
index b5a1e3d4cd..b5a1e3d4cd 100644
--- a/var/spack/packages/zeromq/package.py
+++ b/var/spack/repos/builtin/packages/zeromq/package.py
diff --git a/var/spack/packages/zlib/package.py b/var/spack/repos/builtin/packages/zlib/package.py
index 2770f781ac..2770f781ac 100644
--- a/var/spack/packages/zlib/package.py
+++ b/var/spack/repos/builtin/packages/zlib/package.py
diff --git a/var/spack/repos/builtin/packages/zsh/package.py b/var/spack/repos/builtin/packages/zsh/package.py
new file mode 100644
index 0000000000..06665f0c83
--- /dev/null
+++ b/var/spack/repos/builtin/packages/zsh/package.py
@@ -0,0 +1,20 @@
+from spack import *
+
+class Zsh(Package):
+ """
+ Zsh is a shell designed for interactive use, although it is also a powerful
+ scripting language. Many of the useful features of bash, ksh, and tcsh were
+ incorporated into zsh; many original features were added.
+ """
+ homepage = "http://www.zsh.org"
+ url = "http://downloads.sourceforge.net/project/zsh/zsh/5.1.1/zsh-5.1.1.tar.gz"
+
+ version('5.1.1', checksum='8ba28a9ef82e40c3a271602f18343b2f')
+
+ depends_on("pcre")
+
+ def install(self, spec, prefix):
+ configure('--prefix=%s' % prefix)
+
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/repo.yaml b/var/spack/repos/builtin/repo.yaml
new file mode 100644
index 0000000000..54b282db6b
--- /dev/null
+++ b/var/spack/repos/builtin/repo.yaml
@@ -0,0 +1,2 @@
+repo:
+ namespace: builtin