summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--.travis.yml19
-rw-r--r--README.md3
-rwxr-xr-xbin/spack17
-rw-r--r--etc/spack/defaults/packages.yaml13
-rw-r--r--lib/spack/docs/conf.py8
-rw-r--r--lib/spack/docs/contribution_guide.rst2
-rw-r--r--lib/spack/docs/developer_guide.rst8
-rw-r--r--lib/spack/docs/getting_started.rst4
-rw-r--r--lib/spack/docs/index.rst6
-rw-r--r--lib/spack/docs/packaging_guide.rst26
-rw-r--r--lib/spack/external/_pytest/pytester.py2
-rw-r--r--lib/spack/external/functools_backport.py17
-rw-r--r--lib/spack/external/ordereddict_backport.py8
-rwxr-xr-xlib/spack/external/pyqver2.py70
-rwxr-xr-xlib/spack/external/pyqver3.py248
-rw-r--r--lib/spack/external/six.py886
-rw-r--r--lib/spack/external/yaml/README2
-rw-r--r--lib/spack/external/yaml/lib/yaml/__init__.py (renamed from lib/spack/external/yaml/__init__.py)2
-rw-r--r--lib/spack/external/yaml/lib/yaml/composer.py (renamed from lib/spack/external/yaml/composer.py)0
-rw-r--r--lib/spack/external/yaml/lib/yaml/constructor.py (renamed from lib/spack/external/yaml/constructor.py)3
-rw-r--r--lib/spack/external/yaml/lib/yaml/cyaml.py85
-rw-r--r--lib/spack/external/yaml/lib/yaml/dumper.py (renamed from lib/spack/external/yaml/dumper.py)0
-rw-r--r--lib/spack/external/yaml/lib/yaml/emitter.py (renamed from lib/spack/external/yaml/emitter.py)0
-rw-r--r--lib/spack/external/yaml/lib/yaml/error.py (renamed from lib/spack/external/yaml/error.py)0
-rw-r--r--lib/spack/external/yaml/lib/yaml/events.py (renamed from lib/spack/external/yaml/events.py)0
-rw-r--r--lib/spack/external/yaml/lib/yaml/loader.py (renamed from lib/spack/external/yaml/loader.py)0
-rw-r--r--lib/spack/external/yaml/lib/yaml/nodes.py (renamed from lib/spack/external/yaml/nodes.py)0
-rw-r--r--lib/spack/external/yaml/lib/yaml/parser.py (renamed from lib/spack/external/yaml/parser.py)0
-rw-r--r--lib/spack/external/yaml/lib/yaml/reader.py (renamed from lib/spack/external/yaml/reader.py)9
-rw-r--r--lib/spack/external/yaml/lib/yaml/representer.py (renamed from lib/spack/external/yaml/representer.py)4
-rw-r--r--lib/spack/external/yaml/lib/yaml/resolver.py (renamed from lib/spack/external/yaml/resolver.py)5
-rw-r--r--lib/spack/external/yaml/lib/yaml/scanner.py (renamed from lib/spack/external/yaml/scanner.py)8
-rw-r--r--lib/spack/external/yaml/lib/yaml/serializer.py (renamed from lib/spack/external/yaml/serializer.py)0
-rw-r--r--lib/spack/external/yaml/lib/yaml/tokens.py (renamed from lib/spack/external/yaml/tokens.py)0
-rw-r--r--lib/spack/external/yaml/lib3/yaml/__init__.py312
-rw-r--r--lib/spack/external/yaml/lib3/yaml/composer.py139
-rw-r--r--lib/spack/external/yaml/lib3/yaml/constructor.py686
-rw-r--r--lib/spack/external/yaml/lib3/yaml/cyaml.py85
-rw-r--r--lib/spack/external/yaml/lib3/yaml/dumper.py62
-rw-r--r--lib/spack/external/yaml/lib3/yaml/emitter.py1137
-rw-r--r--lib/spack/external/yaml/lib3/yaml/error.py75
-rw-r--r--lib/spack/external/yaml/lib3/yaml/events.py86
-rw-r--r--lib/spack/external/yaml/lib3/yaml/loader.py40
-rw-r--r--lib/spack/external/yaml/lib3/yaml/nodes.py49
-rw-r--r--lib/spack/external/yaml/lib3/yaml/parser.py589
-rw-r--r--lib/spack/external/yaml/lib3/yaml/reader.py192
-rw-r--r--lib/spack/external/yaml/lib3/yaml/representer.py387
-rw-r--r--lib/spack/external/yaml/lib3/yaml/resolver.py227
-rw-r--r--lib/spack/external/yaml/lib3/yaml/scanner.py1444
-rw-r--r--lib/spack/external/yaml/lib3/yaml/serializer.py111
-rw-r--r--lib/spack/external/yaml/lib3/yaml/tokens.py104
-rw-r--r--lib/spack/llnl/util/filesystem.py4
-rw-r--r--lib/spack/llnl/util/lang.py11
-rw-r--r--lib/spack/llnl/util/tty/__init__.py8
-rw-r--r--lib/spack/llnl/util/tty/colify.py20
-rw-r--r--lib/spack/llnl/util/tty/log.py8
-rw-r--r--lib/spack/spack/__init__.py7
-rw-r--r--lib/spack/spack/architecture.py2
-rw-r--r--lib/spack/spack/build_environment.py8
-rw-r--r--lib/spack/spack/build_systems/autotools.py10
-rw-r--r--lib/spack/spack/build_systems/perl.py117
-rw-r--r--lib/spack/spack/build_systems/python.py80
-rw-r--r--lib/spack/spack/cmd/__init__.py6
-rw-r--r--lib/spack/spack/cmd/arch.py6
-rw-r--r--lib/spack/spack/cmd/build.py3
-rw-r--r--lib/spack/spack/cmd/common/arguments.py2
-rw-r--r--lib/spack/spack/cmd/compiler.py35
-rw-r--r--lib/spack/spack/cmd/configure.py4
-rw-r--r--lib/spack/spack/cmd/create.py70
-rw-r--r--lib/spack/spack/cmd/dependents.py2
-rw-r--r--lib/spack/spack/cmd/env.py5
-rw-r--r--lib/spack/spack/cmd/flake8.py26
-rw-r--r--lib/spack/spack/cmd/graph.py5
-rw-r--r--lib/spack/spack/cmd/info.py55
-rw-r--r--lib/spack/spack/cmd/list.py30
-rw-r--r--lib/spack/spack/cmd/location.py19
-rw-r--r--lib/spack/spack/cmd/md5.py2
-rw-r--r--lib/spack/spack/cmd/mirror.py4
-rw-r--r--lib/spack/spack/cmd/pkg.py15
-rw-r--r--lib/spack/spack/cmd/repo.py4
-rw-r--r--lib/spack/spack/cmd/spec.py23
-rw-r--r--lib/spack/spack/cmd/test.py6
-rw-r--r--lib/spack/spack/cmd/url.py92
-rw-r--r--lib/spack/spack/cmd/versions.py6
-rw-r--r--lib/spack/spack/compiler.py4
-rw-r--r--lib/spack/spack/compilers/__init__.py4
-rw-r--r--lib/spack/spack/concretize.py203
-rw-r--r--lib/spack/spack/config.py14
-rw-r--r--lib/spack/spack/database.py15
-rw-r--r--lib/spack/spack/directives.py30
-rw-r--r--lib/spack/spack/directory_layout.py3
-rw-r--r--lib/spack/spack/environment.py4
-rw-r--r--lib/spack/spack/error.py3
-rw-r--r--lib/spack/spack/fetch_strategy.py25
-rw-r--r--lib/spack/spack/graph.py3
-rw-r--r--lib/spack/spack/hooks/case_consistency.py6
-rw-r--r--lib/spack/spack/hooks/module_file_generation.py5
-rw-r--r--lib/spack/spack/modules.py22
-rw-r--r--lib/spack/spack/operating_systems/cnl.py2
-rw-r--r--lib/spack/spack/package.py34
-rw-r--r--lib/spack/spack/package_prefs.py337
-rw-r--r--lib/spack/spack/package_test.py18
-rw-r--r--lib/spack/spack/parse.py11
-rw-r--r--lib/spack/spack/provider_index.py11
-rw-r--r--lib/spack/spack/repository.py3
-rw-r--r--lib/spack/spack/spec.py188
-rw-r--r--lib/spack/spack/stage.py20
-rw-r--r--lib/spack/spack/test/architecture.py4
-rw-r--r--lib/spack/spack/test/build_system_guess.py2
-rw-r--r--lib/spack/spack/test/cmd/install.py6
-rw-r--r--lib/spack/spack/test/cmd/url.py21
-rw-r--r--lib/spack/spack/test/compilers.py9
-rw-r--r--lib/spack/spack/test/concretize.py27
-rw-r--r--lib/spack/spack/test/concretize_preferences.py8
-rw-r--r--lib/spack/spack/test/conftest.py16
-rw-r--r--lib/spack/spack/test/data/web/1.html10
-rw-r--r--lib/spack/spack/test/data/web/2.html12
-rw-r--r--lib/spack/spack/test/data/web/3.html11
-rw-r--r--lib/spack/spack/test/data/web/4.html11
-rw-r--r--lib/spack/spack/test/data/web/index.html10
-rw-r--r--lib/spack/spack/test/directory_layout.py18
-rw-r--r--lib/spack/spack/test/graph.py2
-rw-r--r--lib/spack/spack/test/lock.py2
-rw-r--r--lib/spack/spack/test/make_executable.py2
-rw-r--r--lib/spack/spack/test/modules.py6
-rw-r--r--lib/spack/spack/test/multimethod.py2
-rw-r--r--lib/spack/spack/test/package_sanity.py67
-rw-r--r--lib/spack/spack/test/pattern.py1
-rw-r--r--lib/spack/spack/test/provider_index.py7
-rw-r--r--lib/spack/spack/test/python_version.py130
-rw-r--r--lib/spack/spack/test/spec_dag.py24
-rw-r--r--lib/spack/spack/test/spec_semantics.py2
-rw-r--r--lib/spack/spack/test/spec_syntax.py233
-rw-r--r--lib/spack/spack/test/spec_yaml.py9
-rw-r--r--lib/spack/spack/test/url_extrapolate.py101
-rw-r--r--lib/spack/spack/test/url_parse.py794
-rw-r--r--lib/spack/spack/test/url_substitution.py84
-rw-r--r--lib/spack/spack/test/versions.py846
-rw-r--r--lib/spack/spack/test/web.py162
-rw-r--r--lib/spack/spack/url.py516
-rw-r--r--lib/spack/spack/util/crypto.py8
-rw-r--r--lib/spack/spack/util/executable.py10
-rw-r--r--lib/spack/spack/util/multiproc.py3
-rw-r--r--lib/spack/spack/util/naming.py47
-rw-r--r--lib/spack/spack/util/pattern.py45
-rw-r--r--lib/spack/spack/util/prefix.py12
-rw-r--r--lib/spack/spack/util/spack_json.py38
-rw-r--r--lib/spack/spack/util/spack_yaml.py16
-rw-r--r--lib/spack/spack/util/web.py131
-rw-r--r--lib/spack/spack/version.py89
-rwxr-xr-xshare/spack/spack-completion.bash13
-rw-r--r--var/spack/repos/builtin.mock/packages/conflict-parent/package.py46
-rw-r--r--var/spack/repos/builtin.mock/packages/conflict/package.py46
-rw-r--r--var/spack/repos/builtin.mock/packages/multimethod/package.py6
-rw-r--r--var/spack/repos/builtin/packages/ack/package.py1
-rw-r--r--var/spack/repos/builtin/packages/ant/package.py1
-rw-r--r--var/spack/repos/builtin/packages/archer/package.py6
-rw-r--r--var/spack/repos/builtin/packages/autogen/package.py2
-rw-r--r--var/spack/repos/builtin/packages/automake/package.py2
-rw-r--r--var/spack/repos/builtin/packages/bats/package.py38
-rw-r--r--var/spack/repos/builtin/packages/bcftools/package.py2
-rw-r--r--var/spack/repos/builtin/packages/bib2xhtml/package.py3
-rw-r--r--var/spack/repos/builtin/packages/bison/package.py3
-rw-r--r--var/spack/repos/builtin/packages/blast-plus/blast-make-fix2.5.0.diff22
-rw-r--r--var/spack/repos/builtin/packages/blast-plus/package.py202
-rw-r--r--var/spack/repos/builtin/packages/blat/package.py44
-rw-r--r--var/spack/repos/builtin/packages/boost/package.py12
-rw-r--r--var/spack/repos/builtin/packages/bowtie2/bowtie2-2.2.5.patch (renamed from var/spack/repos/builtin/packages/bowtie2/bowtie2-2.5.patch)0
-rw-r--r--var/spack/repos/builtin/packages/bowtie2/bowtie2-2.3.1.patch16
-rw-r--r--var/spack/repos/builtin/packages/bowtie2/package.py15
-rw-r--r--var/spack/repos/builtin/packages/cddlib/package.py18
-rw-r--r--var/spack/repos/builtin/packages/cdo/package.py10
-rw-r--r--var/spack/repos/builtin/packages/cfitsio/package.py1
-rw-r--r--var/spack/repos/builtin/packages/cmake/package.py2
-rw-r--r--var/spack/repos/builtin/packages/cp2k/package.py25
-rw-r--r--var/spack/repos/builtin/packages/cppad/package.py5
-rw-r--r--var/spack/repos/builtin/packages/cppcheck/package.py6
-rw-r--r--var/spack/repos/builtin/packages/cryptopp/package.py1
-rw-r--r--var/spack/repos/builtin/packages/cub/package.py39
-rw-r--r--var/spack/repos/builtin/packages/cvs/package.py36
-rw-r--r--var/spack/repos/builtin/packages/dakota/package.py4
-rw-r--r--var/spack/repos/builtin/packages/datamash/package.py2
-rw-r--r--var/spack/repos/builtin/packages/direnv/package.py39
-rw-r--r--var/spack/repos/builtin/packages/double-conversion/package.py47
-rw-r--r--var/spack/repos/builtin/packages/easybuild/package.py38
-rw-r--r--var/spack/repos/builtin/packages/elfutils/package.py6
-rw-r--r--var/spack/repos/builtin/packages/elpa/package.py63
-rw-r--r--var/spack/repos/builtin/packages/exonerate/package.py2
-rw-r--r--var/spack/repos/builtin/packages/ferret/package.py11
-rw-r--r--var/spack/repos/builtin/packages/fio/package.py56
-rw-r--r--var/spack/repos/builtin/packages/foam-extend/package.py664
-rw-r--r--var/spack/repos/builtin/packages/gcc/package.py2
-rw-r--r--var/spack/repos/builtin/packages/gdal/package.py2
-rw-r--r--var/spack/repos/builtin/packages/gdk-pixbuf/package.py4
-rw-r--r--var/spack/repos/builtin/packages/glog/package.py5
-rw-r--r--var/spack/repos/builtin/packages/gnutls/package.py26
-rw-r--r--var/spack/repos/builtin/packages/go/package.py7
-rw-r--r--var/spack/repos/builtin/packages/googletest/package.py36
-rw-r--r--var/spack/repos/builtin/packages/gource/package.py4
-rw-r--r--var/spack/repos/builtin/packages/hdf5-blosc/package.py24
-rw-r--r--var/spack/repos/builtin/packages/htslib/package.py2
-rw-r--r--var/spack/repos/builtin/packages/httpie/package.py47
-rw-r--r--var/spack/repos/builtin/packages/hwloc/package.py2
-rw-r--r--var/spack/repos/builtin/packages/hydra/package.py2
-rw-r--r--var/spack/repos/builtin/packages/ibmisc/package.py4
-rw-r--r--var/spack/repos/builtin/packages/icet/package.py6
-rw-r--r--var/spack/repos/builtin/packages/image-magick/package.py3
-rw-r--r--var/spack/repos/builtin/packages/intel-daal/package.py (renamed from var/spack/repos/builtin/packages/daal/package.py)4
-rw-r--r--var/spack/repos/builtin/packages/intel-ipp/package.py (renamed from var/spack/repos/builtin/packages/ipp/package.py)4
-rw-r--r--var/spack/repos/builtin/packages/intel-mkl/package.py22
-rw-r--r--var/spack/repos/builtin/packages/iozone/package.py53
-rw-r--r--var/spack/repos/builtin/packages/jdk/package.py4
-rw-r--r--var/spack/repos/builtin/packages/jq/package.py37
-rw-r--r--var/spack/repos/builtin/packages/kaldi/package.py107
-rw-r--r--var/spack/repos/builtin/packages/libedit/package.py8
-rw-r--r--var/spack/repos/builtin/packages/libgd/package.py4
-rw-r--r--var/spack/repos/builtin/packages/libint/package.py34
-rw-r--r--var/spack/repos/builtin/packages/libpfm4/package.py48
-rw-r--r--var/spack/repos/builtin/packages/libsodium/package.py1
-rw-r--r--var/spack/repos/builtin/packages/libxsmm/package.py3
-rw-r--r--var/spack/repos/builtin/packages/libxstream/package.py4
-rw-r--r--var/spack/repos/builtin/packages/libzip/package.py35
-rw-r--r--var/spack/repos/builtin/packages/llvm-openmp-ompt/package.py6
-rw-r--r--var/spack/repos/builtin/packages/lmod/package.py1
-rw-r--r--var/spack/repos/builtin/packages/meep/package.py2
-rw-r--r--var/spack/repos/builtin/packages/metis/package.py20
-rw-r--r--var/spack/repos/builtin/packages/mfem/package.py23
-rw-r--r--var/spack/repos/builtin/packages/miniconda2/package.py43
-rw-r--r--var/spack/repos/builtin/packages/miniconda3/package.py43
-rw-r--r--var/spack/repos/builtin/packages/mitos/package.py5
-rw-r--r--var/spack/repos/builtin/packages/moab/package.py2
-rw-r--r--var/spack/repos/builtin/packages/mpich/package.py2
-rw-r--r--var/spack/repos/builtin/packages/mummer/package.py55
-rw-r--r--var/spack/repos/builtin/packages/mvapich2/package.py7
-rw-r--r--var/spack/repos/builtin/packages/mxml/package.py4
-rw-r--r--var/spack/repos/builtin/packages/nccl/package.py4
-rw-r--r--var/spack/repos/builtin/packages/ncftp/package.py37
-rw-r--r--var/spack/repos/builtin/packages/ncl/package.py2
-rw-r--r--var/spack/repos/builtin/packages/netlib-scalapack/package.py21
-rw-r--r--var/spack/repos/builtin/packages/nettle/package.py2
-rw-r--r--var/spack/repos/builtin/packages/nextflow/package.py6
-rw-r--r--var/spack/repos/builtin/packages/nwchem/package.py2
-rw-r--r--var/spack/repos/builtin/packages/oce/package.py5
-rw-r--r--var/spack/repos/builtin/packages/octopus/package.py8
-rw-r--r--var/spack/repos/builtin/packages/oniguruma/package.py34
-rw-r--r--var/spack/repos/builtin/packages/openblas/package.py8
-rw-r--r--var/spack/repos/builtin/packages/openfoam-com/openfoam-bin-1612.patch503
-rw-r--r--var/spack/repos/builtin/packages/openfoam-com/openfoam-build-1612.patch17
-rw-r--r--var/spack/repos/builtin/packages/openfoam-com/openfoam-etc-1612.patch41
-rw-r--r--var/spack/repos/builtin/packages/openfoam-com/openfoam-mpi-1612.patch36
-rw-r--r--var/spack/repos/builtin/packages/openfoam-com/openfoam-site.patch42
-rw-r--r--var/spack/repos/builtin/packages/openfoam-com/package.py722
-rw-r--r--var/spack/repos/builtin/packages/openfoam-com/scotch-metis-lib-1612.patch48
-rw-r--r--var/spack/repos/builtin/packages/openfoam-com/zoltan-lib-1612.patch84
-rw-r--r--var/spack/repos/builtin/packages/openfoam-org/openfoam-etc-41.patch25
-rw-r--r--var/spack/repos/builtin/packages/openfoam-org/openfoam-site.patch42
-rw-r--r--var/spack/repos/builtin/packages/openfoam-org/package.py492
-rw-r--r--var/spack/repos/builtin/packages/openfst/package.py15
-rw-r--r--var/spack/repos/builtin/packages/openjpeg/package.py4
-rw-r--r--var/spack/repos/builtin/packages/openmpi/package.py2
-rw-r--r--var/spack/repos/builtin/packages/openssl/package.py2
-rw-r--r--var/spack/repos/builtin/packages/panda/package.py4
-rw-r--r--var/spack/repos/builtin/packages/pango/package.py2
-rw-r--r--var/spack/repos/builtin/packages/parallel/package.py1
-rw-r--r--var/spack/repos/builtin/packages/paraview/gcc-compiler-pv501.patch22
-rw-r--r--var/spack/repos/builtin/packages/paraview/package.py150
-rw-r--r--var/spack/repos/builtin/packages/paraview/stl-reader-pv440.patch11
-rw-r--r--var/spack/repos/builtin/packages/paraview/ui_pqExportStateWizard.patch11
-rw-r--r--var/spack/repos/builtin/packages/parmetis/package.py12
-rw-r--r--var/spack/repos/builtin/packages/patchelf/package.py2
-rw-r--r--var/spack/repos/builtin/packages/perl-dbi/package.py36
-rw-r--r--var/spack/repos/builtin/packages/perl-module-build/package.py41
-rw-r--r--var/spack/repos/builtin/packages/perl-term-readkey/package.py41
-rw-r--r--var/spack/repos/builtin/packages/perl-xml-parser/package.py37
-rw-r--r--var/spack/repos/builtin/packages/perl/package.py103
-rw-r--r--var/spack/repos/builtin/packages/petsc/macos-clang-8.1.0.diff18
-rw-r--r--var/spack/repos/builtin/packages/petsc/package.py7
-rw-r--r--var/spack/repos/builtin/packages/pexsi/make.inc2
-rw-r--r--var/spack/repos/builtin/packages/pexsi/package.py37
-rw-r--r--var/spack/repos/builtin/packages/pigz/package.py46
-rw-r--r--var/spack/repos/builtin/packages/pkg-config/package.py32
-rw-r--r--var/spack/repos/builtin/packages/plumed/package.py54
-rw-r--r--var/spack/repos/builtin/packages/prank/package.py2
-rw-r--r--var/spack/repos/builtin/packages/protobuf/package.py16
-rw-r--r--var/spack/repos/builtin/packages/pruners-ninja/package.py39
-rw-r--r--var/spack/repos/builtin/packages/py-abipy/package.py77
-rw-r--r--var/spack/repos/builtin/packages/py-apscheduler/package.py41
-rw-r--r--var/spack/repos/builtin/packages/py-autopep8/package.py13
-rw-r--r--var/spack/repos/builtin/packages/py-bokeh/package.py45
-rw-r--r--var/spack/repos/builtin/packages/py-brian2/package.py50
-rw-r--r--var/spack/repos/builtin/packages/py-cdo/package.py7
-rw-r--r--var/spack/repos/builtin/packages/py-dev/__init__.py.patch (renamed from var/spack/repos/builtin/py-dev/__init__.py.patch)0
-rw-r--r--var/spack/repos/builtin/packages/py-dev/package.py (renamed from var/spack/repos/builtin/py-dev/package.py)0
-rw-r--r--var/spack/repos/builtin/packages/py-easybuild-easyblocks/package.py36
-rw-r--r--var/spack/repos/builtin/packages/py-easybuild-easyconfigs/package.py37
-rw-r--r--var/spack/repos/builtin/packages/py-easybuild-framework/package.py39
-rw-r--r--var/spack/repos/builtin/packages/py-html2text/package.py36
-rw-r--r--var/spack/repos/builtin/packages/py-markdown/package.py4
-rw-r--r--var/spack/repos/builtin/packages/py-mongo/package.py38
-rw-r--r--var/spack/repos/builtin/packages/py-monty/package.py37
-rw-r--r--var/spack/repos/builtin/packages/py-netcdf4/package.py43
-rw-r--r--var/spack/repos/builtin/packages/py-nose/package.py6
-rw-r--r--var/spack/repos/builtin/packages/py-numpy/package.py35
-rw-r--r--var/spack/repos/builtin/packages/py-palettable/package.py36
-rw-r--r--var/spack/repos/builtin/packages/py-proj/package.py5
-rw-r--r--var/spack/repos/builtin/packages/py-py2cairo/package.py2
-rw-r--r--var/spack/repos/builtin/packages/py-pydispatcher/package.py36
-rw-r--r--var/spack/repos/builtin/packages/py-pymatgen/package.py54
-rw-r--r--var/spack/repos/builtin/packages/py-pympler/package.py42
-rw-r--r--var/spack/repos/builtin/packages/py-pynn/package.py50
-rw-r--r--var/spack/repos/builtin/packages/py-pypar/package.py3
-rw-r--r--var/spack/repos/builtin/packages/py-pytz/package.py12
-rw-r--r--var/spack/repos/builtin/packages/py-rtree/package.py17
-rw-r--r--var/spack/repos/builtin/packages/py-scikit-learn/package.py2
-rw-r--r--var/spack/repos/builtin/packages/py-scipy/package.py39
-rw-r--r--var/spack/repos/builtin/packages/py-seaborn/package.py46
-rw-r--r--var/spack/repos/builtin/packages/py-setuptools/package.py14
-rw-r--r--var/spack/repos/builtin/packages/py-spglib/package.py (renamed from var/spack/repos/builtin/packages/py-netcdf/package.py)16
-rw-r--r--var/spack/repos/builtin/packages/py-spykeutils/package.py39
-rw-r--r--var/spack/repos/builtin/packages/py-tzlocal/package.py38
-rw-r--r--var/spack/repos/builtin/packages/py-vsc-base/package.py35
-rw-r--r--var/spack/repos/builtin/packages/py-vsc-install/package.py36
-rw-r--r--var/spack/repos/builtin/packages/py-wrapt/package.py34
-rw-r--r--var/spack/repos/builtin/packages/python/package.py24
-rw-r--r--var/spack/repos/builtin/packages/qbank/package.py87
-rw-r--r--var/spack/repos/builtin/packages/qt-creator/package.py2
-rw-r--r--var/spack/repos/builtin/packages/qt/package.py10
-rw-r--r--var/spack/repos/builtin/packages/r-lava/package.py2
-rw-r--r--var/spack/repos/builtin/packages/r-packrat/package.py1
-rw-r--r--var/spack/repos/builtin/packages/rempi/package.py39
-rw-r--r--var/spack/repos/builtin/packages/root/package.py6
-rw-r--r--var/spack/repos/builtin/packages/rose/package.py7
-rw-r--r--var/spack/repos/builtin/packages/rust-bindgen/package.py4
-rw-r--r--var/spack/repos/builtin/packages/samtools/package.py5
-rw-r--r--var/spack/repos/builtin/packages/scorep/package.py11
-rw-r--r--var/spack/repos/builtin/packages/scotch/package.py9
-rw-r--r--var/spack/repos/builtin/packages/shiny-server/package.py77
-rw-r--r--var/spack/repos/builtin/packages/silo/package.py5
-rw-r--r--var/spack/repos/builtin/packages/snakemake/package.py39
-rw-r--r--var/spack/repos/builtin/packages/speex/package.py35
-rw-r--r--var/spack/repos/builtin/packages/sph2pipe/cmake.patch10
-rw-r--r--var/spack/repos/builtin/packages/sph2pipe/package.py37
-rw-r--r--var/spack/repos/builtin/packages/sst-dumpi/package.py52
-rw-r--r--var/spack/repos/builtin/packages/sst-macro/package.py64
-rw-r--r--var/spack/repos/builtin/packages/staden-io-lib/package.py37
-rw-r--r--var/spack/repos/builtin/packages/star-ccm-plus/package.py5
-rw-r--r--var/spack/repos/builtin/packages/stream/package.py6
-rw-r--r--var/spack/repos/builtin/packages/stress/package.py37
-rw-r--r--var/spack/repos/builtin/packages/sublime-text/package.py4
-rw-r--r--var/spack/repos/builtin/packages/subversion/package.py34
-rw-r--r--var/spack/repos/builtin/packages/symengine/package.py9
-rw-r--r--var/spack/repos/builtin/packages/tcl/package.py5
-rw-r--r--var/spack/repos/builtin/packages/tetgen/package.py2
-rw-r--r--var/spack/repos/builtin/packages/tinyxml/package.py7
-rw-r--r--var/spack/repos/builtin/packages/tk/package.py5
-rw-r--r--var/spack/repos/builtin/packages/trilinos/package.py10
-rw-r--r--var/spack/repos/builtin/packages/unison/package.py2
-rw-r--r--var/spack/repos/builtin/packages/util-linux/package.py2
-rw-r--r--var/spack/repos/builtin/packages/vim/package.py1
-rw-r--r--var/spack/repos/builtin/packages/voropp/package.py11
-rw-r--r--var/spack/repos/builtin/packages/vtk/package.py1
-rw-r--r--var/spack/repos/builtin/packages/wannier90/package.py46
-rw-r--r--var/spack/repos/builtin/packages/xsdktrilinos/package.py10
-rw-r--r--var/spack/repos/builtin/packages/yorick/package.py10
-rw-r--r--var/spack/repos/builtin/packages/zoltan/package.py5
365 files changed, 16637 insertions, 2874 deletions
diff --git a/.travis.yml b/.travis.yml
index 11e7e5fac3..d7bdf9b2ca 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -22,6 +22,22 @@ matrix:
os: linux
language: python
env: TEST_SUITE=unit
+ - python: '3.3'
+ os: linux
+ language: python
+ env: TEST_SUITE=unit
+ - python: '3.4'
+ os: linux
+ language: python
+ env: TEST_SUITE=unit
+ - python: '3.5'
+ os: linux
+ language: python
+ env: TEST_SUITE=unit
+ - python: '3.6'
+ os: linux
+ language: python
+ env: TEST_SUITE=unit
- python: '2.7'
os: linux
language: python
@@ -45,8 +61,8 @@ addons:
apt:
packages:
- gfortran
+ - mercurial
- graphviz
- - libyaml-dev
# Work around Travis's lack of support for Python on OSX
before_install:
@@ -61,7 +77,6 @@ install:
- pip install --upgrade codecov
- pip install --upgrade flake8
- pip install --upgrade sphinx
- - pip install --upgrade mercurial
before_script:
# Need this for the git tests to succeed.
diff --git a/README.md b/README.md
index 375aad4dd7..68c2939ec6 100644
--- a/README.md
+++ b/README.md
@@ -23,7 +23,8 @@ See the
[Feature Overview](http://spack.readthedocs.io/en/latest/features.html)
for examples and highlights.
-To install spack and install your first package:
+To install spack and install your first package, make sure you have
+Python (2 or 3). Then:
$ git clone https://github.com/llnl/spack.git
$ cd spack/bin
diff --git a/bin/spack b/bin/spack
index f885f577c8..c737a0f178 100755
--- a/bin/spack
+++ b/bin/spack
@@ -24,10 +24,12 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+from __future__ import print_function
+
import sys
-if (sys.version_info[0] > 2) or (sys.version_info[:2] < (2, 6)):
+if sys.version_info[:2] < (2, 6):
v_info = sys.version_info[:3]
- sys.exit("Spack requires Python 2.6 or 2.7. "
+ sys.exit("Spack requires Python 2.6 or higher."
"This is Python %d.%d.%d." % v_info)
import os
@@ -46,6 +48,13 @@ sys.path.insert(0, SPACK_LIB_PATH)
SPACK_EXTERNAL_LIBS = os.path.join(SPACK_LIB_PATH, "external")
sys.path.insert(0, SPACK_EXTERNAL_LIBS)
+# Handle vendoring of YAML specially, as it has two versions.
+if sys.version_info[0] == 2:
+ SPACK_YAML_LIBS = os.path.join(SPACK_EXTERNAL_LIBS, "yaml/lib")
+else:
+ SPACK_YAML_LIBS = os.path.join(SPACK_EXTERNAL_LIBS, "yaml/lib3")
+sys.path.insert(0, SPACK_YAML_LIBS)
+
# Quick and dirty check to clean orphaned .pyc files left over from
# previous revisions. These files were present in earlier versions of
# Spack, were removed, but shadow system modules that Spack still
@@ -67,8 +76,8 @@ for pyc_file in orphaned_pyc_files:
try:
os.remove(pyc_file)
except OSError as e:
- print ("WARNING: Spack may fail mysteriously. "
- "Couldn't remove orphaned .pyc file: %s" % pyc_file)
+ print("WARNING: Spack may fail mysteriously. "
+ "Couldn't remove orphaned .pyc file: %s" % pyc_file)
# If there is no working directory, use the spack prefix.
try:
diff --git a/etc/spack/defaults/packages.yaml b/etc/spack/defaults/packages.yaml
index 37737da7c6..0cafab28e9 100644
--- a/etc/spack/defaults/packages.yaml
+++ b/etc/spack/defaults/packages.yaml
@@ -17,8 +17,17 @@ packages:
all:
compiler: [gcc, intel, pgi, clang, xl, nag]
providers:
- mpi: [openmpi, mpich]
+ awk: [gawk]
blas: [openblas]
+ daal: [intel-parallel-studio+daal]
+ elf: [elfutils]
+ golang: [gcc]
+ ipp: [intel-parallel-studio+ipp]
lapack: [openblas]
- awk: [gawk]
+ mkl: [intel-parallel-studio+mkl]
+ mpe: [mpe2]
+ mpi: [openmpi, mpich]
+ opencl: [pocl]
+ openfoam: [foam-extend]
pil: [py-pillow]
+ scalapack: [netlib-scalapack]
diff --git a/lib/spack/docs/conf.py b/lib/spack/docs/conf.py
index db8d3d29dc..69ec2a0b33 100644
--- a/lib/spack/docs/conf.py
+++ b/lib/spack/docs/conf.py
@@ -51,6 +51,10 @@ from sphinx.apidoc import main as sphinx_apidoc
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('exts'))
sys.path.insert(0, os.path.abspath('../external'))
+if sys.version_info[0] < 3:
+ sys.path.insert(0, os.path.abspath('../external/yaml/lib'))
+else:
+ sys.path.insert(0, os.path.abspath('../external/yaml/lib3'))
sys.path.append(os.path.abspath('..'))
# Add the Spack bin directory to the path so that we can use its output in docs.
@@ -110,13 +114,13 @@ handling_spack = False
for line in fileinput.input('spack.rst', inplace=1):
if handling_spack:
if not line.startswith(' :noindex:'):
- print ' :noindex: %s' % ' '.join(spack.__all__)
+ print(' :noindex: %s' % ' '.join(spack.__all__))
handling_spack = False
if line.startswith('.. automodule::'):
handling_spack = (line == '.. automodule:: spack\n')
- print line,
+ sys.stdout.write(line)
# Enable todo items
todo_include_todos = True
diff --git a/lib/spack/docs/contribution_guide.rst b/lib/spack/docs/contribution_guide.rst
index e9cfe1fa54..a3b3197181 100644
--- a/lib/spack/docs/contribution_guide.rst
+++ b/lib/spack/docs/contribution_guide.rst
@@ -40,7 +40,7 @@ for the results of these tests after submitting a PR, we recommend that you run
locally to speed up the review process.
If you take a look in ``$SPACK_ROOT/.travis.yml``, you'll notice that we test
-against Python 2.6 and 2.7. We currently perform 3 types of tests:
+against Python 2.6, 2.7, and 3.3-3.6. We currently perform 3 types of tests:
^^^^^^^^^^
Unit Tests
diff --git a/lib/spack/docs/developer_guide.rst b/lib/spack/docs/developer_guide.rst
index 0ce4029950..ea8d50c6ca 100644
--- a/lib/spack/docs/developer_guide.rst
+++ b/lib/spack/docs/developer_guide.rst
@@ -447,16 +447,16 @@ the string that it detected to be the name and version. The
``--incorrect-name`` and ``--incorrect-version`` flags can be used to
print URLs that were not being parsed correctly.
-""""""""""""""""""
-``spack url test``
-""""""""""""""""""
+"""""""""""""""""""""
+``spack url summary``
+"""""""""""""""""""""
This command attempts to parse every URL for every package in Spack
and prints a summary of how many of them are being correctly parsed.
It also prints a histogram showing which regular expressions are being
matched and how frequently:
-.. command-output:: spack url test
+.. command-output:: spack url summary
This command is essential for anyone adding or changing the regular
expressions that parse names and versions. By running this command
diff --git a/lib/spack/docs/getting_started.rst b/lib/spack/docs/getting_started.rst
index 3c2610beb0..971d42cea0 100644
--- a/lib/spack/docs/getting_started.rst
+++ b/lib/spack/docs/getting_started.rst
@@ -11,7 +11,7 @@ Prerequisites
Spack has the following minimum requirements, which must be installed
before Spack is run:
-1. Python 2.6 or 2.7
+1. Python 2 (2.6 or 2.7) or 3 (3.3 - 3.6)
2. A C/C++ compiler
3. The ``git`` and ``curl`` commands.
@@ -774,7 +774,7 @@ This problem is related to OpenSSL, and in some cases might be solved
by installing a new version of ``git`` and ``openssl``:
#. Run ``spack install git``
-#. Add the output of ``spack module loads git`` to your ``.bahsrc``.
+#. Add the output of ``spack module loads git`` to your ``.bashrc``.
If this doesn't work, it is also possible to disable checking of SSL
certificates by using:
diff --git a/lib/spack/docs/index.rst b/lib/spack/docs/index.rst
index 4dffe6f091..2e99e96a3e 100644
--- a/lib/spack/docs/index.rst
+++ b/lib/spack/docs/index.rst
@@ -4,9 +4,13 @@
contain the root `toctree` directive.
===================
-Spack Documentation
+Spack
===================
+.. epigraph::
+
+ `These are docs for the Spack package manager. For sphere packing, see` `pyspack <https://pyspack.readthedocs.io>`_.
+
Spack is a package management tool designed to support multiple
versions and configurations of software on a wide variety of platforms
and environments. It was designed for large supercomputing centers,
diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst
index 729ea5d656..18541179b2 100644
--- a/lib/spack/docs/packaging_guide.rst
+++ b/lib/spack/docs/packaging_guide.rst
@@ -1560,6 +1560,28 @@ Python's ``setup_dependent_environment`` method also sets up some
other variables, creates a directory, and sets up the ``PYTHONPATH``
so that dependent packages can find their dependencies at build time.
+.. _packaging_conflicts:
+
+---------
+Conflicts
+---------
+
+Sometimes packages have known bugs, or limitations, that would prevent them
+to build e.g. against other dependencies or with certain compilers. Spack
+makes it possible to express such constraints with the ``conflicts`` directive.
+
+Adding the following to a package:
+
+.. code-block:: python
+
+ conflicts('%intel', when='@1.2')
+
+we express the fact that the current package *cannot be built* with the Intel
+compiler when we are trying to install version "1.2". The ``when`` argument can
+be omitted, in which case the conflict will always be active.
+Conflicts are always evaluated after the concretization step has been performed,
+and if any match is found a detailed error message is shown to the user.
+
.. _packaging_extensions:
----------
@@ -2043,6 +2065,10 @@ The classes that are currently provided by Spack are:
| :py:class:`.PythonPackage` | Specialized class for |
| | :py:class:`.Python` extensions |
+------------------------------------+----------------------------------+
+ | :py:class:`.PerlPackage` | Specialized class for |
+ | | :py:class:`.Perl` extensions |
+ +------------------------------------+----------------------------------+
+
diff --git a/lib/spack/external/_pytest/pytester.py b/lib/spack/external/_pytest/pytester.py
index 17ff529a6c..d87c0a762a 100644
--- a/lib/spack/external/_pytest/pytester.py
+++ b/lib/spack/external/_pytest/pytester.py
@@ -551,7 +551,7 @@ class Testdir:
def _possibly_invalidate_import_caches(self):
# invalidate caches if we can (py33 and above)
try:
- import importlib
+ import importlib # nopyqver
except ImportError:
pass
else:
diff --git a/lib/spack/external/functools_backport.py b/lib/spack/external/functools_backport.py
index 19f0903c82..b3c913ffd7 100644
--- a/lib/spack/external/functools_backport.py
+++ b/lib/spack/external/functools_backport.py
@@ -28,3 +28,20 @@ def total_ordering(cls):
opfunc.__doc__ = getattr(int, opname).__doc__
setattr(cls, opname, opfunc)
return cls
+
+
+@total_ordering
+class reverse_order(object):
+ """Helper for creating key functions.
+
+ This is a wrapper that inverts the sense of the natural
+ comparisons on the object.
+ """
+ def __init__(self, value):
+ self.value = value
+
+ def __eq__(self, other):
+ return other.value == self.value
+
+ def __lt__(self, other):
+ return other.value < self.value
diff --git a/lib/spack/external/ordereddict_backport.py b/lib/spack/external/ordereddict_backport.py
index 8ddad1477e..154e5d1872 100644
--- a/lib/spack/external/ordereddict_backport.py
+++ b/lib/spack/external/ordereddict_backport.py
@@ -8,7 +8,13 @@
try:
from thread import get_ident as _get_ident
except ImportError:
- from dummy_thread import get_ident as _get_ident
+ try:
+ from dummy_thread import get_ident as _get_ident
+ except ImportError:
+ try:
+ from _dummy_thread import get_ident as _get_ident
+ except ImportError:
+ from threading import get_ident as _get_ident # nopyqver
try:
from _abcoll import KeysView, ValuesView, ItemsView
diff --git a/lib/spack/external/pyqver2.py b/lib/spack/external/pyqver2.py
index 571e005524..07b191425b 100755
--- a/lib/spack/external/pyqver2.py
+++ b/lib/spack/external/pyqver2.py
@@ -57,11 +57,7 @@ StandardModules = {
"hmac": (2, 2),
"hotshot": (2, 2),
"HTMLParser": (2, 2),
-# skip importlib until we can conditionally skip for pytest.
-# pytest tries to import this and catches the exception, but
-# the test will still fail.
-# TODO: can we excelude with a comment like '# flake: noqa?'
-# "importlib": (2, 7),
+ "importlib": (2, 7),
"inspect": (2, 1),
"io": (2, 6),
"itertools": (2, 3),
@@ -262,7 +258,7 @@ class NodeChecker(object):
self.add(node, (2,2), "yield expression")
self.default(node)
-def get_versions(source):
+def get_versions(source, filename=None):
"""Return information about the Python versions required for specific features.
The return value is a dictionary with keys as a version number as a tuple
@@ -346,65 +342,3 @@ def qver(source):
#(2, 6)
"""
return max(get_versions(source).keys())
-
-
-if __name__ == '__main__':
-
- Verbose = False
- MinVersion = (2, 3)
- Lint = False
-
- files = []
- i = 1
- while i < len(sys.argv):
- a = sys.argv[i]
- if a == "--test":
- import doctest
- doctest.testmod()
- sys.exit(0)
- if a == "-v" or a == "--verbose":
- Verbose = True
- elif a == "-l" or a == "--lint":
- Lint = True
- elif a == "-m" or a == "--min-version":
- i += 1
- MinVersion = tuple(map(int, sys.argv[i].split(".")))
- else:
- files.append(a)
- i += 1
-
- if not files:
- print >>sys.stderr, """Usage: %s [options] source ...
-
- Report minimum Python version required to run given source files.
-
- -m x.y or --min-version x.y (default 2.3)
- report version triggers at or above version x.y in verbose mode
- -v or --verbose
- print more detailed report of version triggers for each version
- """ % sys.argv[0]
- sys.exit(1)
-
- for fn in files:
- try:
- f = open(fn)
- source = f.read()
- f.close()
- ver = get_versions(source)
- if Verbose:
- print fn
- for v in sorted([k for k in ver.keys() if k >= MinVersion], reverse=True):
- reasons = [x for x in uniq(ver[v]) if x]
- if reasons:
- # each reason is (lineno, message)
- print "\t%s\t%s" % (".".join(map(str, v)), ", ".join([x[1] for x in reasons]))
- elif Lint:
- for v in sorted([k for k in ver.keys() if k >= MinVersion], reverse=True):
- reasons = [x for x in uniq(ver[v]) if x]
- for r in reasons:
- # each reason is (lineno, message)
- print "%s:%s: %s %s" % (fn, r[0], ".".join(map(str, v)), r[1])
- else:
- print "%s\t%s" % (".".join(map(str, max(ver.keys()))), fn)
- except SyntaxError, x:
- print "%s: syntax error compiling with Python %s: %s" % (fn, platform.python_version(), x)
diff --git a/lib/spack/external/pyqver3.py b/lib/spack/external/pyqver3.py
new file mode 100755
index 0000000000..b63576a064
--- /dev/null
+++ b/lib/spack/external/pyqver3.py
@@ -0,0 +1,248 @@
+#!/usr/bin/env python3
+#
+# pyqver3.py
+# by Greg Hewgill
+# https://github.com/ghewgill/pyqver
+#
+# This software is provided 'as-is', without any express or implied
+# warranty. In no event will the author be held liable for any damages
+# arising from the use of this software.
+#
+# Permission is granted to anyone to use this software for any purpose,
+# including commercial applications, and to alter it and redistribute it
+# freely, subject to the following restrictions:
+#
+# 1. The origin of this software must not be misrepresented; you must not
+# claim that you wrote the original software. If you use this software
+# in a product, an acknowledgment in the product documentation would be
+# appreciated but is not required.
+# 2. Altered source versions must be plainly marked as such, and must not be
+# misrepresented as being the original software.
+# 3. This notice may not be removed or altered from any source distribution.
+#
+# Copyright (c) 2009-2013 Greg Hewgill http://hewgill.com
+#
+import ast
+import platform
+import sys
+
+StandardModules = {
+# skip argparse now that it's in lib/spack/external
+# "argparse": (3, 2),
+ "faulthandler": (3, 3),
+ "importlib": (3, 1),
+ "ipaddress": (3, 3),
+ "lzma": (3, 3),
+ "tkinter.ttk": (3, 1),
+ "unittest.mock": (3, 3),
+ "venv": (3, 3),
+}
+
+Functions = {
+ "bytearray.maketrans": (3, 1),
+ "bytes.maketrans": (3, 1),
+ "bz2.open": (3, 3),
+ "collections.Counter": (3, 1),
+ "collections.OrderedDict": (3, 1),
+ "crypt.mksalt": (3, 3),
+ "email.generator.BytesGenerator": (3, 2),
+ "email.message_from_binary_file": (3, 2),
+ "email.message_from_bytes": (3, 2),
+ "functools.lru_cache": (3, 2),
+ "gzip.compress": (3, 2),
+ "gzip.decompress": (3, 2),
+ "inspect.getclosurevars": (3, 3),
+ "inspect.getgeneratorlocals": (3, 3),
+ "inspect.getgeneratorstate": (3, 2),
+ "itertools.combinations_with_replacement": (3, 1),
+ "itertools.compress": (3, 1),
+ "logging.config.dictConfig": (3, 2),
+ "logging.NullHandler": (3, 1),
+ "math.erf": (3, 2),
+ "math.erfc": (3, 2),
+ "math.expm1": (3, 2),
+ "math.gamma": (3, 2),
+ "math.isfinite": (3, 2),
+ "math.lgamma": (3, 2),
+ "math.log2": (3, 3),
+ "os.environb": (3, 2),
+ "os.fsdecode": (3, 2),
+ "os.fsencode": (3, 2),
+ "os.fwalk": (3, 3),
+ "os.getenvb": (3, 2),
+ "os.get_exec_path": (3, 2),
+ "os.getgrouplist": (3, 3),
+ "os.getpriority": (3, 3),
+ "os.getresgid": (3, 2),
+ "os.getresuid": (3, 2),
+ "os.get_terminal_size": (3, 3),
+ "os.getxattr": (3, 3),
+ "os.initgroups": (3, 2),
+ "os.listxattr": (3, 3),
+ "os.lockf": (3, 3),
+ "os.pipe2": (3, 3),
+ "os.posix_fadvise": (3, 3),
+ "os.posix_fallocate": (3, 3),
+ "os.pread": (3, 3),
+ "os.pwrite": (3, 3),
+ "os.readv": (3, 3),
+ "os.removexattr": (3, 3),
+ "os.replace": (3, 3),
+ "os.sched_get_priority_max": (3, 3),
+ "os.sched_get_priority_min": (3, 3),
+ "os.sched_getaffinity": (3, 3),
+ "os.sched_getparam": (3, 3),
+ "os.sched_getscheduler": (3, 3),
+ "os.sched_rr_get_interval": (3, 3),
+ "os.sched_setaffinity": (3, 3),
+ "os.sched_setparam": (3, 3),
+ "os.sched_setscheduler": (3, 3),
+ "os.sched_yield": (3, 3),
+ "os.sendfile": (3, 3),
+ "os.setpriority": (3, 3),
+ "os.setresgid": (3, 2),
+ "os.setresuid": (3, 2),
+ "os.setxattr": (3, 3),
+ "os.sync": (3, 3),
+ "os.truncate": (3, 3),
+ "os.waitid": (3, 3),
+ "os.writev": (3, 3),
+ "shutil.chown": (3, 3),
+ "shutil.disk_usage": (3, 3),
+ "shutil.get_archive_formats": (3, 3),
+ "shutil.get_terminal_size": (3, 3),
+ "shutil.get_unpack_formats": (3, 3),
+ "shutil.make_archive": (3, 3),
+ "shutil.register_archive_format": (3, 3),
+ "shutil.register_unpack_format": (3, 3),
+ "shutil.unpack_archive": (3, 3),
+ "shutil.unregister_archive_format": (3, 3),
+ "shutil.unregister_unpack_format": (3, 3),
+ "shutil.which": (3, 3),
+ "signal.pthread_kill": (3, 3),
+ "signal.pthread_sigmask": (3, 3),
+ "signal.sigpending": (3, 3),
+ "signal.sigtimedwait": (3, 3),
+ "signal.sigwait": (3, 3),
+ "signal.sigwaitinfo": (3, 3),
+ "socket.CMSG_LEN": (3, 3),
+ "socket.CMSG_SPACE": (3, 3),
+ "socket.fromshare": (3, 3),
+ "socket.if_indextoname": (3, 3),
+ "socket.if_nameindex": (3, 3),
+ "socket.if_nametoindex": (3, 3),
+ "socket.sethostname": (3, 3),
+ "ssl.match_hostname": (3, 2),
+ "ssl.RAND_bytes": (3, 3),
+ "ssl.RAND_pseudo_bytes": (3, 3),
+ "ssl.SSLContext": (3, 2),
+ "ssl.SSLEOFError": (3, 3),
+ "ssl.SSLSyscallError": (3, 3),
+ "ssl.SSLWantReadError": (3, 3),
+ "ssl.SSLWantWriteError": (3, 3),
+ "ssl.SSLZeroReturnError": (3, 3),
+ "stat.filemode": (3, 3),
+ "textwrap.indent": (3, 3),
+ "threading.get_ident": (3, 3),
+ "time.clock_getres": (3, 3),
+ "time.clock_gettime": (3, 3),
+ "time.clock_settime": (3, 3),
+ "time.get_clock_info": (3, 3),
+ "time.monotonic": (3, 3),
+ "time.perf_counter": (3, 3),
+ "time.process_time": (3, 3),
+ "types.new_class": (3, 3),
+ "types.prepare_class": (3, 3),
+}
+
+def uniq(a):
+ if len(a) == 0:
+ return []
+ else:
+ return [a[0]] + uniq([x for x in a if x != a[0]])
+
+class NodeChecker(ast.NodeVisitor):
+ def __init__(self):
+ self.vers = dict()
+ self.vers[(3,0)] = []
+ def add(self, node, ver, msg):
+ if ver not in self.vers:
+ self.vers[ver] = []
+ self.vers[ver].append((node.lineno, msg))
+ def visit_Call(self, node):
+ def rollup(n):
+ if isinstance(n, ast.Name):
+ return n.id
+ elif isinstance(n, ast.Attribute):
+ r = rollup(n.value)
+ if r:
+ return r + "." + n.attr
+ name = rollup(node.func)
+ if name:
+ v = Functions.get(name)
+ if v is not None:
+ self.add(node, v, name)
+ self.generic_visit(node)
+ def visit_Import(self, node):
+ for n in node.names:
+ v = StandardModules.get(n.name)
+ if v is not None:
+ self.add(node, v, n.name)
+ self.generic_visit(node)
+ def visit_ImportFrom(self, node):
+ v = StandardModules.get(node.module)
+ if v is not None:
+ self.add(node, v, node.module)
+ for n in node.names:
+ name = node.module + "." + n.name
+ v = Functions.get(name)
+ if v is not None:
+ self.add(node, v, name)
+ def visit_Raise(self, node):
+ if isinstance(node.cause, ast.Name) and node.cause.id == "None":
+ self.add(node, (3,3), "raise ... from None")
+ def visit_YieldFrom(self, node):
+ self.add(node, (3,3), "yield from")
+
+def get_versions(source, filename=None):
+ """Return information about the Python versions required for specific features.
+
+ The return value is a dictionary with keys as a version number as a tuple
+ (for example Python 3.1 is (3,1)) and the value are a list of features that
+ require the indicated Python version.
+ """
+ tree = ast.parse(source, filename=filename)
+ checker = NodeChecker()
+ checker.visit(tree)
+ return checker.vers
+
+def v33(source):
+ if sys.version_info >= (3, 3):
+ return qver(source)
+ else:
+ print("Not all features tested, run --test with Python 3.3", file=sys.stderr)
+ return (3, 3)
+
+def qver(source):
+ """Return the minimum Python version required to run a particular bit of code.
+
+ >>> qver('print("hello world")')
+ (3, 0)
+ >>> qver("import importlib")
+ (3, 1)
+ >>> qver("from importlib import x")
+ (3, 1)
+ >>> qver("import tkinter.ttk")
+ (3, 1)
+ >>> qver("from collections import Counter")
+ (3, 1)
+ >>> qver("collections.OrderedDict()")
+ (3, 1)
+ >>> qver("import functools\\n@functools.lru_cache()\\ndef f(x): x*x")
+ (3, 2)
+ >>> v33("yield from x")
+ (3, 3)
+ >>> v33("raise x from None")
+ (3, 3)
+ """
+ return max(get_versions(source).keys())
diff --git a/lib/spack/external/six.py b/lib/spack/external/six.py
new file mode 100644
index 0000000000..5293325821
--- /dev/null
+++ b/lib/spack/external/six.py
@@ -0,0 +1,886 @@
+# Copyright (c) 2010-2017 Benjamin Peterson
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in all
+# copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
+
+"""Utilities for writing code that runs on Python 2 and 3"""
+
+from __future__ import absolute_import
+
+import functools
+import itertools
+import operator
+import sys
+import types
+
+__author__ = "Benjamin Peterson <benjamin@python.org>"
+__version__ = "1.10.0"
+
+
+# Useful for very coarse version differentiation.
+PY2 = sys.version_info[0] == 2
+PY3 = sys.version_info[0] == 3
+PY34 = sys.version_info[0:2] >= (3, 4)
+
+if PY3:
+ string_types = str,
+ integer_types = int,
+ class_types = type,
+ text_type = str
+ binary_type = bytes
+
+ MAXSIZE = sys.maxsize
+else:
+ string_types = basestring,
+ integer_types = (int, long)
+ class_types = (type, types.ClassType)
+ text_type = unicode
+ binary_type = str
+
+ if sys.platform.startswith("java"):
+ # Jython always uses 32 bits.
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
+ class X(object):
+
+ def __len__(self):
+ return 1 << 31
+ try:
+ len(X())
+ except OverflowError:
+ # 32-bit
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # 64-bit
+ MAXSIZE = int((1 << 63) - 1)
+ del X
+
+
+def _add_doc(func, doc):
+ """Add documentation to a function."""
+ func.__doc__ = doc
+
+
+def _import_module(name):
+ """Import module, returning the module after the last dot."""
+ __import__(name)
+ return sys.modules[name]
+
+
+class _LazyDescr(object):
+
+ def __init__(self, name):
+ self.name = name
+
+ def __get__(self, obj, tp):
+ result = self._resolve()
+ setattr(obj, self.name, result) # Invokes __set__.
+ try:
+ # This is a bit ugly, but it avoids running this again by
+ # removing this descriptor.
+ delattr(obj.__class__, self.name)
+ except AttributeError:
+ pass
+ return result
+
+
+class MovedModule(_LazyDescr):
+
+ def __init__(self, name, old, new=None):
+ super(MovedModule, self).__init__(name)
+ if PY3:
+ if new is None:
+ new = name
+ self.mod = new
+ else:
+ self.mod = old
+
+ def _resolve(self):
+ return _import_module(self.mod)
+
+ def __getattr__(self, attr):
+ _module = self._resolve()
+ value = getattr(_module, attr)
+ setattr(self, attr, value)
+ return value
+
+
+class _LazyModule(types.ModuleType):
+
+ def __init__(self, name):
+ super(_LazyModule, self).__init__(name)
+ self.__doc__ = self.__class__.__doc__
+
+ def __dir__(self):
+ attrs = ["__doc__", "__name__"]
+ attrs += [attr.name for attr in self._moved_attributes]
+ return attrs
+
+ # Subclasses should override this
+ _moved_attributes = []
+
+
+class MovedAttribute(_LazyDescr):
+
+ def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
+ super(MovedAttribute, self).__init__(name)
+ if PY3:
+ if new_mod is None:
+ new_mod = name
+ self.mod = new_mod
+ if new_attr is None:
+ if old_attr is None:
+ new_attr = name
+ else:
+ new_attr = old_attr
+ self.attr = new_attr
+ else:
+ self.mod = old_mod
+ if old_attr is None:
+ old_attr = name
+ self.attr = old_attr
+
+ def _resolve(self):
+ module = _import_module(self.mod)
+ return getattr(module, self.attr)
+
+
+class _SixMetaPathImporter(object):
+
+ """
+ A meta path importer to import six.moves and its submodules.
+
+ This class implements a PEP302 finder and loader. It should be compatible
+ with Python 2.5 and all existing versions of Python3
+ """
+
+ def __init__(self, six_module_name):
+ self.name = six_module_name
+ self.known_modules = {}
+
+ def _add_module(self, mod, *fullnames):
+ for fullname in fullnames:
+ self.known_modules[self.name + "." + fullname] = mod
+
+ def _get_module(self, fullname):
+ return self.known_modules[self.name + "." + fullname]
+
+ def find_module(self, fullname, path=None):
+ if fullname in self.known_modules:
+ return self
+ return None
+
+ def __get_module(self, fullname):
+ try:
+ return self.known_modules[fullname]
+ except KeyError:
+ raise ImportError("This loader does not know module " + fullname)
+
+ def load_module(self, fullname):
+ try:
+ # in case of a reload
+ return sys.modules[fullname]
+ except KeyError:
+ pass
+ mod = self.__get_module(fullname)
+ if isinstance(mod, MovedModule):
+ mod = mod._resolve()
+ else:
+ mod.__loader__ = self
+ sys.modules[fullname] = mod
+ return mod
+
+ def is_package(self, fullname):
+ """
+ Return true, if the named module is a package.
+
+ We need this method to get correct spec objects with
+ Python 3.4 (see PEP451)
+ """
+ return hasattr(self.__get_module(fullname), "__path__")
+
+ def get_code(self, fullname):
+ """Return None
+
+ Required, if is_package is implemented"""
+ self.__get_module(fullname) # eventually raises ImportError
+ return None
+ get_source = get_code # same as get_code
+
+_importer = _SixMetaPathImporter(__name__)
+
+
+class _MovedItems(_LazyModule):
+
+ """Lazy loading of moved objects"""
+ __path__ = [] # mark as package
+
+
+_moved_attributes = [
+ MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
+ MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
+ MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
+ MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
+ MovedAttribute("intern", "__builtin__", "sys"),
+ MovedAttribute("map", "itertools", "builtins", "imap", "map"),
+ MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
+ MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
+ MovedAttribute("getstatusoutput", "commands", "subprocess"),
+ MovedAttribute("getoutput", "commands", "subprocess"),
+ MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
+ MovedAttribute("reduce", "__builtin__", "functools"),
+ MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
+ MovedAttribute("StringIO", "StringIO", "io"),
+ MovedAttribute("UserDict", "UserDict", "collections"),
+ MovedAttribute("UserList", "UserList", "collections"),
+ MovedAttribute("UserString", "UserString", "collections"),
+ MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
+ MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
+ MovedModule("builtins", "__builtin__"),
+ MovedModule("configparser", "ConfigParser"),
+ MovedModule("copyreg", "copy_reg"),
+ MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
+ MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
+ MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
+ MovedModule("http_cookies", "Cookie", "http.cookies"),
+ MovedModule("html_entities", "htmlentitydefs", "html.entities"),
+ MovedModule("html_parser", "HTMLParser", "html.parser"),
+ MovedModule("http_client", "httplib", "http.client"),
+ MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
+ MovedModule("email_mime_image", "email.MIMEImage", "email.mime.image"),
+ MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
+ MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
+ MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
+ MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
+ MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
+ MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
+ MovedModule("cPickle", "cPickle", "pickle"),
+ MovedModule("queue", "Queue"),
+ MovedModule("reprlib", "repr"),
+ MovedModule("socketserver", "SocketServer"),
+ MovedModule("_thread", "thread", "_thread"),
+ MovedModule("tkinter", "Tkinter"),
+ MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
+ MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
+ MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
+ MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
+ MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
+ MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
+ MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
+ MovedModule("tkinter_colorchooser", "tkColorChooser",
+ "tkinter.colorchooser"),
+ MovedModule("tkinter_commondialog", "tkCommonDialog",
+ "tkinter.commondialog"),
+ MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_font", "tkFont", "tkinter.font"),
+ MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
+ MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
+ "tkinter.simpledialog"),
+ MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
+ MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
+ MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
+ MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
+ MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
+ MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
+]
+# Add windows specific modules.
+if sys.platform == "win32":
+ _moved_attributes += [
+ MovedModule("winreg", "_winreg"),
+ ]
+
+for attr in _moved_attributes:
+ setattr(_MovedItems, attr.name, attr)
+ if isinstance(attr, MovedModule):
+ _importer._add_module(attr, "moves." + attr.name)
+del attr
+
+_MovedItems._moved_attributes = _moved_attributes
+
+moves = _MovedItems(__name__ + ".moves")
+_importer._add_module(moves, "moves")
+
+
+class Module_six_moves_urllib_parse(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_parse"""
+
+
+_urllib_parse_moved_attributes = [
+ MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
+ MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
+ MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
+ MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
+ MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
+ MovedAttribute("urljoin", "urlparse", "urllib.parse"),
+ MovedAttribute("urlparse", "urlparse", "urllib.parse"),
+ MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
+ MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
+ MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
+ MovedAttribute("quote", "urllib", "urllib.parse"),
+ MovedAttribute("quote_plus", "urllib", "urllib.parse"),
+ MovedAttribute("unquote", "urllib", "urllib.parse"),
+ MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
+ MovedAttribute("unquote_to_bytes", "urllib", "urllib.parse", "unquote", "unquote_to_bytes"),
+ MovedAttribute("urlencode", "urllib", "urllib.parse"),
+ MovedAttribute("splitquery", "urllib", "urllib.parse"),
+ MovedAttribute("splittag", "urllib", "urllib.parse"),
+ MovedAttribute("splituser", "urllib", "urllib.parse"),
+ MovedAttribute("splitvalue", "urllib", "urllib.parse"),
+ MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_params", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_query", "urlparse", "urllib.parse"),
+ MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
+]
+for attr in _urllib_parse_moved_attributes:
+ setattr(Module_six_moves_urllib_parse, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
+ "moves.urllib_parse", "moves.urllib.parse")
+
+
+class Module_six_moves_urllib_error(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_error"""
+
+
+_urllib_error_moved_attributes = [
+ MovedAttribute("URLError", "urllib2", "urllib.error"),
+ MovedAttribute("HTTPError", "urllib2", "urllib.error"),
+ MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
+]
+for attr in _urllib_error_moved_attributes:
+ setattr(Module_six_moves_urllib_error, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
+ "moves.urllib_error", "moves.urllib.error")
+
+
+class Module_six_moves_urllib_request(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_request"""
+
+
+_urllib_request_moved_attributes = [
+ MovedAttribute("urlopen", "urllib2", "urllib.request"),
+ MovedAttribute("install_opener", "urllib2", "urllib.request"),
+ MovedAttribute("build_opener", "urllib2", "urllib.request"),
+ MovedAttribute("pathname2url", "urllib", "urllib.request"),
+ MovedAttribute("url2pathname", "urllib", "urllib.request"),
+ MovedAttribute("getproxies", "urllib", "urllib.request"),
+ MovedAttribute("Request", "urllib2", "urllib.request"),
+ MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
+ MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
+ MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
+ MovedAttribute("FileHandler", "urllib2", "urllib.request"),
+ MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
+ MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
+ MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
+ MovedAttribute("urlretrieve", "urllib", "urllib.request"),
+ MovedAttribute("urlcleanup", "urllib", "urllib.request"),
+ MovedAttribute("URLopener", "urllib", "urllib.request"),
+ MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
+ MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
+]
+for attr in _urllib_request_moved_attributes:
+ setattr(Module_six_moves_urllib_request, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
+ "moves.urllib_request", "moves.urllib.request")
+
+
+class Module_six_moves_urllib_response(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_response"""
+
+
+_urllib_response_moved_attributes = [
+ MovedAttribute("addbase", "urllib", "urllib.response"),
+ MovedAttribute("addclosehook", "urllib", "urllib.response"),
+ MovedAttribute("addinfo", "urllib", "urllib.response"),
+ MovedAttribute("addinfourl", "urllib", "urllib.response"),
+]
+for attr in _urllib_response_moved_attributes:
+ setattr(Module_six_moves_urllib_response, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
+ "moves.urllib_response", "moves.urllib.response")
+
+
+class Module_six_moves_urllib_robotparser(_LazyModule):
+
+ """Lazy loading of moved objects in six.moves.urllib_robotparser"""
+
+
+_urllib_robotparser_moved_attributes = [
+ MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
+]
+for attr in _urllib_robotparser_moved_attributes:
+ setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
+del attr
+
+Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
+
+_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
+ "moves.urllib_robotparser", "moves.urllib.robotparser")
+
+
+class Module_six_moves_urllib(types.ModuleType):
+
+ """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
+ __path__ = [] # mark as package
+ parse = _importer._get_module("moves.urllib_parse")
+ error = _importer._get_module("moves.urllib_error")
+ request = _importer._get_module("moves.urllib_request")
+ response = _importer._get_module("moves.urllib_response")
+ robotparser = _importer._get_module("moves.urllib_robotparser")
+
+ def __dir__(self):
+ return ['parse', 'error', 'request', 'response', 'robotparser']
+
+_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
+ "moves.urllib")
+
+
+def add_move(move):
+ """Add an item to six.moves."""
+ setattr(_MovedItems, move.name, move)
+
+
+def remove_move(name):
+ """Remove item from six.moves."""
+ try:
+ delattr(_MovedItems, name)
+ except AttributeError:
+ try:
+ del moves.__dict__[name]
+ except KeyError:
+ raise AttributeError("no such move, %r" % (name,))
+
+
+if PY3:
+ _meth_func = "__func__"
+ _meth_self = "__self__"
+
+ _func_closure = "__closure__"
+ _func_code = "__code__"
+ _func_defaults = "__defaults__"
+ _func_globals = "__globals__"
+else:
+ _meth_func = "im_func"
+ _meth_self = "im_self"
+
+ _func_closure = "func_closure"
+ _func_code = "func_code"
+ _func_defaults = "func_defaults"
+ _func_globals = "func_globals"
+
+
+try:
+ advance_iterator = next
+except NameError:
+ def advance_iterator(it):
+ return it.next()
+next = advance_iterator
+
+
+try:
+ callable = callable
+except NameError:
+ def callable(obj):
+ return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
+
+
+if PY3:
+ def get_unbound_function(unbound):
+ return unbound
+
+ create_bound_method = types.MethodType
+
+ def create_unbound_method(func, cls):
+ return func
+
+ Iterator = object
+else:
+ def get_unbound_function(unbound):
+ return unbound.im_func
+
+ def create_bound_method(func, obj):
+ return types.MethodType(func, obj, obj.__class__)
+
+ def create_unbound_method(func, cls):
+ return types.MethodType(func, None, cls)
+
+ class Iterator(object):
+
+ def next(self):
+ return type(self).__next__(self)
+
+ callable = callable
+_add_doc(get_unbound_function,
+ """Get the function out of a possibly unbound function""")
+
+
+get_method_function = operator.attrgetter(_meth_func)
+get_method_self = operator.attrgetter(_meth_self)
+get_function_closure = operator.attrgetter(_func_closure)
+get_function_code = operator.attrgetter(_func_code)
+get_function_defaults = operator.attrgetter(_func_defaults)
+get_function_globals = operator.attrgetter(_func_globals)
+
+
+if PY3:
+ def iterkeys(d, **kw):
+ return iter(d.keys(**kw))
+
+ def itervalues(d, **kw):
+ return iter(d.values(**kw))
+
+ def iteritems(d, **kw):
+ return iter(d.items(**kw))
+
+ def iterlists(d, **kw):
+ return iter(d.lists(**kw))
+
+ viewkeys = operator.methodcaller("keys")
+
+ viewvalues = operator.methodcaller("values")
+
+ viewitems = operator.methodcaller("items")
+else:
+ def iterkeys(d, **kw):
+ return d.iterkeys(**kw)
+
+ def itervalues(d, **kw):
+ return d.itervalues(**kw)
+
+ def iteritems(d, **kw):
+ return d.iteritems(**kw)
+
+ def iterlists(d, **kw):
+ return d.iterlists(**kw)
+
+ viewkeys = operator.methodcaller("viewkeys")
+
+ viewvalues = operator.methodcaller("viewvalues")
+
+ viewitems = operator.methodcaller("viewitems")
+
+_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
+_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
+_add_doc(iteritems,
+ "Return an iterator over the (key, value) pairs of a dictionary.")
+_add_doc(iterlists,
+ "Return an iterator over the (key, [values]) pairs of a dictionary.")
+
+
+if PY3:
+ def b(s):
+ return s.encode("latin-1")
+
+ def u(s):
+ return s
+ unichr = chr
+ import struct
+ int2byte = struct.Struct(">B").pack
+ del struct
+ byte2int = operator.itemgetter(0)
+ indexbytes = operator.getitem
+ iterbytes = iter
+ import io
+ StringIO = io.StringIO
+ BytesIO = io.BytesIO
+ _assertCountEqual = "assertCountEqual"
+ if sys.version_info[1] <= 1:
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
+ else:
+ _assertRaisesRegex = "assertRaisesRegex"
+ _assertRegex = "assertRegex"
+else:
+ def b(s):
+ return s
+ # Workaround for standalone backslash
+
+ def u(s):
+ return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
+ unichr = unichr
+ int2byte = chr
+
+ def byte2int(bs):
+ return ord(bs[0])
+
+ def indexbytes(buf, i):
+ return ord(buf[i])
+ iterbytes = functools.partial(itertools.imap, ord)
+ import StringIO
+ StringIO = BytesIO = StringIO.StringIO
+ _assertCountEqual = "assertItemsEqual"
+ _assertRaisesRegex = "assertRaisesRegexp"
+ _assertRegex = "assertRegexpMatches"
+_add_doc(b, """Byte literal""")
+_add_doc(u, """Text literal""")
+
+
+def assertCountEqual(self, *args, **kwargs):
+ return getattr(self, _assertCountEqual)(*args, **kwargs)
+
+
+def assertRaisesRegex(self, *args, **kwargs):
+ return getattr(self, _assertRaisesRegex)(*args, **kwargs)
+
+
+def assertRegex(self, *args, **kwargs):
+ return getattr(self, _assertRegex)(*args, **kwargs)
+
+
+if PY3:
+ exec_ = getattr(moves.builtins, "exec")
+
+ def reraise(tp, value, tb=None):
+ try:
+ if value is None:
+ value = tp()
+ if value.__traceback__ is not tb:
+ raise value.with_traceback(tb)
+ raise value
+ finally:
+ value = None
+ tb = None
+
+else:
+ def exec_(_code_, _globs_=None, _locs_=None):
+ """Execute code in a namespace."""
+ if _globs_ is None:
+ frame = sys._getframe(1)
+ _globs_ = frame.f_globals
+ if _locs_ is None:
+ _locs_ = frame.f_locals
+ del frame
+ elif _locs_ is None:
+ _locs_ = _globs_
+ exec("""exec _code_ in _globs_, _locs_""")
+
+ exec_("""def reraise(tp, value, tb=None):
+ try:
+ raise tp, value, tb
+ finally:
+ tb = None
+""")
+
+
+if sys.version_info[:2] == (3, 2):
+ exec_("""def raise_from(value, from_value):
+ try:
+ if from_value is None:
+ raise value
+ raise value from from_value
+ finally:
+ value = None
+""")
+elif sys.version_info[:2] > (3, 2):
+ exec_("""def raise_from(value, from_value):
+ try:
+ raise value from from_value
+ finally:
+ value = None
+""")
+else:
+ def raise_from(value, from_value):
+ raise value
+
+
+print_ = getattr(moves.builtins, "print", None)
+if print_ is None:
+ def print_(*args, **kwargs):
+ """The new-style print function for Python 2.4 and 2.5."""
+ fp = kwargs.pop("file", sys.stdout)
+ if fp is None:
+ return
+
+ def write(data):
+ if not isinstance(data, basestring):
+ data = str(data)
+ # If the file has an encoding, encode unicode with it.
+ if (isinstance(fp, file) and
+ isinstance(data, unicode) and
+ fp.encoding is not None):
+ errors = getattr(fp, "errors", None)
+ if errors is None:
+ errors = "strict"
+ data = data.encode(fp.encoding, errors)
+ fp.write(data)
+ want_unicode = False
+ sep = kwargs.pop("sep", None)
+ if sep is not None:
+ if isinstance(sep, unicode):
+ want_unicode = True
+ elif not isinstance(sep, str):
+ raise TypeError("sep must be None or a string")
+ end = kwargs.pop("end", None)
+ if end is not None:
+ if isinstance(end, unicode):
+ want_unicode = True
+ elif not isinstance(end, str):
+ raise TypeError("end must be None or a string")
+ if kwargs:
+ raise TypeError("invalid keyword arguments to print()")
+ if not want_unicode:
+ for arg in args:
+ if isinstance(arg, unicode):
+ want_unicode = True
+ break
+ if want_unicode:
+ newline = unicode("\n")
+ space = unicode(" ")
+ else:
+ newline = "\n"
+ space = " "
+ if sep is None:
+ sep = space
+ if end is None:
+ end = newline
+ for i, arg in enumerate(args):
+ if i:
+ write(sep)
+ write(arg)
+ write(end)
+if sys.version_info[:2] < (3, 3):
+ _print = print_
+
+ def print_(*args, **kwargs):
+ fp = kwargs.get("file", sys.stdout)
+ flush = kwargs.pop("flush", False)
+ _print(*args, **kwargs)
+ if flush and fp is not None:
+ fp.flush()
+
+_add_doc(reraise, """Reraise an exception.""")
+
+if sys.version_info[0:2] < (3, 4):
+ def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
+ updated=functools.WRAPPER_UPDATES):
+ def wrapper(f):
+ f = functools.wraps(wrapped, assigned, updated)(f)
+ f.__wrapped__ = wrapped
+ return f
+ return wrapper
+else:
+ wraps = functools.wraps
+
+
+def with_metaclass(meta, *bases):
+ """Create a base class with a metaclass."""
+ # This requires a bit of explanation: the basic idea is to make a dummy
+ # metaclass for one level of class instantiation that replaces itself with
+ # the actual metaclass.
+ class metaclass(meta):
+
+ def __new__(cls, name, this_bases, d):
+ return meta(name, bases, d)
+ return type.__new__(metaclass, 'temporary_class', (), {})
+
+
+def add_metaclass(metaclass):
+ """Class decorator for creating a class with a metaclass."""
+ def wrapper(cls):
+ orig_vars = cls.__dict__.copy()
+ slots = orig_vars.get('__slots__')
+ if slots is not None:
+ if isinstance(slots, str):
+ slots = [slots]
+ for slots_var in slots:
+ orig_vars.pop(slots_var)
+ orig_vars.pop('__dict__', None)
+ orig_vars.pop('__weakref__', None)
+ return metaclass(cls.__name__, cls.__bases__, orig_vars)
+ return wrapper
+
+
+def python_2_unicode_compatible(klass):
+ """
+ A decorator that defines __unicode__ and __str__ methods under Python 2.
+ Under Python 3 it does nothing.
+
+ To support Python 2 and 3 with a single code base, define a __str__ method
+ returning text and apply this decorator to the class.
+ """
+ if PY2:
+ if '__str__' not in klass.__dict__:
+ raise ValueError("@python_2_unicode_compatible cannot be applied "
+ "to %s because it doesn't define __str__()." %
+ klass.__name__)
+ klass.__unicode__ = klass.__str__
+ klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
+ return klass
+
+
+# Complete the moves implementation.
+# This code is at the end of this module to speed up module loading.
+# Turn this module into a package.
+__path__ = [] # required for PEP 302 and PEP 451
+__package__ = __name__ # see PEP 366 @ReservedAssignment
+if globals().get("__spec__") is not None:
+ __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
+# Remove other six meta path importers, since they cause problems. This can
+# happen if six is removed from sys.modules and then reloaded. (Setuptools does
+# this for some reason.)
+if sys.meta_path:
+ for i, importer in enumerate(sys.meta_path):
+ # Here's some real nastiness: Another "instance" of the six module might
+ # be floating around. Therefore, we can't use isinstance() to check for
+ # the six meta path importer, since the other six instance will have
+ # inserted an importer with different class.
+ if (type(importer).__name__ == "_SixMetaPathImporter" and
+ importer.name == __name__):
+ del sys.meta_path[i]
+ break
+ del i, importer
+# Finally, add the importer to the meta path import hook.
+sys.meta_path.append(_importer)
diff --git a/lib/spack/external/yaml/README b/lib/spack/external/yaml/README
index c1edf13870..d186328eeb 100644
--- a/lib/spack/external/yaml/README
+++ b/lib/spack/external/yaml/README
@@ -28,7 +28,7 @@ Post your questions and opinions to the YAML-Core mailing list:
'http://lists.sourceforge.net/lists/listinfo/yaml-core'.
Submit bug reports and feature requests to the PyYAML bug tracker:
-'http://pyyaml.org/newticket?component=pyyaml'.
+'https://bitbucket.org/xi/pyyaml/issues/new'.
PyYAML is written by Kirill Simonov <xi@resolvent.net>. It is released
under the MIT license. See the file LICENSE for more details.
diff --git a/lib/spack/external/yaml/__init__.py b/lib/spack/external/yaml/lib/yaml/__init__.py
index f977f46ba7..87c15d38aa 100644
--- a/lib/spack/external/yaml/__init__.py
+++ b/lib/spack/external/yaml/lib/yaml/__init__.py
@@ -8,7 +8,7 @@ from nodes import *
from loader import *
from dumper import *
-__version__ = '3.10'
+__version__ = '3.12'
try:
from cyaml import *
diff --git a/lib/spack/external/yaml/composer.py b/lib/spack/external/yaml/lib/yaml/composer.py
index 06e5ac782f..06e5ac782f 100644
--- a/lib/spack/external/yaml/composer.py
+++ b/lib/spack/external/yaml/lib/yaml/composer.py
diff --git a/lib/spack/external/yaml/constructor.py b/lib/spack/external/yaml/lib/yaml/constructor.py
index 8c0ec181b2..635faac3e6 100644
--- a/lib/spack/external/yaml/constructor.py
+++ b/lib/spack/external/yaml/lib/yaml/constructor.py
@@ -131,9 +131,6 @@ class BaseConstructor(object):
raise ConstructorError("while constructing a mapping", node.start_mark,
"found unacceptable key (%s)" % exc, key_node.start_mark)
value = self.construct_object(value_node, deep=deep)
- if key in mapping:
- raise ConstructorError("while constructing a mapping", node.start_mark,
- "found already in-use key (%s)" % key, key_node.start_mark)
mapping[key] = value
return mapping
diff --git a/lib/spack/external/yaml/lib/yaml/cyaml.py b/lib/spack/external/yaml/lib/yaml/cyaml.py
new file mode 100644
index 0000000000..68dcd75192
--- /dev/null
+++ b/lib/spack/external/yaml/lib/yaml/cyaml.py
@@ -0,0 +1,85 @@
+
+__all__ = ['CBaseLoader', 'CSafeLoader', 'CLoader',
+ 'CBaseDumper', 'CSafeDumper', 'CDumper']
+
+from _yaml import CParser, CEmitter
+
+from constructor import *
+
+from serializer import *
+from representer import *
+
+from resolver import *
+
+class CBaseLoader(CParser, BaseConstructor, BaseResolver):
+
+ def __init__(self, stream):
+ CParser.__init__(self, stream)
+ BaseConstructor.__init__(self)
+ BaseResolver.__init__(self)
+
+class CSafeLoader(CParser, SafeConstructor, Resolver):
+
+ def __init__(self, stream):
+ CParser.__init__(self, stream)
+ SafeConstructor.__init__(self)
+ Resolver.__init__(self)
+
+class CLoader(CParser, Constructor, Resolver):
+
+ def __init__(self, stream):
+ CParser.__init__(self, stream)
+ Constructor.__init__(self)
+ Resolver.__init__(self)
+
+class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver):
+
+ def __init__(self, stream,
+ default_style=None, default_flow_style=None,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None):
+ CEmitter.__init__(self, stream, canonical=canonical,
+ indent=indent, width=width, encoding=encoding,
+ allow_unicode=allow_unicode, line_break=line_break,
+ explicit_start=explicit_start, explicit_end=explicit_end,
+ version=version, tags=tags)
+ Representer.__init__(self, default_style=default_style,
+ default_flow_style=default_flow_style)
+ Resolver.__init__(self)
+
+class CSafeDumper(CEmitter, SafeRepresenter, Resolver):
+
+ def __init__(self, stream,
+ default_style=None, default_flow_style=None,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None):
+ CEmitter.__init__(self, stream, canonical=canonical,
+ indent=indent, width=width, encoding=encoding,
+ allow_unicode=allow_unicode, line_break=line_break,
+ explicit_start=explicit_start, explicit_end=explicit_end,
+ version=version, tags=tags)
+ SafeRepresenter.__init__(self, default_style=default_style,
+ default_flow_style=default_flow_style)
+ Resolver.__init__(self)
+
+class CDumper(CEmitter, Serializer, Representer, Resolver):
+
+ def __init__(self, stream,
+ default_style=None, default_flow_style=None,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None):
+ CEmitter.__init__(self, stream, canonical=canonical,
+ indent=indent, width=width, encoding=encoding,
+ allow_unicode=allow_unicode, line_break=line_break,
+ explicit_start=explicit_start, explicit_end=explicit_end,
+ version=version, tags=tags)
+ Representer.__init__(self, default_style=default_style,
+ default_flow_style=default_flow_style)
+ Resolver.__init__(self)
+
diff --git a/lib/spack/external/yaml/dumper.py b/lib/spack/external/yaml/lib/yaml/dumper.py
index f811d2c919..f811d2c919 100644
--- a/lib/spack/external/yaml/dumper.py
+++ b/lib/spack/external/yaml/lib/yaml/dumper.py
diff --git a/lib/spack/external/yaml/emitter.py b/lib/spack/external/yaml/lib/yaml/emitter.py
index e5bcdcccbb..e5bcdcccbb 100644
--- a/lib/spack/external/yaml/emitter.py
+++ b/lib/spack/external/yaml/lib/yaml/emitter.py
diff --git a/lib/spack/external/yaml/error.py b/lib/spack/external/yaml/lib/yaml/error.py
index 577686db5f..577686db5f 100644
--- a/lib/spack/external/yaml/error.py
+++ b/lib/spack/external/yaml/lib/yaml/error.py
diff --git a/lib/spack/external/yaml/events.py b/lib/spack/external/yaml/lib/yaml/events.py
index f79ad389cb..f79ad389cb 100644
--- a/lib/spack/external/yaml/events.py
+++ b/lib/spack/external/yaml/lib/yaml/events.py
diff --git a/lib/spack/external/yaml/loader.py b/lib/spack/external/yaml/lib/yaml/loader.py
index 293ff467b1..293ff467b1 100644
--- a/lib/spack/external/yaml/loader.py
+++ b/lib/spack/external/yaml/lib/yaml/loader.py
diff --git a/lib/spack/external/yaml/nodes.py b/lib/spack/external/yaml/lib/yaml/nodes.py
index c4f070c41e..c4f070c41e 100644
--- a/lib/spack/external/yaml/nodes.py
+++ b/lib/spack/external/yaml/lib/yaml/nodes.py
diff --git a/lib/spack/external/yaml/parser.py b/lib/spack/external/yaml/lib/yaml/parser.py
index f9e3057f33..f9e3057f33 100644
--- a/lib/spack/external/yaml/parser.py
+++ b/lib/spack/external/yaml/lib/yaml/parser.py
diff --git a/lib/spack/external/yaml/reader.py b/lib/spack/external/yaml/lib/yaml/reader.py
index a67af7c5da..3249e6b9f5 100644
--- a/lib/spack/external/yaml/reader.py
+++ b/lib/spack/external/yaml/lib/yaml/reader.py
@@ -56,7 +56,8 @@ class Reader(object):
# Yeah, it's ugly and slow.
- def __init__(self, stream, name=None):
+ def __init__(self, stream):
+ self.name = None
self.stream = None
self.stream_pointer = 0
self.eof = True
@@ -69,16 +70,16 @@ class Reader(object):
self.line = 0
self.column = 0
if isinstance(stream, unicode):
- self.name = "<unicode string>" if name is None else name
+ self.name = "<unicode string>"
self.check_printable(stream)
self.buffer = stream+u'\0'
elif isinstance(stream, str):
- self.name = "<string>" if name is None else name
+ self.name = "<string>"
self.raw_buffer = stream
self.determine_encoding()
else:
self.stream = stream
- self.name = getattr(stream, 'name', "<file>") if name is None else name
+ self.name = getattr(stream, 'name', "<file>")
self.eof = False
self.raw_buffer = ''
self.determine_encoding()
diff --git a/lib/spack/external/yaml/representer.py b/lib/spack/external/yaml/lib/yaml/representer.py
index 5f4fc70dbc..4ea8cb1fe1 100644
--- a/lib/spack/external/yaml/representer.py
+++ b/lib/spack/external/yaml/lib/yaml/representer.py
@@ -139,7 +139,9 @@ class BaseRepresenter(object):
class SafeRepresenter(BaseRepresenter):
def ignore_aliases(self, data):
- if data in [None, ()]:
+ if data is None:
+ return True
+ if isinstance(data, tuple) and data == ():
return True
if isinstance(data, (str, unicode, bool, int, float)):
return True
diff --git a/lib/spack/external/yaml/resolver.py b/lib/spack/external/yaml/lib/yaml/resolver.py
index 6b5ab87596..528fbc0ead 100644
--- a/lib/spack/external/yaml/resolver.py
+++ b/lib/spack/external/yaml/lib/yaml/resolver.py
@@ -24,7 +24,10 @@ class BaseResolver(object):
def add_implicit_resolver(cls, tag, regexp, first):
if not 'yaml_implicit_resolvers' in cls.__dict__:
- cls.yaml_implicit_resolvers = cls.yaml_implicit_resolvers.copy()
+ implicit_resolvers = {}
+ for key in cls.yaml_implicit_resolvers:
+ implicit_resolvers[key] = cls.yaml_implicit_resolvers[key][:]
+ cls.yaml_implicit_resolvers = implicit_resolvers
if first is None:
first = [None]
for ch in first:
diff --git a/lib/spack/external/yaml/scanner.py b/lib/spack/external/yaml/lib/yaml/scanner.py
index 5228fad65c..834f662a4c 100644
--- a/lib/spack/external/yaml/scanner.py
+++ b/lib/spack/external/yaml/lib/yaml/scanner.py
@@ -286,7 +286,7 @@ class Scanner(object):
or self.index-key.index > 1024:
if key.required:
raise ScannerError("while scanning a simple key", key.mark,
- "could not found expected ':'", self.get_mark())
+ "could not find expected ':'", self.get_mark())
del self.possible_simple_keys[level]
def save_possible_simple_key(self):
@@ -297,10 +297,6 @@ class Scanner(object):
# Check if a simple key is required at the current position.
required = not self.flow_level and self.indent == self.column
- # A simple key is required only if it is the first token in the current
- # line. Therefore it is always allowed.
- assert self.allow_simple_key or not required
-
# The next token might be a simple key. Let's save it's number and
# position.
if self.allow_simple_key:
@@ -317,7 +313,7 @@ class Scanner(object):
if key.required:
raise ScannerError("while scanning a simple key", key.mark,
- "could not found expected ':'", self.get_mark())
+ "could not find expected ':'", self.get_mark())
del self.possible_simple_keys[self.flow_level]
diff --git a/lib/spack/external/yaml/serializer.py b/lib/spack/external/yaml/lib/yaml/serializer.py
index 0bf1e96dc1..0bf1e96dc1 100644
--- a/lib/spack/external/yaml/serializer.py
+++ b/lib/spack/external/yaml/lib/yaml/serializer.py
diff --git a/lib/spack/external/yaml/tokens.py b/lib/spack/external/yaml/lib/yaml/tokens.py
index 4d0b48a394..4d0b48a394 100644
--- a/lib/spack/external/yaml/tokens.py
+++ b/lib/spack/external/yaml/lib/yaml/tokens.py
diff --git a/lib/spack/external/yaml/lib3/yaml/__init__.py b/lib/spack/external/yaml/lib3/yaml/__init__.py
new file mode 100644
index 0000000000..d7d27fe63b
--- /dev/null
+++ b/lib/spack/external/yaml/lib3/yaml/__init__.py
@@ -0,0 +1,312 @@
+
+from .error import *
+
+from .tokens import *
+from .events import *
+from .nodes import *
+
+from .loader import *
+from .dumper import *
+
+__version__ = '3.12'
+try:
+ from .cyaml import *
+ __with_libyaml__ = True
+except ImportError:
+ __with_libyaml__ = False
+
+import io
+
+def scan(stream, Loader=Loader):
+ """
+ Scan a YAML stream and produce scanning tokens.
+ """
+ loader = Loader(stream)
+ try:
+ while loader.check_token():
+ yield loader.get_token()
+ finally:
+ loader.dispose()
+
+def parse(stream, Loader=Loader):
+ """
+ Parse a YAML stream and produce parsing events.
+ """
+ loader = Loader(stream)
+ try:
+ while loader.check_event():
+ yield loader.get_event()
+ finally:
+ loader.dispose()
+
+def compose(stream, Loader=Loader):
+ """
+ Parse the first YAML document in a stream
+ and produce the corresponding representation tree.
+ """
+ loader = Loader(stream)
+ try:
+ return loader.get_single_node()
+ finally:
+ loader.dispose()
+
+def compose_all(stream, Loader=Loader):
+ """
+ Parse all YAML documents in a stream
+ and produce corresponding representation trees.
+ """
+ loader = Loader(stream)
+ try:
+ while loader.check_node():
+ yield loader.get_node()
+ finally:
+ loader.dispose()
+
+def load(stream, Loader=Loader):
+ """
+ Parse the first YAML document in a stream
+ and produce the corresponding Python object.
+ """
+ loader = Loader(stream)
+ try:
+ return loader.get_single_data()
+ finally:
+ loader.dispose()
+
+def load_all(stream, Loader=Loader):
+ """
+ Parse all YAML documents in a stream
+ and produce corresponding Python objects.
+ """
+ loader = Loader(stream)
+ try:
+ while loader.check_data():
+ yield loader.get_data()
+ finally:
+ loader.dispose()
+
+def safe_load(stream):
+ """
+ Parse the first YAML document in a stream
+ and produce the corresponding Python object.
+ Resolve only basic YAML tags.
+ """
+ return load(stream, SafeLoader)
+
+def safe_load_all(stream):
+ """
+ Parse all YAML documents in a stream
+ and produce corresponding Python objects.
+ Resolve only basic YAML tags.
+ """
+ return load_all(stream, SafeLoader)
+
+def emit(events, stream=None, Dumper=Dumper,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None):
+ """
+ Emit YAML parsing events into a stream.
+ If stream is None, return the produced string instead.
+ """
+ getvalue = None
+ if stream is None:
+ stream = io.StringIO()
+ getvalue = stream.getvalue
+ dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
+ allow_unicode=allow_unicode, line_break=line_break)
+ try:
+ for event in events:
+ dumper.emit(event)
+ finally:
+ dumper.dispose()
+ if getvalue:
+ return getvalue()
+
+def serialize_all(nodes, stream=None, Dumper=Dumper,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None):
+ """
+ Serialize a sequence of representation trees into a YAML stream.
+ If stream is None, return the produced string instead.
+ """
+ getvalue = None
+ if stream is None:
+ if encoding is None:
+ stream = io.StringIO()
+ else:
+ stream = io.BytesIO()
+ getvalue = stream.getvalue
+ dumper = Dumper(stream, canonical=canonical, indent=indent, width=width,
+ allow_unicode=allow_unicode, line_break=line_break,
+ encoding=encoding, version=version, tags=tags,
+ explicit_start=explicit_start, explicit_end=explicit_end)
+ try:
+ dumper.open()
+ for node in nodes:
+ dumper.serialize(node)
+ dumper.close()
+ finally:
+ dumper.dispose()
+ if getvalue:
+ return getvalue()
+
+def serialize(node, stream=None, Dumper=Dumper, **kwds):
+ """
+ Serialize a representation tree into a YAML stream.
+ If stream is None, return the produced string instead.
+ """
+ return serialize_all([node], stream, Dumper=Dumper, **kwds)
+
+def dump_all(documents, stream=None, Dumper=Dumper,
+ default_style=None, default_flow_style=None,
+ canonical=None, indent=None, width=None,
+ allow_unicode=None, line_break=None,
+ encoding=None, explicit_start=None, explicit_end=None,
+ version=None, tags=None):
+ """
+ Serialize a sequence of Python objects into a YAML stream.
+ If stream is None, return the produced string instead.
+ """
+ getvalue = None
+ if stream is None:
+ if encoding is None:
+ stream = io.StringIO()
+ else:
+ stream = io.BytesIO()
+ getvalue = stream.getvalue
+ dumper = Dumper(stream, default_style=default_style,
+ default_flow_style=default_flow_style,
+ canonical=canonical, indent=indent, width=width,
+ allow_unicode=allow_unicode, line_break=line_break,
+ encoding=encoding, version=version, tags=tags,
+ explicit_start=explicit_start, explicit_end=explicit_end)
+ try:
+ dumper.open()
+ for data in documents:
+ dumper.represent(data)
+ dumper.close()
+ finally:
+ dumper.dispose()
+ if getvalue:
+ return getvalue()
+
+def dump(data, stream=None, Dumper=Dumper, **kwds):
+ """
+ Serialize a Python object into a YAML stream.
+ If stream is None, return the produced string instead.
+ """
+ return dump_all([data], stream, Dumper=Dumper, **kwds)
+
+def safe_dump_all(documents, stream=None, **kwds):
+ """
+ Serialize a sequence of Python objects into a YAML stream.
+ Produce only basic YAML tags.
+ If stream is None, return the produced string instead.
+ """
+ return dump_all(documents, stream, Dumper=SafeDumper, **kwds)
+
+def safe_dump(data, stream=None, **kwds):
+ """
+ Serialize a Python object into a YAML stream.
+ Produce only basic YAML tags.
+ If stream is None, return the produced string instead.
+ """
+ return dump_all([data], stream, Dumper=SafeDumper, **kwds)
+
+def add_implicit_resolver(tag, regexp, first=None,
+ Loader=Loader, Dumper=Dumper):
+ """
+ Add an implicit scalar detector.
+ If an implicit scalar value matches the given regexp,
+ the corresponding tag is assigned to the scalar.
+ first is a sequence of possible initial characters or None.
+ """
+ Loader.add_implicit_resolver(tag, regexp, first)
+ Dumper.add_implicit_resolver(tag, regexp, first)
+
+def add_path_resolver(tag, path, kind=None, Loader=Loader, Dumper=Dumper):
+ """
+ Add a path based resolver for the given tag.
+ A path is a list of keys that forms a path
+ to a node in the representation tree.
+ Keys can be string values, integers, or None.
+ """
+ Loader.add_path_resolver(tag, path, kind)
+ Dumper.add_path_resolver(tag, path, kind)
+
+def add_constructor(tag, constructor, Loader=Loader):
+ """
+ Add a constructor for the given tag.
+ Constructor is a function that accepts a Loader instance
+ and a node object and produces the corresponding Python object.
+ """
+ Loader.add_constructor(tag, constructor)
+
+def add_multi_constructor(tag_prefix, multi_constructor, Loader=Loader):
+ """
+ Add a multi-constructor for the given tag prefix.
+ Multi-constructor is called for a node if its tag starts with tag_prefix.
+ Multi-constructor accepts a Loader instance, a tag suffix,
+ and a node object and produces the corresponding Python object.
+ """
+ Loader.add_multi_constructor(tag_prefix, multi_constructor)
+
+def add_representer(data_type, representer, Dumper=Dumper):
+ """
+ Add a representer for the given type.
+ Representer is a function accepting a Dumper instance
+ and an instance of the given data type
+ and producing the corresponding representation node.
+ """
+ Dumper.add_representer(data_type, representer)
+
+def add_multi_representer(data_type, multi_representer, Dumper=Dumper):
+ """
+ Add a representer for the given type.
+ Multi-representer is a function accepting a Dumper instance
+ and an instance of the given data type or subtype
+ and producing the corresponding representation node.
+ """
+ Dumper.add_multi_representer(data_type, multi_representer)
+
+class YAMLObjectMetaclass(type):
+ """
+ The metaclass for YAMLObject.
+ """
+ def __init__(cls, name, bases, kwds):
+ super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds)
+ if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None:
+ cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml)
+ cls.yaml_dumper.add_representer(cls, cls.to_yaml)
+
+class YAMLObject(metaclass=YAMLObjectMetaclass):
+ """
+ An object that can dump itself to a YAML stream
+ and load itself from a YAML stream.
+ """
+
+ __slots__ = () # no direct instantiation, so allow immutable subclasses
+
+ yaml_loader = Loader
+ yaml_dumper = Dumper
+
+ yaml_tag = None
+ yaml_flow_style = None
+
+ @classmethod
+ def from_yaml(cls, loader, node):
+ """
+ Convert a representation node to a Python object.
+ """
+ return loader.construct_yaml_object(node, cls)
+
+ @classmethod
+ def to_yaml(cls, dumper, data):
+ """
+ Convert a Python object to a representation node.
+ """
+ return dumper.represent_yaml_object(cls.yaml_tag, data, cls,
+ flow_style=cls.yaml_flow_style)
+
diff --git a/lib/spack/external/yaml/lib3/yaml/composer.py b/lib/spack/external/yaml/lib3/yaml/composer.py
new file mode 100644
index 0000000000..d5c6a7acd9
--- /dev/null
+++ b/lib/spack/external/yaml/lib3/yaml/composer.py
@@ -0,0 +1,139 @@
+
+__all__ = ['Composer', 'ComposerError']
+
+from .error import MarkedYAMLError
+from .events import *
+from .nodes import *
+
+class ComposerError(MarkedYAMLError):
+ pass
+
+class Composer:
+
+ def __init__(self):
+ self.anchors = {}
+
+ def check_node(self):
+ # Drop the STREAM-START event.
+ if self.check_event(StreamStartEvent):
+ self.get_event()
+
+ # Are there more documents available?
+ return not self.check_event(StreamEndEvent)
+
+ def get_node(self):
+ # Get the root node of the next document.
+ if not self.check_event(StreamEndEvent):
+ return self.compose_document()
+
+ def get_single_node(self):
+ # Drop the STREAM-START event.
+ self.get_event()
+
+ # Compose a document if the stream is not empty.
+ document = None
+ if not self.check_event(StreamEndEvent):
+ document = self.compose_document()
+
+ # Ensure that the stream contains no more documents.
+ if not self.check_event(StreamEndEvent):
+ event = self.get_event()
+ raise ComposerError("expected a single document in the stream",
+ document.start_mark, "but found another document",
+ event.start_mark)
+
+ # Drop the STREAM-END event.
+ self.get_event()
+
+ return document
+
+ def compose_document(self):
+ # Drop the DOCUMENT-START event.
+ self.get_event()
+
+ # Compose the root node.
+ node = self.compose_node(None, None)
+
+ # Drop the DOCUMENT-END event.
+ self.get_event()
+
+ self.anchors = {}
+ return node
+
+ def compose_node(self, parent, index):
+ if self.check_event(AliasEvent):
+ event = self.get_event()
+ anchor = event.anchor
+ if anchor not in self.anchors:
+ raise ComposerError(None, None, "found undefined alias %r"
+ % anchor, event.start_mark)
+ return self.anchors[anchor]
+ event = self.peek_event()
+ anchor = event.anchor
+ if anchor is not None:
+ if anchor in self.anchors:
+ raise ComposerError("found duplicate anchor %r; first occurence"
+ % anchor, self.anchors[anchor].start_mark,
+ "second occurence", event.start_mark)
+ self.descend_resolver(parent, index)
+ if self.check_event(ScalarEvent):
+ node = self.compose_scalar_node(anchor)
+ elif self.check_event(SequenceStartEvent):
+ node = self.compose_sequence_node(anchor)
+ elif self.check_event(MappingStartEvent):
+ node = self.compose_mapping_node(anchor)
+ self.ascend_resolver()
+ return node
+
+ def compose_scalar_node(self, anchor):
+ event = self.get_event()
+ tag = event.tag
+ if tag is None or tag == '!':
+ tag = self.resolve(ScalarNode, event.value, event.implicit)
+ node = ScalarNode(tag, event.value,
+ event.start_mark, event.end_mark, style=event.style)
+ if anchor is not None:
+ self.anchors[anchor] = node
+ return node
+
+ def compose_sequence_node(self, anchor):
+ start_event = self.get_event()
+ tag = start_event.tag
+ if tag is None or tag == '!':
+ tag = self.resolve(SequenceNode, None, start_event.implicit)
+ node = SequenceNode(tag, [],
+ start_event.start_mark, None,
+ flow_style=start_event.flow_style)
+ if anchor is not None:
+ self.anchors[anchor] = node
+ index = 0
+ while not self.check_event(SequenceEndEvent):
+ node.value.append(self.compose_node(node, index))
+ index += 1
+ end_event = self.get_event()
+ node.end_mark = end_event.end_mark
+ return node
+
+ def compose_mapping_node(self, anchor):
+ start_event = self.get_event()
+ tag = start_event.tag
+ if tag is None or tag == '!':
+ tag = self.resolve(MappingNode, None, start_event.implicit)
+ node = MappingNode(tag, [],
+ start_event.start_mark, None,
+ flow_style=start_event.flow_style)
+ if anchor is not None:
+ self.anchors[anchor] = node
+ while not self.check_event(MappingEndEvent):
+ #key_event = self.peek_event()
+ item_key = self.compose_node(node, None)
+ #if item_key in node.value:
+ # raise ComposerError("while composing a mapping", start_event.start_mark,
+ # "found duplicate key", key_event.start_mark)
+ item_value = self.compose_node(node, item_key)
+ #node.value[item_key] = item_value
+ node.value.append((item_key, item_value))
+ end_event = self.get_event()
+ node.end_mark = end_event.end_mark
+ return node
+
diff --git a/lib/spack/external/yaml/lib3/yaml/constructor.py b/lib/spack/external/yaml/lib3/yaml/constructor.py
new file mode 100644
index 0000000000..981543aebb
--- /dev/null
+++ b/lib/spack/external/yaml/lib3/yaml/constructor.py
@@ -0,0 +1,686 @@
+
+__all__ = ['BaseConstructor', 'SafeConstructor', 'Constructor',
+ 'ConstructorError']
+
from .error import *
from .nodes import *

import collections
import collections.abc
import datetime
import base64
import binascii
import re
import sys
import types
+
class ConstructorError(MarkedYAMLError):
    """Error raised while converting YAML nodes into Python objects."""
    pass
+
class BaseConstructor:
    """Turn a composed tree of YAML nodes into native Python objects.

    Per-tag construction functions are registered with ``add_constructor``
    and ``add_multi_constructor``.  The dispatch tables use copy-on-write
    semantics: registering on a subclass copies the inherited table first,
    so a parent class's registrations are never mutated.
    """

    yaml_constructors = {}
    yaml_multi_constructors = {}

    def __init__(self):
        # Cache of node -> constructed object; also makes aliases resolve
        # to the same object instance.
        self.constructed_objects = {}
        # Nodes currently under construction, used to detect recursion that
        # cannot be represented (e.g. a scalar containing itself).
        self.recursive_objects = {}
        # Generators from two-step constructors, drained per document.
        self.state_generators = []
        self.deep_construct = False

    def check_data(self):
        # If there are more documents available?
        return self.check_node()

    def get_data(self):
        # Construct and return the next document.
        if self.check_node():
            return self.construct_document(self.get_node())

    def get_single_data(self):
        # Ensure that the stream contains a single document and construct it.
        node = self.get_single_node()
        if node is not None:
            return self.construct_document(node)
        return None

    def construct_document(self, node):
        """Construct one full document and finish deferred constructors."""
        data = self.construct_object(node)
        # Two-step constructors yield the (empty) object first and populate
        # it later; keep draining until no new generators appear.
        while self.state_generators:
            state_generators = self.state_generators
            self.state_generators = []
            for generator in state_generators:
                for dummy in generator:
                    pass
        self.constructed_objects = {}
        self.recursive_objects = {}
        self.deep_construct = False
        return data

    def construct_object(self, node, deep=False):
        """Construct the Python object for *node*, dispatching on its tag.

        With ``deep=True``, generator-based constructors are exhausted
        immediately (needed when the result is used e.g. as a dict key).
        """
        if node in self.constructed_objects:
            return self.constructed_objects[node]
        if deep:
            old_deep = self.deep_construct
            self.deep_construct = True
        if node in self.recursive_objects:
            raise ConstructorError(None, None,
                    "found unconstructable recursive node", node.start_mark)
        self.recursive_objects[node] = None
        constructor = None
        tag_suffix = None
        if node.tag in self.yaml_constructors:
            constructor = self.yaml_constructors[node.tag]
        else:
            # Fall back, in order: multi-constructor whose prefix matches
            # the tag; the catch-all None multi-constructor; the catch-all
            # None constructor; finally the node-kind default.
            for tag_prefix in self.yaml_multi_constructors:
                if node.tag.startswith(tag_prefix):
                    tag_suffix = node.tag[len(tag_prefix):]
                    constructor = self.yaml_multi_constructors[tag_prefix]
                    break
            else:
                if None in self.yaml_multi_constructors:
                    tag_suffix = node.tag
                    constructor = self.yaml_multi_constructors[None]
                elif None in self.yaml_constructors:
                    constructor = self.yaml_constructors[None]
                elif isinstance(node, ScalarNode):
                    constructor = self.__class__.construct_scalar
                elif isinstance(node, SequenceNode):
                    constructor = self.__class__.construct_sequence
                elif isinstance(node, MappingNode):
                    constructor = self.__class__.construct_mapping
        if tag_suffix is None:
            data = constructor(self, node)
        else:
            data = constructor(self, tag_suffix, node)
        if isinstance(data, types.GeneratorType):
            # A generator constructor yields the object, then fills it in.
            generator = data
            data = next(generator)
            if self.deep_construct:
                for dummy in generator:
                    pass
            else:
                self.state_generators.append(generator)
        self.constructed_objects[node] = data
        del self.recursive_objects[node]
        if deep:
            self.deep_construct = old_deep
        return data

    def construct_scalar(self, node):
        """Return the string value of a scalar node (raise otherwise)."""
        if not isinstance(node, ScalarNode):
            raise ConstructorError(None, None,
                    "expected a scalar node, but found %s" % node.id,
                    node.start_mark)
        return node.value

    def construct_sequence(self, node, deep=False):
        """Construct a list from a sequence node."""
        if not isinstance(node, SequenceNode):
            raise ConstructorError(None, None,
                    "expected a sequence node, but found %s" % node.id,
                    node.start_mark)
        return [self.construct_object(child, deep=deep)
                for child in node.value]

    def construct_mapping(self, node, deep=False):
        """Construct a dict from a mapping node, rejecting unhashable keys."""
        if not isinstance(node, MappingNode):
            raise ConstructorError(None, None,
                    "expected a mapping node, but found %s" % node.id,
                    node.start_mark)
        mapping = {}
        for key_node, value_node in node.value:
            key = self.construct_object(key_node, deep=deep)
            # collections.abc.Hashable: the bare collections.Hashable alias
            # is deprecated and was removed in Python 3.10.
            if not isinstance(key, collections.abc.Hashable):
                raise ConstructorError("while constructing a mapping", node.start_mark,
                        "found unhashable key", key_node.start_mark)
            value = self.construct_object(value_node, deep=deep)
            mapping[key] = value
        return mapping

    def construct_pairs(self, node, deep=False):
        """Construct a list of (key, value) pairs, preserving duplicates."""
        if not isinstance(node, MappingNode):
            raise ConstructorError(None, None,
                    "expected a mapping node, but found %s" % node.id,
                    node.start_mark)
        pairs = []
        for key_node, value_node in node.value:
            key = self.construct_object(key_node, deep=deep)
            value = self.construct_object(value_node, deep=deep)
            pairs.append((key, value))
        return pairs

    @classmethod
    def add_constructor(cls, tag, constructor):
        """Register *constructor* for an exact *tag* on this class only."""
        # Copy-on-write: give this class its own table before mutating it.
        if 'yaml_constructors' not in cls.__dict__:
            cls.yaml_constructors = cls.yaml_constructors.copy()
        cls.yaml_constructors[tag] = constructor

    @classmethod
    def add_multi_constructor(cls, tag_prefix, multi_constructor):
        """Register *multi_constructor* for all tags starting with *tag_prefix*."""
        if 'yaml_multi_constructors' not in cls.__dict__:
            cls.yaml_multi_constructors = cls.yaml_multi_constructors.copy()
        cls.yaml_multi_constructors[tag_prefix] = multi_constructor
+
class SafeConstructor(BaseConstructor):
    """Constructor restricted to the standard YAML 1.1 tags.

    Adds merge-key ('<<') flattening, the '=' value key, and scalar parsing
    for bool/int/float/binary/timestamp, plus the collection tags.
    """

    def construct_scalar(self, node):
        # A mapping may stand in for a scalar when it carries the special
        # 'value' key ('='); in that case use that entry's value.
        if isinstance(node, MappingNode):
            for key_node, value_node in node.value:
                if key_node.tag == 'tag:yaml.org,2002:value':
                    return self.construct_scalar(value_node)
        return super().construct_scalar(node)

    def flatten_mapping(self, node):
        # Expand YAML merge keys ('<<'): the merged entries are prepended so
        # the mapping's own (later) entries take precedence.
        merge = []
        index = 0
        while index < len(node.value):
            key_node, value_node = node.value[index]
            if key_node.tag == 'tag:yaml.org,2002:merge':
                del node.value[index]
                if isinstance(value_node, MappingNode):
                    self.flatten_mapping(value_node)
                    merge.extend(value_node.value)
                elif isinstance(value_node, SequenceNode):
                    # A sequence of mappings: earlier list entries win, hence
                    # the reverse before extending.
                    submerge = []
                    for subnode in value_node.value:
                        if not isinstance(subnode, MappingNode):
                            raise ConstructorError("while constructing a mapping",
                                    node.start_mark,
                                    "expected a mapping for merging, but found %s"
                                    % subnode.id, subnode.start_mark)
                        self.flatten_mapping(subnode)
                        submerge.append(subnode.value)
                    submerge.reverse()
                    for value in submerge:
                        merge.extend(value)
                else:
                    raise ConstructorError("while constructing a mapping", node.start_mark,
                            "expected a mapping or list of mappings for merging, but found %s"
                            % value_node.id, value_node.start_mark)
            elif key_node.tag == 'tag:yaml.org,2002:value':
                # The '=' key degrades to a plain string key here.
                key_node.tag = 'tag:yaml.org,2002:str'
                index += 1
            else:
                index += 1
        if merge:
            node.value = merge + node.value

    def construct_mapping(self, node, deep=False):
        # Flatten merge keys before the base class builds the dict.
        if isinstance(node, MappingNode):
            self.flatten_mapping(node)
        return super().construct_mapping(node, deep=deep)

    def construct_yaml_null(self, node):
        # construct_scalar also validates that the node is a scalar.
        self.construct_scalar(node)
        return None

    # YAML 1.1 boolean spellings (lowercased before lookup).
    bool_values = {
        'yes':      True,
        'no':       False,
        'true':     True,
        'false':    False,
        'on':       True,
        'off':      False,
    }

    def construct_yaml_bool(self, node):
        value = self.construct_scalar(node)
        return self.bool_values[value.lower()]

    def construct_yaml_int(self, node):
        # YAML 1.1 integers: optional sign, '_' separators, binary (0b),
        # hex (0x), octal (leading 0), and base-60 ':'-separated segments.
        value = self.construct_scalar(node)
        value = value.replace('_', '')
        sign = +1
        if value[0] == '-':
            sign = -1
        if value[0] in '+-':
            value = value[1:]
        if value == '0':
            return 0
        elif value.startswith('0b'):
            return sign*int(value[2:], 2)
        elif value.startswith('0x'):
            return sign*int(value[2:], 16)
        elif value[0] == '0':
            return sign*int(value, 8)
        elif ':' in value:
            # Sexagesimal: e.g. 1:30 == 90.
            digits = [int(part) for part in value.split(':')]
            digits.reverse()
            base = 1
            value = 0
            for digit in digits:
                value += digit*base
                base *= 60
            return sign*value
        else:
            return sign*int(value)

    # Compute +inf portably by squaring until the value stops growing,
    # then derive a quiet NaN from it.
    inf_value = 1e300
    while inf_value != inf_value*inf_value:
        inf_value *= inf_value
    nan_value = -inf_value/inf_value   # Trying to make a quiet NaN (like C99).

    def construct_yaml_float(self, node):
        # YAML 1.1 floats: '.inf'/'.nan', '_' separators, optional sign,
        # and base-60 ':'-separated segments.
        value = self.construct_scalar(node)
        value = value.replace('_', '').lower()
        sign = +1
        if value[0] == '-':
            sign = -1
        if value[0] in '+-':
            value = value[1:]
        if value == '.inf':
            return sign*self.inf_value
        elif value == '.nan':
            return self.nan_value
        elif ':' in value:
            digits = [float(part) for part in value.split(':')]
            digits.reverse()
            base = 1
            value = 0.0
            for digit in digits:
                value += digit*base
                base *= 60
            return sign*value
        else:
            return sign*float(value)

    def construct_yaml_binary(self, node):
        # Scalar carries base64 text; decode it to bytes.
        try:
            value = self.construct_scalar(node).encode('ascii')
        except UnicodeEncodeError as exc:
            raise ConstructorError(None, None,
                    "failed to convert base64 data into ascii: %s" % exc,
                    node.start_mark)
        try:
            # Prefer decodebytes; decodestring is the legacy alias.
            if hasattr(base64, 'decodebytes'):
                return base64.decodebytes(value)
            else:
                return base64.decodestring(value)
        except binascii.Error as exc:
            raise ConstructorError(None, None,
                    "failed to decode base64 data: %s" % exc, node.start_mark)

    # ISO8601-ish timestamp per the YAML timestamp spec: date, optional
    # time with fraction, optional 'Z' or +-HH[:MM] timezone offset.
    timestamp_regexp = re.compile(
            r'''^(?P<year>[0-9][0-9][0-9][0-9])
                -(?P<month>[0-9][0-9]?)
                -(?P<day>[0-9][0-9]?)
                (?:(?:[Tt]|[ \t]+)
                (?P<hour>[0-9][0-9]?)
                :(?P<minute>[0-9][0-9])
                :(?P<second>[0-9][0-9])
                (?:\.(?P<fraction>[0-9]*))?
                (?:[ \t]*(?P<tz>Z|(?P<tz_sign>[-+])(?P<tz_hour>[0-9][0-9]?)
                (?::(?P<tz_minute>[0-9][0-9]))?))?)?$''', re.X)

    def construct_yaml_timestamp(self, node):
        # The call validates the node is a scalar; matching then runs on the
        # raw node.value (the returned string itself is not used).
        value = self.construct_scalar(node)
        match = self.timestamp_regexp.match(node.value)
        values = match.groupdict()
        year = int(values['year'])
        month = int(values['month'])
        day = int(values['day'])
        if not values['hour']:
            # Date only.
            return datetime.date(year, month, day)
        hour = int(values['hour'])
        minute = int(values['minute'])
        second = int(values['second'])
        fraction = 0
        if values['fraction']:
            # Normalize the fraction to microseconds (6 digits).
            fraction = values['fraction'][:6]
            while len(fraction) < 6:
                fraction += '0'
            fraction = int(fraction)
        delta = None
        if values['tz_sign']:
            tz_hour = int(values['tz_hour'])
            tz_minute = int(values['tz_minute'] or 0)
            delta = datetime.timedelta(hours=tz_hour, minutes=tz_minute)
            if values['tz_sign'] == '-':
                delta = -delta
        # Result is a naive datetime shifted to UTC by the offset (if any).
        data = datetime.datetime(year, month, day, hour, minute, second, fraction)
        if delta:
            data -= delta
        return data

    def construct_yaml_omap(self, node):
        # Note: we do not check for duplicate keys, because it's too
        # CPU-expensive.
        # Two-step constructor: yield the list first, then fill it.
        omap = []
        yield omap
        if not isinstance(node, SequenceNode):
            raise ConstructorError("while constructing an ordered map", node.start_mark,
                    "expected a sequence, but found %s" % node.id, node.start_mark)
        for subnode in node.value:
            if not isinstance(subnode, MappingNode):
                raise ConstructorError("while constructing an ordered map", node.start_mark,
                        "expected a mapping of length 1, but found %s" % subnode.id,
                        subnode.start_mark)
            if len(subnode.value) != 1:
                raise ConstructorError("while constructing an ordered map", node.start_mark,
                        "expected a single mapping item, but found %d items" % len(subnode.value),
                        subnode.start_mark)
            key_node, value_node = subnode.value[0]
            key = self.construct_object(key_node)
            value = self.construct_object(value_node)
            omap.append((key, value))

    def construct_yaml_pairs(self, node):
        # Note: the same code as `construct_yaml_omap`.
        pairs = []
        yield pairs
        if not isinstance(node, SequenceNode):
            raise ConstructorError("while constructing pairs", node.start_mark,
                    "expected a sequence, but found %s" % node.id, node.start_mark)
        for subnode in node.value:
            if not isinstance(subnode, MappingNode):
                raise ConstructorError("while constructing pairs", node.start_mark,
                        "expected a mapping of length 1, but found %s" % subnode.id,
                        subnode.start_mark)
            if len(subnode.value) != 1:
                raise ConstructorError("while constructing pairs", node.start_mark,
                        "expected a single mapping item, but found %d items" % len(subnode.value),
                        subnode.start_mark)
            key_node, value_node = subnode.value[0]
            key = self.construct_object(key_node)
            value = self.construct_object(value_node)
            pairs.append((key, value))

    def construct_yaml_set(self, node):
        # Two-step: yield the empty set, then populate from the mapping keys.
        data = set()
        yield data
        value = self.construct_mapping(node)
        data.update(value)

    def construct_yaml_str(self, node):
        return self.construct_scalar(node)

    def construct_yaml_seq(self, node):
        data = []
        yield data
        data.extend(self.construct_sequence(node))

    def construct_yaml_map(self, node):
        data = {}
        yield data
        value = self.construct_mapping(node)
        data.update(value)

    def construct_yaml_object(self, node, cls):
        # Create the instance without running __init__, then restore state
        # either via __setstate__ or by updating __dict__.
        data = cls.__new__(cls)
        yield data
        if hasattr(data, '__setstate__'):
            state = self.construct_mapping(node, deep=True)
            data.__setstate__(state)
        else:
            state = self.construct_mapping(node)
            data.__dict__.update(state)

    def construct_undefined(self, node):
        # Fallback for tags with no registered constructor.
        raise ConstructorError(None, None,
                "could not determine a constructor for the tag %r" % node.tag,
                node.start_mark)
+
# Register the constructors for every standard YAML 1.1 tag on
# SafeConstructor.  The final None entry is the fallback applied to any
# tag without a dedicated constructor.
for _tag, _constructor in [
        ('tag:yaml.org,2002:null', SafeConstructor.construct_yaml_null),
        ('tag:yaml.org,2002:bool', SafeConstructor.construct_yaml_bool),
        ('tag:yaml.org,2002:int', SafeConstructor.construct_yaml_int),
        ('tag:yaml.org,2002:float', SafeConstructor.construct_yaml_float),
        ('tag:yaml.org,2002:binary', SafeConstructor.construct_yaml_binary),
        ('tag:yaml.org,2002:timestamp', SafeConstructor.construct_yaml_timestamp),
        ('tag:yaml.org,2002:omap', SafeConstructor.construct_yaml_omap),
        ('tag:yaml.org,2002:pairs', SafeConstructor.construct_yaml_pairs),
        ('tag:yaml.org,2002:set', SafeConstructor.construct_yaml_set),
        ('tag:yaml.org,2002:str', SafeConstructor.construct_yaml_str),
        ('tag:yaml.org,2002:seq', SafeConstructor.construct_yaml_seq),
        ('tag:yaml.org,2002:map', SafeConstructor.construct_yaml_map),
        (None, SafeConstructor.construct_undefined),
]:
    SafeConstructor.add_constructor(_tag, _constructor)
del _tag, _constructor
+
class Constructor(SafeConstructor):
    """Full (unsafe) constructor supporting the python/* tags.

    WARNING: the python/object* and python/name tags import modules and
    instantiate arbitrary classes; never use this constructor on untrusted
    input (use SafeConstructor instead).
    """

    def construct_python_str(self, node):
        return self.construct_scalar(node)

    def construct_python_unicode(self, node):
        return self.construct_scalar(node)

    def construct_python_bytes(self, node):
        # Scalar carries base64 text; decode it to bytes.
        try:
            value = self.construct_scalar(node).encode('ascii')
        except UnicodeEncodeError as exc:
            raise ConstructorError(None, None,
                    "failed to convert base64 data into ascii: %s" % exc,
                    node.start_mark)
        try:
            # Prefer decodebytes; decodestring is the legacy alias.
            if hasattr(base64, 'decodebytes'):
                return base64.decodebytes(value)
            else:
                return base64.decodestring(value)
        except binascii.Error as exc:
            raise ConstructorError(None, None,
                    "failed to decode base64 data: %s" % exc, node.start_mark)

    def construct_python_long(self, node):
        return self.construct_yaml_int(node)

    def construct_python_complex(self, node):
        return complex(self.construct_scalar(node))

    def construct_python_tuple(self, node):
        return tuple(self.construct_sequence(node))

    def find_python_module(self, name, mark):
        """Import and return the module named in a python/module tag suffix."""
        if not name:
            raise ConstructorError("while constructing a Python module", mark,
                    "expected non-empty name appended to the tag", mark)
        try:
            __import__(name)
        except ImportError as exc:
            raise ConstructorError("while constructing a Python module", mark,
                    "cannot find module %r (%s)" % (name, exc), mark)
        return sys.modules[name]

    def find_python_name(self, name, mark):
        """Resolve a dotted 'module.object' name; bare names come from builtins."""
        if not name:
            raise ConstructorError("while constructing a Python object", mark,
                    "expected non-empty name appended to the tag", mark)
        if '.' in name:
            module_name, object_name = name.rsplit('.', 1)
        else:
            module_name = 'builtins'
            object_name = name
        try:
            __import__(module_name)
        except ImportError as exc:
            raise ConstructorError("while constructing a Python object", mark,
                    "cannot find module %r (%s)" % (module_name, exc), mark)
        module = sys.modules[module_name]
        if not hasattr(module, object_name):
            raise ConstructorError("while constructing a Python object", mark,
                    "cannot find %r in the module %r"
                    % (object_name, module.__name__), mark)
        return getattr(module, object_name)

    def construct_python_name(self, suffix, node):
        # The node's own value must be empty; the name lives in the tag suffix.
        value = self.construct_scalar(node)
        if value:
            raise ConstructorError("while constructing a Python name", node.start_mark,
                    "expected the empty value, but found %r" % value, node.start_mark)
        return self.find_python_name(suffix, node.start_mark)

    def construct_python_module(self, suffix, node):
        value = self.construct_scalar(node)
        if value:
            raise ConstructorError("while constructing a Python module", node.start_mark,
                    "expected the empty value, but found %r" % value, node.start_mark)
        return self.find_python_module(suffix, node.start_mark)

    def make_python_instance(self, suffix, node,
            args=None, kwds=None, newobj=False):
        """Instantiate the class named by *suffix* (via __new__ if *newobj*)."""
        if not args:
            args = []
        if not kwds:
            kwds = {}
        cls = self.find_python_name(suffix, node.start_mark)
        if newobj and isinstance(cls, type):
            return cls.__new__(cls, *args, **kwds)
        else:
            return cls(*args, **kwds)

    def set_python_instance_state(self, instance, state):
        """Restore *state* onto *instance* following the pickle protocol."""
        if hasattr(instance, '__setstate__'):
            instance.__setstate__(state)
        else:
            slotstate = {}
            # A (dict_state, slot_state) pair mirrors pickle's 2-tuple state.
            if isinstance(state, tuple) and len(state) == 2:
                state, slotstate = state
            if hasattr(instance, '__dict__'):
                instance.__dict__.update(state)
            elif state:
                slotstate.update(state)
            for key, value in slotstate.items():
                # BUG FIX: was `setattr(object, key, value)`, which tried to
                # set attributes on the builtin `object` type instead of the
                # instance being restored.
                setattr(instance, key, value)

    def construct_python_object(self, suffix, node):
        # Format:
        #   !!python/object:module.name { ... state ... }
        instance = self.make_python_instance(suffix, node, newobj=True)
        yield instance
        deep = hasattr(instance, '__setstate__')
        state = self.construct_mapping(node, deep=deep)
        self.set_python_instance_state(instance, state)

    def construct_python_object_apply(self, suffix, node, newobj=False):
        # Format:
        #   !!python/object/apply       # (or !!python/object/new)
        #   args: [ ... arguments ... ]
        #   kwds: { ... keywords ... }
        #   state: ... state ...
        #   listitems: [ ... listitems ... ]
        #   dictitems: { ... dictitems ... }
        # or short format:
        #   !!python/object/apply [ ... arguments ... ]
        # The difference between !!python/object/apply and !!python/object/new
        # is how an object is created, check make_python_instance for details.
        if isinstance(node, SequenceNode):
            args = self.construct_sequence(node, deep=True)
            kwds = {}
            state = {}
            listitems = []
            dictitems = {}
        else:
            value = self.construct_mapping(node, deep=True)
            args = value.get('args', [])
            kwds = value.get('kwds', {})
            state = value.get('state', {})
            listitems = value.get('listitems', [])
            dictitems = value.get('dictitems', {})
        instance = self.make_python_instance(suffix, node, args, kwds, newobj)
        if state:
            self.set_python_instance_state(instance, state)
        if listitems:
            instance.extend(listitems)
        if dictitems:
            for key in dictitems:
                instance[key] = dictitems[key]
        return instance

    def construct_python_object_new(self, suffix, node):
        return self.construct_python_object_apply(suffix, node, newobj=True)
+
# Register the python/* tags on the full Constructor.  Exact-tag entries
# reuse the Safe constructors where the semantics coincide; the trailing
# multi-constructors dispatch on the name appended after the ':'.
Constructor.add_constructor(
    'tag:yaml.org,2002:python/none',
    Constructor.construct_yaml_null)

Constructor.add_constructor(
    'tag:yaml.org,2002:python/bool',
    Constructor.construct_yaml_bool)

Constructor.add_constructor(
    'tag:yaml.org,2002:python/str',
    Constructor.construct_python_str)

Constructor.add_constructor(
    'tag:yaml.org,2002:python/unicode',
    Constructor.construct_python_unicode)

Constructor.add_constructor(
    'tag:yaml.org,2002:python/bytes',
    Constructor.construct_python_bytes)

Constructor.add_constructor(
    'tag:yaml.org,2002:python/int',
    Constructor.construct_yaml_int)

Constructor.add_constructor(
    'tag:yaml.org,2002:python/long',
    Constructor.construct_python_long)

Constructor.add_constructor(
    'tag:yaml.org,2002:python/float',
    Constructor.construct_yaml_float)

Constructor.add_constructor(
    'tag:yaml.org,2002:python/complex',
    Constructor.construct_python_complex)

Constructor.add_constructor(
    'tag:yaml.org,2002:python/list',
    Constructor.construct_yaml_seq)

Constructor.add_constructor(
    'tag:yaml.org,2002:python/tuple',
    Constructor.construct_python_tuple)

Constructor.add_constructor(
    'tag:yaml.org,2002:python/dict',
    Constructor.construct_yaml_map)

Constructor.add_multi_constructor(
    'tag:yaml.org,2002:python/name:',
    Constructor.construct_python_name)

Constructor.add_multi_constructor(
    'tag:yaml.org,2002:python/module:',
    Constructor.construct_python_module)

Constructor.add_multi_constructor(
    'tag:yaml.org,2002:python/object:',
    Constructor.construct_python_object)

Constructor.add_multi_constructor(
    'tag:yaml.org,2002:python/object/apply:',
    Constructor.construct_python_object_apply)

Constructor.add_multi_constructor(
    'tag:yaml.org,2002:python/object/new:',
    Constructor.construct_python_object_new)
+
diff --git a/lib/spack/external/yaml/lib3/yaml/cyaml.py b/lib/spack/external/yaml/lib3/yaml/cyaml.py
new file mode 100644
index 0000000000..d5cb87e994
--- /dev/null
+++ b/lib/spack/external/yaml/lib3/yaml/cyaml.py
@@ -0,0 +1,85 @@
+
+__all__ = ['CBaseLoader', 'CSafeLoader', 'CLoader',
+ 'CBaseDumper', 'CSafeDumper', 'CDumper']
+
+from _yaml import CParser, CEmitter
+
+from .constructor import *
+
+from .serializer import *
+from .representer import *
+
+from .resolver import *
+
class CBaseLoader(CParser, BaseConstructor, BaseResolver):
    """LibYAML-backed loader paired with the base constructor/resolver."""

    def __init__(self, stream):
        # CParser (from the _yaml C extension) replaces the pure-Python
        # reader/scanner/parser/composer stack.
        CParser.__init__(self, stream)
        BaseConstructor.__init__(self)
        BaseResolver.__init__(self)
+
class CSafeLoader(CParser, SafeConstructor, Resolver):
    """LibYAML-backed loader restricted to the standard YAML tags."""

    def __init__(self, stream):
        CParser.__init__(self, stream)
        SafeConstructor.__init__(self)
        Resolver.__init__(self)
+
class CLoader(CParser, Constructor, Resolver):
    """LibYAML-backed loader with the full (unsafe) python/* constructor."""

    def __init__(self, stream):
        CParser.__init__(self, stream)
        Constructor.__init__(self)
        Resolver.__init__(self)
+
class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver):
    """LibYAML-backed dumper paired with the base representer/resolver."""

    def __init__(self, stream,
            default_style=None, default_flow_style=None,
            canonical=None, indent=None, width=None,
            allow_unicode=None, line_break=None,
            encoding=None, explicit_start=None, explicit_end=None,
            version=None, tags=None):
        # CEmitter (from the _yaml C extension) replaces the pure-Python
        # emitter/serializer stack, so it takes the serializer options too.
        CEmitter.__init__(self, stream, canonical=canonical,
                indent=indent, width=width, encoding=encoding,
                allow_unicode=allow_unicode, line_break=line_break,
                explicit_start=explicit_start, explicit_end=explicit_end,
                version=version, tags=tags)
        # NOTE(review): the declared bases are BaseRepresenter/BaseResolver,
        # yet Representer.__init__/Resolver.__init__ are invoked.  That is
        # only equivalent if those subclasses define no __init__ of their
        # own — confirm against representer.py/resolver.py before changing.
        Representer.__init__(self, default_style=default_style,
                default_flow_style=default_flow_style)
        Resolver.__init__(self)
+
class CSafeDumper(CEmitter, SafeRepresenter, Resolver):
    """LibYAML-backed dumper restricted to the standard YAML tags."""

    def __init__(self, stream,
            default_style=None, default_flow_style=None,
            canonical=None, indent=None, width=None,
            allow_unicode=None, line_break=None,
            encoding=None, explicit_start=None, explicit_end=None,
            version=None, tags=None):
        CEmitter.__init__(self, stream, canonical=canonical,
                indent=indent, width=width, encoding=encoding,
                allow_unicode=allow_unicode, line_break=line_break,
                explicit_start=explicit_start, explicit_end=explicit_end,
                version=version, tags=tags)
        SafeRepresenter.__init__(self, default_style=default_style,
                default_flow_style=default_flow_style)
        Resolver.__init__(self)
+
class CDumper(CEmitter, Serializer, Representer, Resolver):
    """LibYAML-backed dumper with the full representer."""

    def __init__(self, stream,
            default_style=None, default_flow_style=None,
            canonical=None, indent=None, width=None,
            allow_unicode=None, line_break=None,
            encoding=None, explicit_start=None, explicit_end=None,
            version=None, tags=None):
        CEmitter.__init__(self, stream, canonical=canonical,
                indent=indent, width=width, encoding=encoding,
                allow_unicode=allow_unicode, line_break=line_break,
                explicit_start=explicit_start, explicit_end=explicit_end,
                version=version, tags=tags)
        Representer.__init__(self, default_style=default_style,
                default_flow_style=default_flow_style)
        Resolver.__init__(self)
+
diff --git a/lib/spack/external/yaml/lib3/yaml/dumper.py b/lib/spack/external/yaml/lib3/yaml/dumper.py
new file mode 100644
index 0000000000..0b69128771
--- /dev/null
+++ b/lib/spack/external/yaml/lib3/yaml/dumper.py
@@ -0,0 +1,62 @@
+
+__all__ = ['BaseDumper', 'SafeDumper', 'Dumper']
+
+from .emitter import *
+from .serializer import *
+from .representer import *
+from .resolver import *
+
class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver):
    """Pure-Python dumper paired with the base representer/resolver."""

    def __init__(self, stream,
            default_style=None, default_flow_style=None,
            canonical=None, indent=None, width=None,
            allow_unicode=None, line_break=None,
            encoding=None, explicit_start=None, explicit_end=None,
            version=None, tags=None):
        Emitter.__init__(self, stream, canonical=canonical,
                indent=indent, width=width,
                allow_unicode=allow_unicode, line_break=line_break)
        Serializer.__init__(self, encoding=encoding,
                explicit_start=explicit_start, explicit_end=explicit_end,
                version=version, tags=tags)
        # NOTE(review): the declared bases are BaseRepresenter/BaseResolver,
        # yet Representer.__init__/Resolver.__init__ are invoked.  That is
        # only equivalent if those subclasses define no __init__ of their
        # own — confirm against representer.py/resolver.py before changing.
        Representer.__init__(self, default_style=default_style,
                default_flow_style=default_flow_style)
        Resolver.__init__(self)
+
class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver):
    """Pure-Python dumper restricted to the standard YAML tags."""

    def __init__(self, stream,
            default_style=None, default_flow_style=None,
            canonical=None, indent=None, width=None,
            allow_unicode=None, line_break=None,
            encoding=None, explicit_start=None, explicit_end=None,
            version=None, tags=None):
        Emitter.__init__(self, stream, canonical=canonical,
                indent=indent, width=width,
                allow_unicode=allow_unicode, line_break=line_break)
        Serializer.__init__(self, encoding=encoding,
                explicit_start=explicit_start, explicit_end=explicit_end,
                version=version, tags=tags)
        SafeRepresenter.__init__(self, default_style=default_style,
                default_flow_style=default_flow_style)
        Resolver.__init__(self)
+
class Dumper(Emitter, Serializer, Representer, Resolver):
    """Pure-Python dumper with the full representer (the default dumper)."""

    def __init__(self, stream,
            default_style=None, default_flow_style=None,
            canonical=None, indent=None, width=None,
            allow_unicode=None, line_break=None,
            encoding=None, explicit_start=None, explicit_end=None,
            version=None, tags=None):
        Emitter.__init__(self, stream, canonical=canonical,
                indent=indent, width=width,
                allow_unicode=allow_unicode, line_break=line_break)
        Serializer.__init__(self, encoding=encoding,
                explicit_start=explicit_start, explicit_end=explicit_end,
                version=version, tags=tags)
        Representer.__init__(self, default_style=default_style,
                default_flow_style=default_flow_style)
        Resolver.__init__(self)
+
diff --git a/lib/spack/external/yaml/lib3/yaml/emitter.py b/lib/spack/external/yaml/lib3/yaml/emitter.py
new file mode 100644
index 0000000000..34cb145a5f
--- /dev/null
+++ b/lib/spack/external/yaml/lib3/yaml/emitter.py
@@ -0,0 +1,1137 @@
+
+# Emitter expects events obeying the following grammar:
+# stream ::= STREAM-START document* STREAM-END
+# document ::= DOCUMENT-START node DOCUMENT-END
+# node ::= SCALAR | sequence | mapping
+# sequence ::= SEQUENCE-START node* SEQUENCE-END
+# mapping ::= MAPPING-START (node node)* MAPPING-END
+
+__all__ = ['Emitter', 'EmitterError']
+
+from .error import YAMLError
+from .events import *
+
class EmitterError(YAMLError):
    """Error raised when the emitter receives an event it cannot handle."""
    pass
+
class ScalarAnalysis:
    """Properties the emitter derives from one scalar value.

    Records the text itself plus which output styles (flow/block plain,
    single/double quoted, block) may legally represent it.
    """

    def __init__(self, scalar, empty, multiline,
            allow_flow_plain, allow_block_plain,
            allow_single_quoted, allow_double_quoted,
            allow_block):
        # Store every constructor argument as a same-named attribute.
        vars(self).update(
            scalar=scalar,
            empty=empty,
            multiline=multiline,
            allow_flow_plain=allow_flow_plain,
            allow_block_plain=allow_block_plain,
            allow_single_quoted=allow_single_quoted,
            allow_double_quoted=allow_double_quoted,
            allow_block=allow_block,
        )
+
+class Emitter:
+
+ DEFAULT_TAG_PREFIXES = {
+ '!' : '!',
+ 'tag:yaml.org,2002:' : '!!',
+ }
+
    def __init__(self, stream, canonical=None, indent=None, width=None,
            allow_unicode=None, line_break=None):
        """Create an emitter writing to *stream* with the given formatting.

        Out-of-range indent/width values and unknown line breaks silently
        fall back to the defaults (indent 2, width 80, '\\n').
        """

        # The stream should have the methods `write` and possibly `flush`.
        self.stream = stream

        # Encoding can be overriden by STREAM-START.
        self.encoding = None

        # Emitter is a state machine with a stack of states to handle nested
        # structures.
        self.states = []
        self.state = self.expect_stream_start

        # Current event and the event queue.
        self.events = []
        self.event = None

        # The current indentation level and the stack of previous indents.
        self.indents = []
        self.indent = None

        # Flow level.
        self.flow_level = 0

        # Contexts.
        self.root_context = False
        self.sequence_context = False
        self.mapping_context = False
        self.simple_key_context = False

        # Characteristics of the last emitted character:
        #  - current position.
        #  - is it a whitespace?
        #  - is it an indention character
        #    (indentation space, '-', '?', or ':')?
        self.line = 0
        self.column = 0
        self.whitespace = True
        self.indention = True

        # Whether the document requires an explicit document indicator
        self.open_ended = False

        # Formatting details.
        self.canonical = canonical
        self.allow_unicode = allow_unicode
        # Only indents in 2..9 and widths wider than twice the indent are
        # accepted; anything else keeps the defaults.
        self.best_indent = 2
        if indent and 1 < indent < 10:
            self.best_indent = indent
        self.best_width = 80
        if width and width > self.best_indent*2:
            self.best_width = width
        self.best_line_break = '\n'
        if line_break in ['\r', '\n', '\r\n']:
            self.best_line_break = line_break

        # Tag prefixes.
        self.tag_prefixes = None

        # Prepared anchor and tag.
        self.prepared_anchor = None
        self.prepared_tag = None

        # Scalar analysis and style.
        self.analysis = None
        self.style = None
+
    def dispose(self):
        """Drop the bound-method state stack (it holds references to self)."""
        # Reset the state attributes (to clear self-references)
        self.states = []
        self.state = None
+
    def emit(self, event):
        """Queue *event* and run the state machine while enough are buffered."""
        self.events.append(event)
        # Some states need to look ahead (see need_more_events), so events
        # are processed only once enough of them have been queued.
        while not self.need_more_events():
            self.event = self.events.pop(0)
            self.state()
            self.event = None
+
+ # In some cases, we wait for a few next events before emitting.
+
+ def need_more_events(self):
+ if not self.events:
+ return True
+ event = self.events[0]
+ if isinstance(event, DocumentStartEvent):
+ return self.need_events(1)
+ elif isinstance(event, SequenceStartEvent):
+ return self.need_events(2)
+ elif isinstance(event, MappingStartEvent):
+ return self.need_events(3)
+ else:
+ return False
+
+ def need_events(self, count):
+ level = 0
+ for event in self.events[1:]:
+ if isinstance(event, (DocumentStartEvent, CollectionStartEvent)):
+ level += 1
+ elif isinstance(event, (DocumentEndEvent, CollectionEndEvent)):
+ level -= 1
+ elif isinstance(event, StreamEndEvent):
+ level = -1
+ if level < 0:
+ return False
+ return (len(self.events) < count+1)
+
+ def increase_indent(self, flow=False, indentless=False):
+ self.indents.append(self.indent)
+ if self.indent is None:
+ if flow:
+ self.indent = self.best_indent
+ else:
+ self.indent = 0
+ elif not indentless:
+ self.indent += self.best_indent
+
+ # States.
+
+ # Stream handlers.
+
    def expect_stream_start(self):
        """Initial state: consume STREAM-START and move to document start."""
        if isinstance(self.event, StreamStartEvent):
            # Only honor the event's encoding when the stream does not
            # already encode text itself.
            if self.event.encoding and not hasattr(self.stream, 'encoding'):
                self.encoding = self.event.encoding
            self.write_stream_start()
            self.state = self.expect_first_document_start
        else:
            raise EmitterError("expected StreamStartEvent, but got %s"
                    % self.event)
+
    def expect_nothing(self):
        """Terminal state after STREAM-END: any further event is an error."""
        raise EmitterError("expected nothing, but got %s" % self.event)
+
+ # Document handlers.
+
    def expect_first_document_start(self):
        # The first document may omit the '---' indicator (see the implicit
        # computation in expect_document_start).
        return self.expect_document_start(first=True)
+
    def expect_document_start(self, first=False):
        """Emit directives and the '---' indicator, or end the stream."""
        if isinstance(self.event, DocumentStartEvent):
            # Directives after an open-ended document need a '...' first.
            if (self.event.version or self.event.tags) and self.open_ended:
                self.write_indicator('...', True)
                self.write_indent()
            if self.event.version:
                version_text = self.prepare_version(self.event.version)
                self.write_version_directive(version_text)
            self.tag_prefixes = self.DEFAULT_TAG_PREFIXES.copy()
            if self.event.tags:
                handles = sorted(self.event.tags.keys())
                for handle in handles:
                    prefix = self.event.tags[handle]
                    self.tag_prefixes[prefix] = handle
                    handle_text = self.prepare_tag_handle(handle)
                    prefix_text = self.prepare_tag_prefix(prefix)
                    self.write_tag_directive(handle_text, prefix_text)
            # '---' may be omitted only for an implicit first document with
            # no directives, default formatting and a non-empty root.
            implicit = (first and not self.event.explicit and not self.canonical
                    and not self.event.version and not self.event.tags
                    and not self.check_empty_document())
            if not implicit:
                self.write_indent()
                self.write_indicator('---', True)
                if self.canonical:
                    self.write_indent()
            self.state = self.expect_document_root
        elif isinstance(self.event, StreamEndEvent):
            if self.open_ended:
                self.write_indicator('...', True)
                self.write_indent()
            self.write_stream_end()
            self.state = self.expect_nothing
        else:
            raise EmitterError("expected DocumentStartEvent, but got %s"
                    % self.event)
+
    def expect_document_end(self):
        """Finish a document; write '...' only when the end is explicit."""
        if isinstance(self.event, DocumentEndEvent):
            self.write_indent()
            if self.event.explicit:
                self.write_indicator('...', True)
                self.write_indent()
            self.flush_stream()
            self.state = self.expect_document_start
        else:
            raise EmitterError("expected DocumentEndEvent, but got %s"
                    % self.event)
+
    def expect_document_root(self):
        """Emit the document's root node, then proceed to the document end."""
        self.states.append(self.expect_document_end)
        self.expect_node(root=True)
+
+ # Node handlers.
+
    def expect_node(self, root=False, sequence=False, mapping=False,
            simple_key=False):
        """Dispatch the current event to the matching node emitter.  The
        flags record the syntactic context of the node being emitted and
        are consulted by the scalar/indent logic."""
        self.root_context = root
        self.sequence_context = sequence
        self.mapping_context = mapping
        self.simple_key_context = simple_key
        if isinstance(self.event, AliasEvent):
            self.expect_alias()
        elif isinstance(self.event, (ScalarEvent, CollectionStartEvent)):
            self.process_anchor('&')
            self.process_tag()
            if isinstance(self.event, ScalarEvent):
                self.expect_scalar()
            elif isinstance(self.event, SequenceStartEvent):
                # Flow style when already inside flow, in canonical mode,
                # explicitly requested, or when the collection is empty.
                if self.flow_level or self.canonical or self.event.flow_style \
                        or self.check_empty_sequence():
                    self.expect_flow_sequence()
                else:
                    self.expect_block_sequence()
            elif isinstance(self.event, MappingStartEvent):
                if self.flow_level or self.canonical or self.event.flow_style \
                        or self.check_empty_mapping():
                    self.expect_flow_mapping()
                else:
                    self.expect_block_mapping()
        else:
            raise EmitterError("expected NodeEvent, but got %s" % self.event)
+
    def expect_alias(self):
        """Emit '*anchor' for an alias node and return to the saved state."""
        if self.event.anchor is None:
            raise EmitterError("anchor is not specified for alias")
        self.process_anchor('*')
        self.state = self.states.pop()
+
    def expect_scalar(self):
        """Emit a scalar node; scalars get a temporary flow indent level."""
        self.increase_indent(flow=True)
        self.process_scalar()
        self.indent = self.indents.pop()
        self.state = self.states.pop()
+
+ # Flow sequence handlers.
+
+ def expect_flow_sequence(self):
+ self.write_indicator('[', True, whitespace=True)
+ self.flow_level += 1
+ self.increase_indent(flow=True)
+ self.state = self.expect_first_flow_sequence_item
+
    def expect_first_flow_sequence_item(self):
        """Emit the first flow-sequence item, or ']' for an empty sequence
        (no leading ',' in either case)."""
        if isinstance(self.event, SequenceEndEvent):
            self.indent = self.indents.pop()
            self.flow_level -= 1
            self.write_indicator(']', False)
            self.state = self.states.pop()
        else:
            if self.canonical or self.column > self.best_width:
                self.write_indent()
            self.states.append(self.expect_flow_sequence_item)
            self.expect_node(sequence=True)
+
    def expect_flow_sequence_item(self):
        """Emit a subsequent flow-sequence item preceded by ',', or close
        the sequence with ']' (canonical output keeps a trailing comma)."""
        if isinstance(self.event, SequenceEndEvent):
            self.indent = self.indents.pop()
            self.flow_level -= 1
            if self.canonical:
                self.write_indicator(',', False)
                self.write_indent()
            self.write_indicator(']', False)
            self.state = self.states.pop()
        else:
            self.write_indicator(',', False)
            if self.canonical or self.column > self.best_width:
                self.write_indent()
            self.states.append(self.expect_flow_sequence_item)
            self.expect_node(sequence=True)
+
+ # Flow mapping handlers.
+
+ def expect_flow_mapping(self):
+ self.write_indicator('{', True, whitespace=True)
+ self.flow_level += 1
+ self.increase_indent(flow=True)
+ self.state = self.expect_first_flow_mapping_key
+
    def expect_first_flow_mapping_key(self):
        """Emit the first flow-mapping key (simple 'key:' form when safe,
        otherwise the explicit '?' form), or '}' for an empty mapping."""
        if isinstance(self.event, MappingEndEvent):
            self.indent = self.indents.pop()
            self.flow_level -= 1
            self.write_indicator('}', False)
            self.state = self.states.pop()
        else:
            if self.canonical or self.column > self.best_width:
                self.write_indent()
            if not self.canonical and self.check_simple_key():
                self.states.append(self.expect_flow_mapping_simple_value)
                self.expect_node(mapping=True, simple_key=True)
            else:
                self.write_indicator('?', True)
                self.states.append(self.expect_flow_mapping_value)
                self.expect_node(mapping=True)
+
    def expect_flow_mapping_key(self):
        """Emit a subsequent flow-mapping key preceded by ',', or close the
        mapping with '}' (canonical output keeps a trailing comma)."""
        if isinstance(self.event, MappingEndEvent):
            self.indent = self.indents.pop()
            self.flow_level -= 1
            if self.canonical:
                self.write_indicator(',', False)
                self.write_indent()
            self.write_indicator('}', False)
            self.state = self.states.pop()
        else:
            self.write_indicator(',', False)
            if self.canonical or self.column > self.best_width:
                self.write_indent()
            if not self.canonical and self.check_simple_key():
                self.states.append(self.expect_flow_mapping_simple_value)
                self.expect_node(mapping=True, simple_key=True)
            else:
                self.write_indicator('?', True)
                self.states.append(self.expect_flow_mapping_value)
                self.expect_node(mapping=True)
+
    def expect_flow_mapping_simple_value(self):
        """Emit ':' right after a simple key, then the value node."""
        self.write_indicator(':', False)
        self.states.append(self.expect_flow_mapping_key)
        self.expect_node(mapping=True)
+
    def expect_flow_mapping_value(self):
        """Emit ': ' after an explicit ('?') key, then the value node."""
        if self.canonical or self.column > self.best_width:
            self.write_indent()
        self.write_indicator(':', True)
        self.states.append(self.expect_flow_mapping_key)
        self.expect_node(mapping=True)
+
+ # Block sequence handlers.
+
    def expect_block_sequence(self):
        """Open a block sequence; a sequence nested directly in a mapping
        value reuses the mapping's indentation (indentless)."""
        indentless = (self.mapping_context and not self.indention)
        self.increase_indent(flow=False, indentless=indentless)
        self.state = self.expect_first_block_sequence_item
+
    def expect_first_block_sequence_item(self):
        """First block-sequence item; shares logic with later items."""
        return self.expect_block_sequence_item(first=True)
+
+ def expect_block_sequence_item(self, first=False):
+ if not first and isinstance(self.event, SequenceEndEvent):
+ self.indent = self.indents.pop()
+ self.state = self.states.pop()
+ else:
+ self.write_indent()
+ self.write_indicator('-', True, indention=True)
+ self.states.append(self.expect_block_sequence_item)
+ self.expect_node(sequence=True)
+
+ # Block mapping handlers.
+
    def expect_block_mapping(self):
        """Open a block mapping at the next indentation level."""
        self.increase_indent(flow=False)
        self.state = self.expect_first_block_mapping_key
+
    def expect_first_block_mapping_key(self):
        """First block-mapping key; shares logic with later keys."""
        return self.expect_block_mapping_key(first=True)
+
+ def expect_block_mapping_key(self, first=False):
+ if not first and isinstance(self.event, MappingEndEvent):
+ self.indent = self.indents.pop()
+ self.state = self.states.pop()
+ else:
+ self.write_indent()
+ if self.check_simple_key():
+ self.states.append(self.expect_block_mapping_simple_value)
+ self.expect_node(mapping=True, simple_key=True)
+ else:
+ self.write_indicator('?', True, indention=True)
+ self.states.append(self.expect_block_mapping_value)
+ self.expect_node(mapping=True)
+
    def expect_block_mapping_simple_value(self):
        """Emit ':' right after a simple block key, then the value node."""
        self.write_indicator(':', False)
        self.states.append(self.expect_block_mapping_key)
        self.expect_node(mapping=True)
+
    def expect_block_mapping_value(self):
        """Emit ':' on a fresh line after an explicit ('?') block key."""
        self.write_indent()
        self.write_indicator(':', True, indention=True)
        self.states.append(self.expect_block_mapping_key)
        self.expect_node(mapping=True)
+
+ # Checkers.
+
    def check_empty_sequence(self):
        """Truthy if the current event opens a sequence that the very next
        buffered event closes (i.e. the sequence is empty)."""
        return (isinstance(self.event, SequenceStartEvent) and self.events
                and isinstance(self.events[0], SequenceEndEvent))
+
    def check_empty_mapping(self):
        """Truthy if the current event opens a mapping that the very next
        buffered event closes (i.e. the mapping is empty)."""
        return (isinstance(self.event, MappingStartEvent) and self.events
                and isinstance(self.events[0], MappingEndEvent))
+
    def check_empty_document(self):
        """Truthy if the document about to start contains only a single
        empty, implicit, untagged scalar."""
        if not isinstance(self.event, DocumentStartEvent) or not self.events:
            return False
        event = self.events[0]
        return (isinstance(event, ScalarEvent) and event.anchor is None
                and event.tag is None and event.implicit and event.value == '')
+
    def check_simple_key(self):
        """Decide whether the current node may be written as a simple
        ('key: value') mapping key: an alias, a short non-empty single-line
        scalar, or an empty collection, with the serialized form under 128
        characters.  Caches prepared anchor/tag/analysis for later reuse."""
        length = 0
        if isinstance(self.event, NodeEvent) and self.event.anchor is not None:
            if self.prepared_anchor is None:
                self.prepared_anchor = self.prepare_anchor(self.event.anchor)
            length += len(self.prepared_anchor)
        if isinstance(self.event, (ScalarEvent, CollectionStartEvent))  \
                and self.event.tag is not None:
            if self.prepared_tag is None:
                self.prepared_tag = self.prepare_tag(self.event.tag)
            length += len(self.prepared_tag)
        if isinstance(self.event, ScalarEvent):
            if self.analysis is None:
                self.analysis = self.analyze_scalar(self.event.value)
            length += len(self.analysis.scalar)
        return (length < 128 and (isinstance(self.event, AliasEvent)
            or (isinstance(self.event, ScalarEvent)
                and not self.analysis.empty and not self.analysis.multiline)
            or self.check_empty_sequence() or self.check_empty_mapping()))
+
+ # Anchor, Tag, and Scalar processors.
+
    def process_anchor(self, indicator):
        """Write '&anchor' or '*anchor' for the current event, reusing the
        form cached by check_simple_key() when present."""
        if self.event.anchor is None:
            self.prepared_anchor = None
            return
        if self.prepared_anchor is None:
            self.prepared_anchor = self.prepare_anchor(self.event.anchor)
        if self.prepared_anchor:
            self.write_indicator(indicator+self.prepared_anchor, True)
        self.prepared_anchor = None
+
    def process_tag(self):
        """Write the node's tag unless it may stay implicit: for scalars
        this depends on the chosen style and the event's implicit flags;
        for collections on the implicit flag alone (unless canonical)."""
        tag = self.event.tag
        if isinstance(self.event, ScalarEvent):
            if self.style is None:
                self.style = self.choose_scalar_style()
            if ((not self.canonical or tag is None) and
                ((self.style == '' and self.event.implicit[0])
                        or (self.style != '' and self.event.implicit[1]))):
                self.prepared_tag = None
                return
            if self.event.implicit[0] and tag is None:
                # Resolvable as plain but emitted in another style:
                # force the non-specific '!' tag.
                tag = '!'
                self.prepared_tag = None
        else:
            if (not self.canonical or tag is None) and self.event.implicit:
                self.prepared_tag = None
                return
        if tag is None:
            raise EmitterError("tag is not specified")
        if self.prepared_tag is None:
            self.prepared_tag = self.prepare_tag(tag)
        if self.prepared_tag:
            self.write_indicator(self.prepared_tag, True)
        self.prepared_tag = None
+
    def choose_scalar_style(self):
        """Pick the output style for the current scalar — plain (''),
        single-quoted, double-quoted, literal '|' or folded '>' — honoring
        the requested style when the scalar analysis allows it; double
        quotes are the universal fallback."""
        if self.analysis is None:
            self.analysis = self.analyze_scalar(self.event.value)
        if self.event.style == '"' or self.canonical:
            return '"'
        if not self.event.style and self.event.implicit[0]:
            if (not (self.simple_key_context and
                    (self.analysis.empty or self.analysis.multiline))
                and (self.flow_level and self.analysis.allow_flow_plain
                    or (not self.flow_level and self.analysis.allow_block_plain))):
                return ''
        if self.event.style and self.event.style in '|>':
            # Block styles are illegal in flow context and in simple keys.
            if (not self.flow_level and not self.simple_key_context
                    and self.analysis.allow_block):
                return self.event.style
        if not self.event.style or self.event.style == '\'':
            if (self.analysis.allow_single_quoted and
                    not (self.simple_key_context and self.analysis.multiline)):
                return '\''
        return '"'
+
    def process_scalar(self):
        """Write the current scalar in the chosen style and clear the
        cached analysis/style for the next scalar."""
        if self.analysis is None:
            self.analysis = self.analyze_scalar(self.event.value)
        if self.style is None:
            self.style = self.choose_scalar_style()
        # Splitting long lines is disabled inside simple keys.
        split = (not self.simple_key_context)
        #if self.analysis.multiline and split    \
        #        and (not self.style or self.style in '\'\"'):
        #    self.write_indent()
        if self.style == '"':
            self.write_double_quoted(self.analysis.scalar, split)
        elif self.style == '\'':
            self.write_single_quoted(self.analysis.scalar, split)
        elif self.style == '>':
            self.write_folded(self.analysis.scalar)
        elif self.style == '|':
            self.write_literal(self.analysis.scalar)
        else:
            self.write_plain(self.analysis.scalar, split)
        self.analysis = None
        self.style = None
+
+ # Analyzers.
+
+ def prepare_version(self, version):
+ major, minor = version
+ if major != 1:
+ raise EmitterError("unsupported YAML version: %d.%d" % (major, minor))
+ return '%d.%d' % (major, minor)
+
+ def prepare_tag_handle(self, handle):
+ if not handle:
+ raise EmitterError("tag handle must not be empty")
+ if handle[0] != '!' or handle[-1] != '!':
+ raise EmitterError("tag handle must start and end with '!': %r" % handle)
+ for ch in handle[1:-1]:
+ if not ('0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
+ or ch in '-_'):
+ raise EmitterError("invalid character %r in the tag handle: %r"
+ % (ch, handle))
+ return handle
+
+ def prepare_tag_prefix(self, prefix):
+ if not prefix:
+ raise EmitterError("tag prefix must not be empty")
+ chunks = []
+ start = end = 0
+ if prefix[0] == '!':
+ end = 1
+ while end < len(prefix):
+ ch = prefix[end]
+ if '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
+ or ch in '-;/?!:@&=+$,_.~*\'()[]':
+ end += 1
+ else:
+ if start < end:
+ chunks.append(prefix[start:end])
+ start = end = end+1
+ data = ch.encode('utf-8')
+ for ch in data:
+ chunks.append('%%%02X' % ord(ch))
+ if start < end:
+ chunks.append(prefix[start:end])
+ return ''.join(chunks)
+
+ def prepare_tag(self, tag):
+ if not tag:
+ raise EmitterError("tag must not be empty")
+ if tag == '!':
+ return tag
+ handle = None
+ suffix = tag
+ prefixes = sorted(self.tag_prefixes.keys())
+ for prefix in prefixes:
+ if tag.startswith(prefix) \
+ and (prefix == '!' or len(prefix) < len(tag)):
+ handle = self.tag_prefixes[prefix]
+ suffix = tag[len(prefix):]
+ chunks = []
+ start = end = 0
+ while end < len(suffix):
+ ch = suffix[end]
+ if '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
+ or ch in '-;/?:@&=+$,_.~*\'()[]' \
+ or (ch == '!' and handle != '!'):
+ end += 1
+ else:
+ if start < end:
+ chunks.append(suffix[start:end])
+ start = end = end+1
+ data = ch.encode('utf-8')
+ for ch in data:
+ chunks.append('%%%02X' % ord(ch))
+ if start < end:
+ chunks.append(suffix[start:end])
+ suffix_text = ''.join(chunks)
+ if handle:
+ return '%s%s' % (handle, suffix_text)
+ else:
+ return '!<%s>' % suffix_text
+
+ def prepare_anchor(self, anchor):
+ if not anchor:
+ raise EmitterError("anchor must not be empty")
+ for ch in anchor:
+ if not ('0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \
+ or ch in '-_'):
+ raise EmitterError("invalid character %r in the anchor: %r"
+ % (ch, anchor))
+ return anchor
+
    def analyze_scalar(self, scalar):
        """Classify a scalar's characters in a single pass and return a
        ScalarAnalysis saying which output styles (plain, quoted, block)
        may represent it losslessly."""

        # Empty scalar is a special case.
        if not scalar:
            return ScalarAnalysis(scalar=scalar, empty=True, multiline=False,
                    allow_flow_plain=False, allow_block_plain=True,
                    allow_single_quoted=True, allow_double_quoted=True,
                    allow_block=False)

        # Indicators and special characters.
        block_indicators = False
        flow_indicators = False
        line_breaks = False
        special_characters = False

        # Important whitespace combinations.
        leading_space = False
        leading_break = False
        trailing_space = False
        trailing_break = False
        break_space = False
        space_break = False

        # Check document indicators.
        if scalar.startswith('---') or scalar.startswith('...'):
            block_indicators = True
            flow_indicators = True

        # First character or preceded by a whitespace.
        preceeded_by_whitespace = True

        # Last character or followed by a whitespace.
        followed_by_whitespace = (len(scalar) == 1 or
                scalar[1] in '\0 \t\r\n\x85\u2028\u2029')

        # The previous character is a space.
        previous_space = False

        # The previous character is a break.
        previous_break = False

        index = 0
        while index < len(scalar):
            ch = scalar[index]

            # Check for indicators.
            if index == 0:
                # Leading indicators are special characters.
                if ch in '#,[]{}&*!|>\'\"%@`':
                    flow_indicators = True
                    block_indicators = True
                if ch in '?:':
                    flow_indicators = True
                    if followed_by_whitespace:
                        block_indicators = True
                if ch == '-' and followed_by_whitespace:
                    flow_indicators = True
                    block_indicators = True
            else:
                # Some indicators cannot appear within a scalar as well.
                if ch in ',?[]{}':
                    flow_indicators = True
                if ch == ':':
                    flow_indicators = True
                    if followed_by_whitespace:
                        block_indicators = True
                if ch == '#' and preceeded_by_whitespace:
                    flow_indicators = True
                    block_indicators = True

            # Check for line breaks, special, and unicode characters.
            if ch in '\n\x85\u2028\u2029':
                line_breaks = True
            if not (ch == '\n' or '\x20' <= ch <= '\x7E'):
                if (ch == '\x85' or '\xA0' <= ch <= '\uD7FF'
                        or '\uE000' <= ch <= '\uFFFD') and ch != '\uFEFF':
                    # NOTE(review): `unicode_characters` is set but never
                    # consulted below; only `special_characters` matters.
                    unicode_characters = True
                    if not self.allow_unicode:
                        special_characters = True
                else:
                    special_characters = True

            # Detect important whitespace combinations.
            if ch == ' ':
                if index == 0:
                    leading_space = True
                if index == len(scalar)-1:
                    trailing_space = True
                if previous_break:
                    break_space = True
                previous_space = True
                previous_break = False
            elif ch in '\n\x85\u2028\u2029':
                if index == 0:
                    leading_break = True
                if index == len(scalar)-1:
                    trailing_break = True
                if previous_space:
                    space_break = True
                previous_space = False
                previous_break = True
            else:
                previous_space = False
                previous_break = False

            # Prepare for the next character.
            index += 1
            preceeded_by_whitespace = (ch in '\0 \t\r\n\x85\u2028\u2029')
            followed_by_whitespace = (index+1 >= len(scalar) or
                    scalar[index+1] in '\0 \t\r\n\x85\u2028\u2029')

        # Let's decide what styles are allowed.
        allow_flow_plain = True
        allow_block_plain = True
        allow_single_quoted = True
        allow_double_quoted = True
        allow_block = True

        # Leading and trailing whitespaces are bad for plain scalars.
        if (leading_space or leading_break
                or trailing_space or trailing_break):
            allow_flow_plain = allow_block_plain = False

        # We do not permit trailing spaces for block scalars.
        if trailing_space:
            allow_block = False

        # Spaces at the beginning of a new line are only acceptable for block
        # scalars.
        if break_space:
            allow_flow_plain = allow_block_plain = allow_single_quoted = False

        # Spaces followed by breaks, as well as special character are only
        # allowed for double quoted scalars.
        if space_break or special_characters:
            allow_flow_plain = allow_block_plain =  \
                    allow_single_quoted = allow_block = False

        # Although the plain scalar writer supports breaks, we never emit
        # multiline plain scalars.
        if line_breaks:
            allow_flow_plain = allow_block_plain = False

        # Flow indicators are forbidden for flow plain scalars.
        if flow_indicators:
            allow_flow_plain = False

        # Block indicators are forbidden for block plain scalars.
        if block_indicators:
            allow_block_plain = False

        return ScalarAnalysis(scalar=scalar,
                empty=False, multiline=line_breaks,
                allow_flow_plain=allow_flow_plain,
                allow_block_plain=allow_block_plain,
                allow_single_quoted=allow_single_quoted,
                allow_double_quoted=allow_double_quoted,
                allow_block=allow_block)
+
+ # Writers.
+
+ def flush_stream(self):
+ if hasattr(self.stream, 'flush'):
+ self.stream.flush()
+
+ def write_stream_start(self):
+ # Write BOM if needed.
+ if self.encoding and self.encoding.startswith('utf-16'):
+ self.stream.write('\uFEFF'.encode(self.encoding))
+
    def write_stream_end(self):
        """Finish the stream by flushing any buffered output."""
        self.flush_stream()
+
    def write_indicator(self, indicator, need_whitespace,
            whitespace=False, indention=False):
        """Write an indicator token, inserting a separating space when the
        previous output did not end in whitespace but one is required;
        updates the column and whitespace/indentation bookkeeping."""
        if self.whitespace or not need_whitespace:
            data = indicator
        else:
            data = ' '+indicator
        self.whitespace = whitespace
        self.indention = self.indention and indention
        self.column += len(data)
        self.open_ended = False
        if self.encoding:
            data = data.encode(self.encoding)
        self.stream.write(data)
+
    def write_indent(self):
        """Start a fresh line if the cursor is past (or awkwardly at) the
        indentation point, then pad with spaces up to the current indent."""
        indent = self.indent or 0
        if not self.indention or self.column > indent   \
                or (self.column == indent and not self.whitespace):
            self.write_line_break()
        if self.column < indent:
            self.whitespace = True
            data = ' '*(indent-self.column)
            self.column = indent
            if self.encoding:
                data = data.encode(self.encoding)
            self.stream.write(data)
+
+ def write_line_break(self, data=None):
+ if data is None:
+ data = self.best_line_break
+ self.whitespace = True
+ self.indention = True
+ self.line += 1
+ self.column = 0
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+
+ def write_version_directive(self, version_text):
+ data = '%%YAML %s' % version_text
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ self.write_line_break()
+
+ def write_tag_directive(self, handle_text, prefix_text):
+ data = '%%TAG %s %s' % (handle_text, prefix_text)
+ if self.encoding:
+ data = data.encode(self.encoding)
+ self.stream.write(data)
+ self.write_line_break()
+
+ # Scalar streams.
+
    def write_single_quoted(self, text, split=True):
        """Write a single-quoted scalar: fold long lines at space runs
        (when `split`), preserve breaks, and double embedded quotes."""
        self.write_indicator('\'', True)
        spaces = False
        breaks = False
        start = end = 0
        while end <= len(text):
            ch = None
            if end < len(text):
                ch = text[end]
            if spaces:
                # End of a run of spaces: either fold the line here or
                # write the spaces out verbatim.
                if ch is None or ch != ' ':
                    if start+1 == end and self.column > self.best_width and split   \
                            and start != 0 and end != len(text):
                        self.write_indent()
                    else:
                        data = text[start:end]
                        self.column += len(data)
                        if self.encoding:
                            data = data.encode(self.encoding)
                        self.stream.write(data)
                    start = end
            elif breaks:
                # End of a run of line breaks: re-emit them ('\n' doubles up
                # because a single break would be folded away on input).
                if ch is None or ch not in '\n\x85\u2028\u2029':
                    if text[start] == '\n':
                        self.write_line_break()
                    for br in text[start:end]:
                        if br == '\n':
                            self.write_line_break()
                        else:
                            self.write_line_break(br)
                    self.write_indent()
                    start = end
            else:
                if ch is None or ch in ' \n\x85\u2028\u2029' or ch == '\'':
                    if start < end:
                        data = text[start:end]
                        self.column += len(data)
                        if self.encoding:
                            data = data.encode(self.encoding)
                        self.stream.write(data)
                        start = end
            if ch == '\'':
                # Escape a quote by doubling it.
                data = '\'\''
                self.column += 2
                if self.encoding:
                    data = data.encode(self.encoding)
                self.stream.write(data)
                start = end + 1
            if ch is not None:
                spaces = (ch == ' ')
                breaks = (ch in '\n\x85\u2028\u2029')
            end += 1
        self.write_indicator('\'', False)
+
    # Characters that must be written with a short backslash escape inside
    # double-quoted scalars, mapped to their escape letter.
    ESCAPE_REPLACEMENTS = {
        '\0':       '0',
        '\x07':     'a',
        '\x08':     'b',
        '\x09':     't',
        '\x0A':     'n',
        '\x0B':     'v',
        '\x0C':     'f',
        '\x0D':     'r',
        '\x1B':     'e',
        '\"':       '\"',
        '\\':       '\\',
        '\x85':     'N',
        '\xA0':     '_',
        '\u2028':   'L',
        '\u2029':   'P',
    }
+
    def write_double_quoted(self, text, split=True):
        """Write a double-quoted scalar: backslash-escape special and
        non-printable characters and (when `split`) fold long lines with a
        trailing '\\' continuation."""
        self.write_indicator('"', True)
        start = end = 0
        while end <= len(text):
            ch = None
            if end < len(text):
                ch = text[end]
            # A character needing escaping (or end of text) terminates the
            # current verbatim run.
            if ch is None or ch in '"\\\x85\u2028\u2029\uFEFF' \
                    or not ('\x20' <= ch <= '\x7E'
                        or (self.allow_unicode
                            and ('\xA0' <= ch <= '\uD7FF'
                                or '\uE000' <= ch <= '\uFFFD'))):
                if start < end:
                    data = text[start:end]
                    self.column += len(data)
                    if self.encoding:
                        data = data.encode(self.encoding)
                    self.stream.write(data)
                    start = end
                if ch is not None:
                    # Prefer the short named escape; otherwise \xXX, \uXXXX
                    # or \UXXXXXXXX depending on the code point.
                    if ch in self.ESCAPE_REPLACEMENTS:
                        data = '\\'+self.ESCAPE_REPLACEMENTS[ch]
                    elif ch <= '\xFF':
                        data = '\\x%02X' % ord(ch)
                    elif ch <= '\uFFFF':
                        data = '\\u%04X' % ord(ch)
                    else:
                        data = '\\U%08X' % ord(ch)
                    self.column += len(data)
                    if self.encoding:
                        data = data.encode(self.encoding)
                    self.stream.write(data)
                    start = end+1
            if 0 < end < len(text)-1 and (ch == ' ' or start >= end)   \
                    and self.column+(end-start) > self.best_width and split:
                # Fold the line: write the pending run plus a '\' and
                # continue on the next line.
                data = text[start:end]+'\\'
                if start < end:
                    start = end
                self.column += len(data)
                if self.encoding:
                    data = data.encode(self.encoding)
                self.stream.write(data)
                self.write_indent()
                self.whitespace = False
                self.indention = False
                if text[start] == ' ':
                    # A leading space on the continuation line must be
                    # escaped so it is not swallowed by folding.
                    data = '\\'
                    self.column += len(data)
                    if self.encoding:
                        data = data.encode(self.encoding)
                    self.stream.write(data)
            end += 1
        self.write_indicator('"', False)
+
+ def determine_block_hints(self, text):
+ hints = ''
+ if text:
+ if text[0] in ' \n\x85\u2028\u2029':
+ hints += str(self.best_indent)
+ if text[-1] not in '\n\x85\u2028\u2029':
+ hints += '-'
+ elif len(text) == 1 or text[-2] in '\n\x85\u2028\u2029':
+ hints += '+'
+ return hints
+
    def write_folded(self, text):
        """Write a folded ('>') block scalar, folding long lines at space
        runs and preserving explicit line breaks."""
        hints = self.determine_block_hints(text)
        self.write_indicator('>'+hints, True)
        if hints[-1:] == '+':
            # Keep-chomping leaves the document open-ended.
            self.open_ended = True
        self.write_line_break()
        leading_space = True
        spaces = False
        breaks = True
        start = end = 0
        while end <= len(text):
            ch = None
            if end < len(text):
                ch = text[end]
            if breaks:
                # End of a run of breaks: replay them; a '\n' doubles up
                # because folding joins single breaks on input.
                if ch is None or ch not in '\n\x85\u2028\u2029':
                    if not leading_space and ch is not None and ch != ' '   \
                            and text[start] == '\n':
                        self.write_line_break()
                    leading_space = (ch == ' ')
                    for br in text[start:end]:
                        if br == '\n':
                            self.write_line_break()
                        else:
                            self.write_line_break(br)
                    if ch is not None:
                        self.write_indent()
                    start = end
            elif spaces:
                if ch != ' ':
                    if start+1 == end and self.column > self.best_width:
                        self.write_indent()
                    else:
                        data = text[start:end]
                        self.column += len(data)
                        if self.encoding:
                            data = data.encode(self.encoding)
                        self.stream.write(data)
                    start = end
            else:
                if ch is None or ch in ' \n\x85\u2028\u2029':
                    data = text[start:end]
                    self.column += len(data)
                    if self.encoding:
                        data = data.encode(self.encoding)
                    self.stream.write(data)
                    if ch is None:
                        self.write_line_break()
                    start = end
            if ch is not None:
                breaks = (ch in '\n\x85\u2028\u2029')
                spaces = (ch == ' ')
            end += 1
+
    def write_literal(self, text):
        """Write a literal ('|') block scalar: content verbatim, one source
        line per output line, with no folding."""
        hints = self.determine_block_hints(text)
        self.write_indicator('|'+hints, True)
        if hints[-1:] == '+':
            # Keep-chomping leaves the document open-ended.
            self.open_ended = True
        self.write_line_break()
        breaks = True
        start = end = 0
        while end <= len(text):
            ch = None
            if end < len(text):
                ch = text[end]
            if breaks:
                if ch is None or ch not in '\n\x85\u2028\u2029':
                    for br in text[start:end]:
                        if br == '\n':
                            self.write_line_break()
                        else:
                            self.write_line_break(br)
                    if ch is not None:
                        self.write_indent()
                    start = end
            else:
                if ch is None or ch in '\n\x85\u2028\u2029':
                    data = text[start:end]
                    if self.encoding:
                        data = data.encode(self.encoding)
                    self.stream.write(data)
                    if ch is None:
                        self.write_line_break()
                    start = end
            if ch is not None:
                breaks = (ch in '\n\x85\u2028\u2029')
            end += 1
+
    def write_plain(self, text, split=True):
        """Write a plain (unquoted) scalar, folding long lines at space
        runs when `split` is true."""
        if self.root_context:
            # A plain scalar at the document root may need a closing '...'.
            self.open_ended = True
        if not text:
            return
        if not self.whitespace:
            # Separate from the previous token with a single space.
            data = ' '
            self.column += len(data)
            if self.encoding:
                data = data.encode(self.encoding)
            self.stream.write(data)
        self.whitespace = False
        self.indention = False
        spaces = False
        breaks = False
        start = end = 0
        while end <= len(text):
            ch = None
            if end < len(text):
                ch = text[end]
            if spaces:
                if ch != ' ':
                    if start+1 == end and self.column > self.best_width and split:
                        self.write_indent()
                        self.whitespace = False
                        self.indention = False
                    else:
                        data = text[start:end]
                        self.column += len(data)
                        if self.encoding:
                            data = data.encode(self.encoding)
                        self.stream.write(data)
                    start = end
            elif breaks:
                if ch not in '\n\x85\u2028\u2029':
                    # Replay the breaks; '\n' doubles up because folding
                    # joins single breaks on input.
                    if text[start] == '\n':
                        self.write_line_break()
                    for br in text[start:end]:
                        if br == '\n':
                            self.write_line_break()
                        else:
                            self.write_line_break(br)
                    self.write_indent()
                    self.whitespace = False
                    self.indention = False
                    start = end
            else:
                if ch is None or ch in ' \n\x85\u2028\u2029':
                    data = text[start:end]
                    self.column += len(data)
                    if self.encoding:
                        data = data.encode(self.encoding)
                    self.stream.write(data)
                    start = end
            if ch is not None:
                spaces = (ch == ' ')
                breaks = (ch in '\n\x85\u2028\u2029')
            end += 1
+
diff --git a/lib/spack/external/yaml/lib3/yaml/error.py b/lib/spack/external/yaml/lib3/yaml/error.py
new file mode 100644
index 0000000000..b796b4dc51
--- /dev/null
+++ b/lib/spack/external/yaml/lib3/yaml/error.py
@@ -0,0 +1,75 @@
+
+__all__ = ['Mark', 'YAMLError', 'MarkedYAMLError']
+
class Mark:
    """A position in a YAML stream (stream name, character index, line and
    column), keeping the buffer and pointer so error messages can show a
    snippet of the offending line."""

    def __init__(self, name, index, line, column, buffer, pointer):
        self.name = name
        self.index = index
        self.line = line
        self.column = column
        self.buffer = buffer
        self.pointer = pointer

    def get_snippet(self, indent=4, max_length=75):
        """Return an excerpt of the current line with a '^' caret under the
        marked position, or None when no buffer is available."""
        if self.buffer is None:
            return None
        head = ''
        start = self.pointer
        # Walk back to the start of the line, truncating with ' ... '.
        while start > 0 and self.buffer[start-1] not in '\0\r\n\x85\u2028\u2029':
            start -= 1
            if self.pointer-start > max_length/2-1:
                head = ' ... '
                start += 5
                break
        tail = ''
        end = self.pointer
        # Walk forward to the end of the line, truncating with ' ... '.
        while end < len(self.buffer) and self.buffer[end] not in '\0\r\n\x85\u2028\u2029':
            end += 1
            if end-self.pointer > max_length/2-1:
                tail = ' ... '
                end -= 5
                break
        snippet = self.buffer[start:end]
        return ' '*indent + head + snippet + tail + '\n'  \
                + ' '*(indent+self.pointer-start+len(head)) + '^'

    def __str__(self):
        snippet = self.get_snippet()
        where = "  in \"%s\", line %d, column %d"   \
                % (self.name, self.line+1, self.column+1)
        if snippet is not None:
            where += ":\n"+snippet
        return where
+
class YAMLError(Exception):
    """Base class for all errors raised by this package."""
    pass

class MarkedYAMLError(YAMLError):
    """A YAMLError annotated with context/problem descriptions and the
    stream positions (Marks) where they occurred."""

    def __init__(self, context=None, context_mark=None,
            problem=None, problem_mark=None, note=None):
        self.context = context
        self.context_mark = context_mark
        self.problem = problem
        self.problem_mark = problem_mark
        self.note = note

    def __str__(self):
        lines = []
        if self.context is not None:
            lines.append(self.context)
        # Skip the context mark when it would duplicate the problem mark.
        if self.context_mark is not None  \
                and (self.problem is None or self.problem_mark is None
                        or self.context_mark.name != self.problem_mark.name
                        or self.context_mark.line != self.problem_mark.line
                        or self.context_mark.column != self.problem_mark.column):
            lines.append(str(self.context_mark))
        if self.problem is not None:
            lines.append(self.problem)
        if self.problem_mark is not None:
            lines.append(str(self.problem_mark))
        if self.note is not None:
            lines.append(self.note)
        return '\n'.join(lines)
+
diff --git a/lib/spack/external/yaml/lib3/yaml/events.py b/lib/spack/external/yaml/lib3/yaml/events.py
new file mode 100644
index 0000000000..f79ad389cb
--- /dev/null
+++ b/lib/spack/external/yaml/lib3/yaml/events.py
@@ -0,0 +1,86 @@
+
+# Abstract classes.
+
class Event(object):
    """Base class for parser/emitter events; carries the source marks."""
    def __init__(self, start_mark=None, end_mark=None):
        self.start_mark = start_mark
        self.end_mark = end_mark
    def __repr__(self):
        # Show only the attributes the concrete event type defines.
        attributes = [key for key in ['anchor', 'tag', 'implicit', 'value']
                if hasattr(self, key)]
        arguments = ', '.join(['%s=%r' % (key, getattr(self, key))
                for key in attributes])
        return '%s(%s)' % (self.__class__.__name__, arguments)

class NodeEvent(Event):
    """Base for events representing a node, which may carry an anchor."""
    def __init__(self, anchor, start_mark=None, end_mark=None):
        self.anchor = anchor
        self.start_mark = start_mark
        self.end_mark = end_mark

class CollectionStartEvent(NodeEvent):
    """Base for sequence/mapping start events: tag, implicit flag and
    requested flow style."""
    def __init__(self, anchor, tag, implicit, start_mark=None, end_mark=None,
            flow_style=None):
        self.anchor = anchor
        self.tag = tag
        self.implicit = implicit
        self.start_mark = start_mark
        self.end_mark = end_mark
        self.flow_style = flow_style

class CollectionEndEvent(Event):
    """Base for sequence/mapping end events."""
    pass
+
+# Implementations.
+
class StreamStartEvent(Event):
    """Start of the stream; may carry the requested output encoding."""
    def __init__(self, start_mark=None, end_mark=None, encoding=None):
        self.start_mark = start_mark
        self.end_mark = end_mark
        self.encoding = encoding

class StreamEndEvent(Event):
    """End of the stream."""
    pass

class DocumentStartEvent(Event):
    """Start of a document: explicitness, %YAML version and %TAG handles."""
    def __init__(self, start_mark=None, end_mark=None,
            explicit=None, version=None, tags=None):
        self.start_mark = start_mark
        self.end_mark = end_mark
        self.explicit = explicit
        self.version = version
        self.tags = tags

class DocumentEndEvent(Event):
    """End of a document; `explicit` selects the '...' marker."""
    def __init__(self, start_mark=None, end_mark=None,
            explicit=None):
        self.start_mark = start_mark
        self.end_mark = end_mark
        self.explicit = explicit

class AliasEvent(NodeEvent):
    """An alias ('*anchor') referring to a previously anchored node."""
    pass

class ScalarEvent(NodeEvent):
    """A scalar node: tag, (plain, quoted) implicit flags, value and style."""
    def __init__(self, anchor, tag, implicit, value,
            start_mark=None, end_mark=None, style=None):
        self.anchor = anchor
        self.tag = tag
        self.implicit = implicit
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark
        self.style = style

class SequenceStartEvent(CollectionStartEvent):
    """Start of a sequence."""
    pass

class SequenceEndEvent(CollectionEndEvent):
    """End of a sequence."""
    pass

class MappingStartEvent(CollectionStartEvent):
    """Start of a mapping."""
    pass

class MappingEndEvent(CollectionEndEvent):
    """End of a mapping."""
    pass
+
diff --git a/lib/spack/external/yaml/lib3/yaml/loader.py b/lib/spack/external/yaml/lib3/yaml/loader.py
new file mode 100644
index 0000000000..08c8f01b34
--- /dev/null
+++ b/lib/spack/external/yaml/lib3/yaml/loader.py
@@ -0,0 +1,40 @@
+
+__all__ = ['BaseLoader', 'SafeLoader', 'Loader']
+
+from .reader import *
+from .scanner import *
+from .parser import *
+from .composer import *
+from .constructor import *
+from .resolver import *
+
class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver):
    """Loader assembled from the base constructor and base resolver mixins."""

    def __init__(self, stream):
        # Only Reader takes the stream; the other mixins initialize
        # internal state and must run in pipeline order.
        Reader.__init__(self, stream)
        for mixin in (Scanner, Parser, Composer, BaseConstructor, BaseResolver):
            mixin.__init__(self)
+
class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, Resolver):
    """Loader assembled from the safe constructor and standard resolver."""

    def __init__(self, stream):
        # Only Reader takes the stream; the other mixins initialize
        # internal state and must run in pipeline order.
        Reader.__init__(self, stream)
        for mixin in (Scanner, Parser, Composer, SafeConstructor, Resolver):
            mixin.__init__(self)
+
class Loader(Reader, Scanner, Parser, Composer, Constructor, Resolver):
    """Loader assembled from the full constructor and standard resolver."""

    def __init__(self, stream):
        # Only Reader takes the stream; the other mixins initialize
        # internal state and must run in pipeline order.
        Reader.__init__(self, stream)
        for mixin in (Scanner, Parser, Composer, Constructor, Resolver):
            mixin.__init__(self)
+
diff --git a/lib/spack/external/yaml/lib3/yaml/nodes.py b/lib/spack/external/yaml/lib3/yaml/nodes.py
new file mode 100644
index 0000000000..c4f070c41e
--- /dev/null
+++ b/lib/spack/external/yaml/lib3/yaml/nodes.py
@@ -0,0 +1,49 @@
+
class Node(object):
    """A composed YAML node: a tag string, a value, and its source marks."""

    def __init__(self, tag, value, start_mark, end_mark):
        self.tag, self.value = tag, value
        self.start_mark, self.end_mark = start_mark, end_mark

    def __repr__(self):
        # Upstream experimented with truncating long values here (see the
        # project history); the released behavior is simply the full repr.
        return '%s(tag=%r, value=%s)' % (
            self.__class__.__name__, self.tag, repr(self.value))
+
class ScalarNode(Node):
    """Leaf node holding a scalar value and its presentation style."""

    id = 'scalar'

    def __init__(self, tag, value,
                 start_mark=None, end_mark=None, style=None):
        self.tag, self.value = tag, value
        self.start_mark, self.end_mark = start_mark, end_mark
        self.style = style
+
class CollectionNode(Node):
    """Base for sequence/mapping nodes; ``flow_style`` records flow vs block."""

    def __init__(self, tag, value,
                 start_mark=None, end_mark=None, flow_style=None):
        self.tag, self.value = tag, value
        self.start_mark, self.end_mark = start_mark, end_mark
        self.flow_style = flow_style
+
class SequenceNode(CollectionNode):
    # Collection node for YAML sequences.
    id = 'sequence'
+
class MappingNode(CollectionNode):
    # Collection node for YAML mappings.
    id = 'mapping'
+
diff --git a/lib/spack/external/yaml/lib3/yaml/parser.py b/lib/spack/external/yaml/lib3/yaml/parser.py
new file mode 100644
index 0000000000..13a5995d29
--- /dev/null
+++ b/lib/spack/external/yaml/lib3/yaml/parser.py
@@ -0,0 +1,589 @@
+
+# The following YAML grammar is LL(1) and is parsed by a recursive descent
+# parser.
+#
+# stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
+# implicit_document ::= block_node DOCUMENT-END*
+# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*
+# block_node_or_indentless_sequence ::=
+# ALIAS
+# | properties (block_content | indentless_block_sequence)?
+# | block_content
+# | indentless_block_sequence
+# block_node ::= ALIAS
+# | properties block_content?
+# | block_content
+# flow_node ::= ALIAS
+# | properties flow_content?
+# | flow_content
+# properties ::= TAG ANCHOR? | ANCHOR TAG?
+# block_content ::= block_collection | flow_collection | SCALAR
+# flow_content ::= flow_collection | SCALAR
+# block_collection ::= block_sequence | block_mapping
+# flow_collection ::= flow_sequence | flow_mapping
+# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END
+# indentless_sequence ::= (BLOCK-ENTRY block_node?)+
+# block_mapping ::= BLOCK-MAPPING_START
+# ((KEY block_node_or_indentless_sequence?)?
+# (VALUE block_node_or_indentless_sequence?)?)*
+# BLOCK-END
+# flow_sequence ::= FLOW-SEQUENCE-START
+# (flow_sequence_entry FLOW-ENTRY)*
+# flow_sequence_entry?
+# FLOW-SEQUENCE-END
+# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+# flow_mapping ::= FLOW-MAPPING-START
+# (flow_mapping_entry FLOW-ENTRY)*
+# flow_mapping_entry?
+# FLOW-MAPPING-END
+# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)?
+#
+# FIRST sets:
+#
+# stream: { STREAM-START }
+# explicit_document: { DIRECTIVE DOCUMENT-START }
+# implicit_document: FIRST(block_node)
+# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START }
+# flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START }
+# block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
+# flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR }
+# block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START }
+# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
+# block_sequence: { BLOCK-SEQUENCE-START }
+# block_mapping: { BLOCK-MAPPING-START }
+# block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY }
+# indentless_sequence: { ENTRY }
+# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START }
+# flow_sequence: { FLOW-SEQUENCE-START }
+# flow_mapping: { FLOW-MAPPING-START }
+# flow_sequence_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
+# flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY }
+
+__all__ = ['Parser', 'ParserError']
+
+from .error import MarkedYAMLError
+from .tokens import *
+from .events import *
+from .scanner import *
+
class ParserError(MarkedYAMLError):
    # Raised on grammar violations; the context/problem messages and marks
    # are carried by the MarkedYAMLError base.
    pass
+
class Parser:
    """Produce parsing events from the token stream (LL(1) recursive descent).

    Instead of real recursion the parser is written as a state machine:
    ``self.state`` holds the bound method that will produce the next event,
    and ``self.states`` is an explicit stack of continuations (one per nested
    construct).  Token access (check_token/peek_token/get_token) is provided
    by the Scanner mixin of the enclosing Loader class.
    """
    # Since writing a recursive descent parser is a straightforward task, we
    # do not give many comments here.

    # Fallback tag handles, active unless overridden by %TAG directives.
    DEFAULT_TAGS = {
        '!': '!',
        '!!': 'tag:yaml.org,2002:',
    }

    def __init__(self):
        self.current_event = None   # one-event lookahead buffer
        self.yaml_version = None
        self.tag_handles = {}
        self.states = []            # stack of continuation methods
        self.marks = []             # start marks of currently open collections
        self.state = self.parse_stream_start

    def dispose(self):
        # Reset the state attributes (to clear self-references)
        self.states = []
        self.state = None

    def check_event(self, *choices):
        # Check the type of the next event.
        if self.current_event is None:
            if self.state:
                self.current_event = self.state()
        if self.current_event is not None:
            if not choices:
                return True
            for choice in choices:
                if isinstance(self.current_event, choice):
                    return True
        return False

    def peek_event(self):
        # Get the next event without consuming it.
        if self.current_event is None:
            if self.state:
                self.current_event = self.state()
        return self.current_event

    def get_event(self):
        # Get the next event and proceed further.
        if self.current_event is None:
            if self.state:
                self.current_event = self.state()
        value = self.current_event
        self.current_event = None
        return value

    # stream ::= STREAM-START implicit_document? explicit_document* STREAM-END
    # implicit_document ::= block_node DOCUMENT-END*
    # explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END*

    def parse_stream_start(self):

        # Parse the stream start.
        token = self.get_token()
        event = StreamStartEvent(token.start_mark, token.end_mark,
                encoding=token.encoding)

        # Prepare the next state.
        self.state = self.parse_implicit_document_start

        return event

    def parse_implicit_document_start(self):

        # Parse an implicit document (content with no '---' marker).
        if not self.check_token(DirectiveToken, DocumentStartToken,
                StreamEndToken):
            self.tag_handles = self.DEFAULT_TAGS
            token = self.peek_token()
            start_mark = end_mark = token.start_mark
            event = DocumentStartEvent(start_mark, end_mark,
                    explicit=False)

            # Prepare the next state.
            self.states.append(self.parse_document_end)
            self.state = self.parse_block_node

            return event

        else:
            return self.parse_document_start()

    def parse_document_start(self):

        # Parse any extra document end indicators.
        while self.check_token(DocumentEndToken):
            self.get_token()

        # Parse an explicit document.
        if not self.check_token(StreamEndToken):
            token = self.peek_token()
            start_mark = token.start_mark
            version, tags = self.process_directives()
            if not self.check_token(DocumentStartToken):
                raise ParserError(None, None,
                        "expected '<document start>', but found %r"
                        % self.peek_token().id,
                        self.peek_token().start_mark)
            token = self.get_token()
            end_mark = token.end_mark
            event = DocumentStartEvent(start_mark, end_mark,
                    explicit=True, version=version, tags=tags)
            self.states.append(self.parse_document_end)
            self.state = self.parse_document_content
        else:
            # Parse the end of the stream.
            token = self.get_token()
            event = StreamEndEvent(token.start_mark, token.end_mark)
            assert not self.states
            assert not self.marks
            self.state = None
        return event

    def parse_document_end(self):

        # Parse the document end ('...' is optional).
        token = self.peek_token()
        start_mark = end_mark = token.start_mark
        explicit = False
        if self.check_token(DocumentEndToken):
            token = self.get_token()
            end_mark = token.end_mark
            explicit = True
        event = DocumentEndEvent(start_mark, end_mark,
                explicit=explicit)

        # Prepare the next state.
        self.state = self.parse_document_start

        return event

    def parse_document_content(self):
        # An immediately-following document boundary means the document
        # body is empty: emit an empty scalar.
        if self.check_token(DirectiveToken,
                DocumentStartToken, DocumentEndToken, StreamEndToken):
            event = self.process_empty_scalar(self.peek_token().start_mark)
            self.state = self.states.pop()
            return event
        else:
            return self.parse_block_node()

    def process_directives(self):
        # Consume %YAML/%TAG directives; returns (version, tags) where
        # ``tags`` is a copy of the declared handles or None.  The default
        # handles are merged into self.tag_handles afterwards.
        self.yaml_version = None
        self.tag_handles = {}
        while self.check_token(DirectiveToken):
            token = self.get_token()
            if token.name == 'YAML':
                if self.yaml_version is not None:
                    raise ParserError(None, None,
                            "found duplicate YAML directive", token.start_mark)
                major, minor = token.value
                if major != 1:
                    raise ParserError(None, None,
                            "found incompatible YAML document (version 1.* is required)",
                            token.start_mark)
                self.yaml_version = token.value
            elif token.name == 'TAG':
                handle, prefix = token.value
                if handle in self.tag_handles:
                    raise ParserError(None, None,
                            "duplicate tag handle %r" % handle,
                            token.start_mark)
                self.tag_handles[handle] = prefix
        if self.tag_handles:
            value = self.yaml_version, self.tag_handles.copy()
        else:
            value = self.yaml_version, None
        for key in self.DEFAULT_TAGS:
            if key not in self.tag_handles:
                self.tag_handles[key] = self.DEFAULT_TAGS[key]
        return value

    # block_node_or_indentless_sequence ::= ALIAS
    #               | properties (block_content | indentless_block_sequence)?
    #               | block_content
    #               | indentless_block_sequence
    # block_node    ::= ALIAS
    #                   | properties block_content?
    #                   | block_content
    # flow_node     ::= ALIAS
    #                   | properties flow_content?
    #                   | flow_content
    # properties    ::= TAG ANCHOR? | ANCHOR TAG?
    # block_content     ::= block_collection | flow_collection | SCALAR
    # flow_content      ::= flow_collection | SCALAR
    # block_collection  ::= block_sequence | block_mapping
    # flow_collection   ::= flow_sequence | flow_mapping

    def parse_block_node(self):
        return self.parse_node(block=True)

    def parse_flow_node(self):
        return self.parse_node()

    def parse_block_node_or_indentless_sequence(self):
        return self.parse_node(block=True, indentless_sequence=True)

    def parse_node(self, block=False, indentless_sequence=False):
        # The workhorse: parses optional anchor/tag properties, then
        # dispatches on the next token to produce the node-start event.
        if self.check_token(AliasToken):
            token = self.get_token()
            event = AliasEvent(token.value, token.start_mark, token.end_mark)
            self.state = self.states.pop()
        else:
            anchor = None
            tag = None
            start_mark = end_mark = tag_mark = None
            # Properties may appear in either order: ANCHOR TAG? or TAG ANCHOR?
            if self.check_token(AnchorToken):
                token = self.get_token()
                start_mark = token.start_mark
                end_mark = token.end_mark
                anchor = token.value
                if self.check_token(TagToken):
                    token = self.get_token()
                    tag_mark = token.start_mark
                    end_mark = token.end_mark
                    tag = token.value
            elif self.check_token(TagToken):
                token = self.get_token()
                start_mark = tag_mark = token.start_mark
                end_mark = token.end_mark
                tag = token.value
                if self.check_token(AnchorToken):
                    token = self.get_token()
                    end_mark = token.end_mark
                    anchor = token.value
            if tag is not None:
                # Resolve (handle, suffix) into a full tag via the handle map.
                handle, suffix = tag
                if handle is not None:
                    if handle not in self.tag_handles:
                        raise ParserError("while parsing a node", start_mark,
                                "found undefined tag handle %r" % handle,
                                tag_mark)
                    tag = self.tag_handles[handle]+suffix
                else:
                    tag = suffix
            #if tag == '!':
            #    raise ParserError("while parsing a node", start_mark,
            #            "found non-specific tag '!'", tag_mark,
            #            "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag' and share your opinion.")
            if start_mark is None:
                start_mark = end_mark = self.peek_token().start_mark
            event = None
            implicit = (tag is None or tag == '!')
            if indentless_sequence and self.check_token(BlockEntryToken):
                end_mark = self.peek_token().end_mark
                event = SequenceStartEvent(anchor, tag, implicit,
                        start_mark, end_mark)
                self.state = self.parse_indentless_sequence_entry
            else:
                if self.check_token(ScalarToken):
                    token = self.get_token()
                    end_mark = token.end_mark
                    # ``implicit`` becomes a pair: (resolvable as plain,
                    # resolvable as non-plain).
                    if (token.plain and tag is None) or tag == '!':
                        implicit = (True, False)
                    elif tag is None:
                        implicit = (False, True)
                    else:
                        implicit = (False, False)
                    event = ScalarEvent(anchor, tag, implicit, token.value,
                            start_mark, end_mark, style=token.style)
                    self.state = self.states.pop()
                elif self.check_token(FlowSequenceStartToken):
                    end_mark = self.peek_token().end_mark
                    event = SequenceStartEvent(anchor, tag, implicit,
                            start_mark, end_mark, flow_style=True)
                    self.state = self.parse_flow_sequence_first_entry
                elif self.check_token(FlowMappingStartToken):
                    end_mark = self.peek_token().end_mark
                    event = MappingStartEvent(anchor, tag, implicit,
                            start_mark, end_mark, flow_style=True)
                    self.state = self.parse_flow_mapping_first_key
                elif block and self.check_token(BlockSequenceStartToken):
                    end_mark = self.peek_token().start_mark
                    event = SequenceStartEvent(anchor, tag, implicit,
                            start_mark, end_mark, flow_style=False)
                    self.state = self.parse_block_sequence_first_entry
                elif block and self.check_token(BlockMappingStartToken):
                    end_mark = self.peek_token().start_mark
                    event = MappingStartEvent(anchor, tag, implicit,
                            start_mark, end_mark, flow_style=False)
                    self.state = self.parse_block_mapping_first_key
                elif anchor is not None or tag is not None:
                    # Empty scalars are allowed even if a tag or an anchor is
                    # specified.
                    event = ScalarEvent(anchor, tag, (implicit, False), '',
                            start_mark, end_mark)
                    self.state = self.states.pop()
                else:
                    if block:
                        node = 'block'
                    else:
                        node = 'flow'
                    token = self.peek_token()
                    raise ParserError("while parsing a %s node" % node, start_mark,
                            "expected the node content, but found %r" % token.id,
                            token.start_mark)
        return event

    # block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END

    def parse_block_sequence_first_entry(self):
        token = self.get_token()
        self.marks.append(token.start_mark)
        return self.parse_block_sequence_entry()

    def parse_block_sequence_entry(self):
        if self.check_token(BlockEntryToken):
            token = self.get_token()
            if not self.check_token(BlockEntryToken, BlockEndToken):
                self.states.append(self.parse_block_sequence_entry)
                return self.parse_block_node()
            else:
                # '-' with no content: emit an empty scalar entry.
                self.state = self.parse_block_sequence_entry
                return self.process_empty_scalar(token.end_mark)
        if not self.check_token(BlockEndToken):
            token = self.peek_token()
            raise ParserError("while parsing a block collection", self.marks[-1],
                    "expected <block end>, but found %r" % token.id, token.start_mark)
        token = self.get_token()
        event = SequenceEndEvent(token.start_mark, token.end_mark)
        self.state = self.states.pop()
        self.marks.pop()
        return event

    # indentless_sequence ::= (BLOCK-ENTRY block_node?)+

    def parse_indentless_sequence_entry(self):
        if self.check_token(BlockEntryToken):
            token = self.get_token()
            if not self.check_token(BlockEntryToken,
                    KeyToken, ValueToken, BlockEndToken):
                self.states.append(self.parse_indentless_sequence_entry)
                return self.parse_block_node()
            else:
                self.state = self.parse_indentless_sequence_entry
                return self.process_empty_scalar(token.end_mark)
        # An indentless sequence has no closing token; end at the next token.
        token = self.peek_token()
        event = SequenceEndEvent(token.start_mark, token.start_mark)
        self.state = self.states.pop()
        return event

    # block_mapping     ::= BLOCK-MAPPING_START
    #                       ((KEY block_node_or_indentless_sequence?)?
    #                       (VALUE block_node_or_indentless_sequence?)?)*
    #                       BLOCK-END

    def parse_block_mapping_first_key(self):
        token = self.get_token()
        self.marks.append(token.start_mark)
        return self.parse_block_mapping_key()

    def parse_block_mapping_key(self):
        if self.check_token(KeyToken):
            token = self.get_token()
            if not self.check_token(KeyToken, ValueToken, BlockEndToken):
                self.states.append(self.parse_block_mapping_value)
                return self.parse_block_node_or_indentless_sequence()
            else:
                # '?' with no key content: the key is an empty scalar.
                self.state = self.parse_block_mapping_value
                return self.process_empty_scalar(token.end_mark)
        if not self.check_token(BlockEndToken):
            token = self.peek_token()
            raise ParserError("while parsing a block mapping", self.marks[-1],
                    "expected <block end>, but found %r" % token.id, token.start_mark)
        token = self.get_token()
        event = MappingEndEvent(token.start_mark, token.end_mark)
        self.state = self.states.pop()
        self.marks.pop()
        return event

    def parse_block_mapping_value(self):
        if self.check_token(ValueToken):
            token = self.get_token()
            if not self.check_token(KeyToken, ValueToken, BlockEndToken):
                self.states.append(self.parse_block_mapping_key)
                return self.parse_block_node_or_indentless_sequence()
            else:
                self.state = self.parse_block_mapping_key
                return self.process_empty_scalar(token.end_mark)
        else:
            # Missing ':' value part: the value is an empty scalar.
            self.state = self.parse_block_mapping_key
            token = self.peek_token()
            return self.process_empty_scalar(token.start_mark)

    # flow_sequence     ::= FLOW-SEQUENCE-START
    #                       (flow_sequence_entry FLOW-ENTRY)*
    #                       flow_sequence_entry?
    #                       FLOW-SEQUENCE-END
    # flow_sequence_entry   ::= flow_node | KEY flow_node? (VALUE flow_node?)?
    #
    # Note that while production rules for both flow_sequence_entry and
    # flow_mapping_entry are equal, their interpretations are different.
    # For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?`
    # generates an inline mapping (set syntax).

    def parse_flow_sequence_first_entry(self):
        token = self.get_token()
        self.marks.append(token.start_mark)
        return self.parse_flow_sequence_entry(first=True)

    def parse_flow_sequence_entry(self, first=False):
        if not self.check_token(FlowSequenceEndToken):
            if not first:
                if self.check_token(FlowEntryToken):
                    self.get_token()
                else:
                    token = self.peek_token()
                    raise ParserError("while parsing a flow sequence", self.marks[-1],
                            "expected ',' or ']', but got %r" % token.id, token.start_mark)

            if self.check_token(KeyToken):
                # Single-pair mapping inside a flow sequence: [a: b].
                token = self.peek_token()
                event = MappingStartEvent(None, None, True,
                        token.start_mark, token.end_mark,
                        flow_style=True)
                self.state = self.parse_flow_sequence_entry_mapping_key
                return event
            elif not self.check_token(FlowSequenceEndToken):
                self.states.append(self.parse_flow_sequence_entry)
                return self.parse_flow_node()
        token = self.get_token()
        event = SequenceEndEvent(token.start_mark, token.end_mark)
        self.state = self.states.pop()
        self.marks.pop()
        return event

    def parse_flow_sequence_entry_mapping_key(self):
        token = self.get_token()
        if not self.check_token(ValueToken,
                FlowEntryToken, FlowSequenceEndToken):
            self.states.append(self.parse_flow_sequence_entry_mapping_value)
            return self.parse_flow_node()
        else:
            self.state = self.parse_flow_sequence_entry_mapping_value
            return self.process_empty_scalar(token.end_mark)

    def parse_flow_sequence_entry_mapping_value(self):
        if self.check_token(ValueToken):
            token = self.get_token()
            if not self.check_token(FlowEntryToken, FlowSequenceEndToken):
                self.states.append(self.parse_flow_sequence_entry_mapping_end)
                return self.parse_flow_node()
            else:
                self.state = self.parse_flow_sequence_entry_mapping_end
                return self.process_empty_scalar(token.end_mark)
        else:
            self.state = self.parse_flow_sequence_entry_mapping_end
            token = self.peek_token()
            return self.process_empty_scalar(token.start_mark)

    def parse_flow_sequence_entry_mapping_end(self):
        self.state = self.parse_flow_sequence_entry
        token = self.peek_token()
        return MappingEndEvent(token.start_mark, token.start_mark)

    # flow_mapping  ::= FLOW-MAPPING-START
    #                   (flow_mapping_entry FLOW-ENTRY)*
    #                   flow_mapping_entry?
    #                   FLOW-MAPPING-END
    # flow_mapping_entry    ::= flow_node | KEY flow_node? (VALUE flow_node?)?

    def parse_flow_mapping_first_key(self):
        token = self.get_token()
        self.marks.append(token.start_mark)
        return self.parse_flow_mapping_key(first=True)

    def parse_flow_mapping_key(self, first=False):
        if not self.check_token(FlowMappingEndToken):
            if not first:
                if self.check_token(FlowEntryToken):
                    self.get_token()
                else:
                    token = self.peek_token()
                    raise ParserError("while parsing a flow mapping", self.marks[-1],
                            "expected ',' or '}', but got %r" % token.id, token.start_mark)
            if self.check_token(KeyToken):
                token = self.get_token()
                if not self.check_token(ValueToken,
                        FlowEntryToken, FlowMappingEndToken):
                    self.states.append(self.parse_flow_mapping_value)
                    return self.parse_flow_node()
                else:
                    self.state = self.parse_flow_mapping_value
                    return self.process_empty_scalar(token.end_mark)
            elif not self.check_token(FlowMappingEndToken):
                # A bare node in key position: its value is an empty scalar.
                self.states.append(self.parse_flow_mapping_empty_value)
                return self.parse_flow_node()
        token = self.get_token()
        event = MappingEndEvent(token.start_mark, token.end_mark)
        self.state = self.states.pop()
        self.marks.pop()
        return event

    def parse_flow_mapping_value(self):
        if self.check_token(ValueToken):
            token = self.get_token()
            if not self.check_token(FlowEntryToken, FlowMappingEndToken):
                self.states.append(self.parse_flow_mapping_key)
                return self.parse_flow_node()
            else:
                self.state = self.parse_flow_mapping_key
                return self.process_empty_scalar(token.end_mark)
        else:
            self.state = self.parse_flow_mapping_key
            token = self.peek_token()
            return self.process_empty_scalar(token.start_mark)

    def parse_flow_mapping_empty_value(self):
        self.state = self.parse_flow_mapping_key
        return self.process_empty_scalar(self.peek_token().start_mark)

    def process_empty_scalar(self, mark):
        # A zero-length plain scalar at ``mark``; used wherever the grammar
        # allows omitted content.
        return ScalarEvent(None, None, (True, False), '', mark, mark)
+
diff --git a/lib/spack/external/yaml/lib3/yaml/reader.py b/lib/spack/external/yaml/lib3/yaml/reader.py
new file mode 100644
index 0000000000..f70e920f44
--- /dev/null
+++ b/lib/spack/external/yaml/lib3/yaml/reader.py
@@ -0,0 +1,192 @@
+# This module contains abstractions for the input stream. You don't have to
+# look further; there is no pretty code here.
+#
+# We define two classes here.
+#
+# Mark(source, line, column)
+# It's just a record and its only use is producing nice error messages.
+# Parser does not use it for any other purposes.
+#
+# Reader(source, data)
+# Reader determines the encoding of `data` and converts it to unicode.
+# Reader provides the following methods and attributes:
+# reader.peek(length=1) - return the next `length` characters
+# reader.forward(length=1) - move the current position to `length` characters.
+# reader.index - the number of the current character.
+# reader.line, reader.column - the line and the column of the current character.
+
+__all__ = ['Reader', 'ReaderError']
+
+from .error import YAMLError, Mark
+
+import codecs, re
+
class ReaderError(YAMLError):
    # Raised for input that cannot be decoded, or for decoded characters
    # outside the printable range accepted by Reader.check_printable.

    def __init__(self, name, position, character, encoding, reason):
        self.name = name            # stream name, used in the error message
        self.character = character  # offending byte (bytes) or code point (int)
        self.position = position    # offset of the offending character
        self.encoding = encoding    # encoding in use when decoding failed
        self.reason = reason        # human-readable cause

    def __str__(self):
        # A bytes character indicates a decoding failure; an int indicates a
        # printable-range violation found after decoding.
        if isinstance(self.character, bytes):
            return "'%s' codec can't decode byte #x%02x: %s\n" \
                " in \"%s\", position %d" \
                % (self.encoding, ord(self.character), self.reason,
                self.name, self.position)
        else:
            return "unacceptable character #x%04x: %s\n" \
                " in \"%s\", position %d" \
                % (self.character, self.reason,
                self.name, self.position)
+
class Reader(object):
    # Reader:
    # - determines the data encoding and converts it to a unicode string,
    # - checks if characters are in allowed range,
    # - adds '\0' to the end.

    # Reader accepts
    #  - a `bytes` object,
    #  - a `str` object,
    #  - a file-like object with its `read` method returning `str`,
    #  - a file-like object with its `read` method returning `unicode`.

    # Yeah, it's ugly and slow.

    def __init__(self, stream):
        # ``buffer`` holds decoded text ready for the scanner; ``raw_buffer``
        # holds not-yet-decoded bytes pulled from the stream (None when a
        # str was given directly).  ``pointer`` indexes into ``buffer``,
        # while ``index``/``line``/``column`` track the absolute position
        # for error marks.
        self.name = None
        self.stream = None
        self.stream_pointer = 0
        self.eof = True
        self.buffer = ''
        self.pointer = 0
        self.raw_buffer = None
        self.raw_decode = None
        self.encoding = None
        self.index = 0
        self.line = 0
        self.column = 0
        if isinstance(stream, str):
            # Already-decoded input: validate and terminate it immediately.
            self.name = "<unicode string>"
            self.check_printable(stream)
            self.buffer = stream+'\0'
        elif isinstance(stream, bytes):
            self.name = "<byte string>"
            self.raw_buffer = stream
            self.determine_encoding()
        else:
            # Assumed to be a file-like object with a ``read`` method.
            self.stream = stream
            self.name = getattr(stream, 'name', "<file>")
            self.eof = False
            self.raw_buffer = None
            self.determine_encoding()

    def peek(self, index=0):
        # Return the character ``index`` positions ahead without consuming it.
        # EAFP: the buffer is only refilled on an actual miss.
        try:
            return self.buffer[self.pointer+index]
        except IndexError:
            self.update(index+1)
            return self.buffer[self.pointer+index]

    def prefix(self, length=1):
        # Return the next ``length`` characters without consuming them.
        if self.pointer+length >= len(self.buffer):
            self.update(length)
        return self.buffer[self.pointer:self.pointer+length]

    def forward(self, length=1):
        # Consume ``length`` characters, maintaining index/line/column.
        if self.pointer+length+1 >= len(self.buffer):
            self.update(length+1)
        while length:
            ch = self.buffer[self.pointer]
            self.pointer += 1
            self.index += 1
            # Any YAML line break bumps the line counter; a bare '\r' counts
            # only when not part of a '\r\n' pair.
            if ch in '\n\x85\u2028\u2029'  \
                    or (ch == '\r' and self.buffer[self.pointer] != '\n'):
                self.line += 1
                self.column = 0
            elif ch != '\uFEFF':
                # A BOM character does not advance the column.
                self.column += 1
            length -= 1

    def get_mark(self):
        # Snapshot the current position; the buffer snippet is only
        # available when reading from an in-memory string.
        if self.stream is None:
            return Mark(self.name, self.index, self.line, self.column,
                    self.buffer, self.pointer)
        else:
            return Mark(self.name, self.index, self.line, self.column,
                    None, None)

    def determine_encoding(self):
        # BOM sniffing: only the UTF-16 BOMs are detected explicitly;
        # anything else is decoded as UTF-8.
        while not self.eof and (self.raw_buffer is None or len(self.raw_buffer) < 2):
            self.update_raw()
        if isinstance(self.raw_buffer, bytes):
            if self.raw_buffer.startswith(codecs.BOM_UTF16_LE):
                self.raw_decode = codecs.utf_16_le_decode
                self.encoding = 'utf-16-le'
            elif self.raw_buffer.startswith(codecs.BOM_UTF16_BE):
                self.raw_decode = codecs.utf_16_be_decode
                self.encoding = 'utf-16-be'
            else:
                self.raw_decode = codecs.utf_8_decode
                self.encoding = 'utf-8'
        self.update(1)

    # Matches any character outside the set YAML permits in a document.
    NON_PRINTABLE = re.compile('[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uD7FF\uE000-\uFFFD]')
    def check_printable(self, data):
        match = self.NON_PRINTABLE.search(data)
        if match:
            character = match.group()
            position = self.index+(len(self.buffer)-self.pointer)+match.start()
            raise ReaderError(self.name, position, ord(character),
                    'unicode', "special characters are not allowed")

    def update(self, length):
        # Refill ``buffer`` until it holds at least ``length`` undecoded
        # characters past ``pointer``; decodes incrementally so a multi-byte
        # sequence split across reads is retried on the next pass.
        if self.raw_buffer is None:
            return
        self.buffer = self.buffer[self.pointer:]
        self.pointer = 0
        while len(self.buffer) < length:
            if not self.eof:
                self.update_raw()
            if self.raw_decode is not None:
                try:
                    data, converted = self.raw_decode(self.raw_buffer,
                            'strict', self.eof)
                except UnicodeDecodeError as exc:
                    character = self.raw_buffer[exc.start]
                    if self.stream is not None:
                        position = self.stream_pointer-len(self.raw_buffer)+exc.start
                    else:
                        position = exc.start
                    raise ReaderError(self.name, position, character,
                            exc.encoding, exc.reason)
            else:
                data = self.raw_buffer
                converted = len(data)
            self.check_printable(data)
            self.buffer += data
            self.raw_buffer = self.raw_buffer[converted:]
            if self.eof:
                # Terminate the buffer so the scanner can rely on '\0'.
                self.buffer += '\0'
                self.raw_buffer = None
                break

    def update_raw(self, size=4096):
        # Pull another chunk of raw input; an empty read marks EOF.
        data = self.stream.read(size)
        if self.raw_buffer is None:
            self.raw_buffer = data
        else:
            self.raw_buffer += data
        self.stream_pointer += len(data)
        if not data:
            self.eof = True
+
+#try:
+# import psyco
+# psyco.bind(Reader)
+#except ImportError:
+# pass
+
diff --git a/lib/spack/external/yaml/lib3/yaml/representer.py b/lib/spack/external/yaml/lib3/yaml/representer.py
new file mode 100644
index 0000000000..b9e65c5109
--- /dev/null
+++ b/lib/spack/external/yaml/lib3/yaml/representer.py
@@ -0,0 +1,387 @@
+
+__all__ = ['BaseRepresenter', 'SafeRepresenter', 'Representer',
+ 'RepresenterError']
+
+from .error import *
+from .nodes import *
+
+import datetime, sys, copyreg, types, base64, collections
+
+class RepresenterError(YAMLError):
+ pass
+
+class BaseRepresenter:
+
+ yaml_representers = {}
+ yaml_multi_representers = {}
+
+ def __init__(self, default_style=None, default_flow_style=None):
+ self.default_style = default_style
+ self.default_flow_style = default_flow_style
+ self.represented_objects = {}
+ self.object_keeper = []
+ self.alias_key = None
+
+ def represent(self, data):
+ node = self.represent_data(data)
+ self.serialize(node)
+ self.represented_objects = {}
+ self.object_keeper = []
+ self.alias_key = None
+
+ def represent_data(self, data):
+ if self.ignore_aliases(data):
+ self.alias_key = None
+ else:
+ self.alias_key = id(data)
+ if self.alias_key is not None:
+ if self.alias_key in self.represented_objects:
+ node = self.represented_objects[self.alias_key]
+ #if node is None:
+ # raise RepresenterError("recursive objects are not allowed: %r" % data)
+ return node
+ #self.represented_objects[alias_key] = None
+ self.object_keeper.append(data)
+ data_types = type(data).__mro__
+ if data_types[0] in self.yaml_representers:
+ node = self.yaml_representers[data_types[0]](self, data)
+ else:
+ for data_type in data_types:
+ if data_type in self.yaml_multi_representers:
+ node = self.yaml_multi_representers[data_type](self, data)
+ break
+ else:
+ if None in self.yaml_multi_representers:
+ node = self.yaml_multi_representers[None](self, data)
+ elif None in self.yaml_representers:
+ node = self.yaml_representers[None](self, data)
+ else:
+ node = ScalarNode(None, str(data))
+ #if alias_key is not None:
+ # self.represented_objects[alias_key] = node
+ return node
+
+ @classmethod
+ def add_representer(cls, data_type, representer):
+ if not 'yaml_representers' in cls.__dict__:
+ cls.yaml_representers = cls.yaml_representers.copy()
+ cls.yaml_representers[data_type] = representer
+
+ @classmethod
+ def add_multi_representer(cls, data_type, representer):
+ if not 'yaml_multi_representers' in cls.__dict__:
+ cls.yaml_multi_representers = cls.yaml_multi_representers.copy()
+ cls.yaml_multi_representers[data_type] = representer
+
+ def represent_scalar(self, tag, value, style=None):
+ if style is None:
+ style = self.default_style
+ node = ScalarNode(tag, value, style=style)
+ if self.alias_key is not None:
+ self.represented_objects[self.alias_key] = node
+ return node
+
+ def represent_sequence(self, tag, sequence, flow_style=None):
+ value = []
+ node = SequenceNode(tag, value, flow_style=flow_style)
+ if self.alias_key is not None:
+ self.represented_objects[self.alias_key] = node
+ best_style = True
+ for item in sequence:
+ node_item = self.represent_data(item)
+ if not (isinstance(node_item, ScalarNode) and not node_item.style):
+ best_style = False
+ value.append(node_item)
+ if flow_style is None:
+ if self.default_flow_style is not None:
+ node.flow_style = self.default_flow_style
+ else:
+ node.flow_style = best_style
+ return node
+
+ def represent_mapping(self, tag, mapping, flow_style=None):
+ value = []
+ node = MappingNode(tag, value, flow_style=flow_style)
+ if self.alias_key is not None:
+ self.represented_objects[self.alias_key] = node
+ best_style = True
+ if hasattr(mapping, 'items'):
+ mapping = list(mapping.items())
+ try:
+ mapping = sorted(mapping)
+ except TypeError:
+ pass
+ for item_key, item_value in mapping:
+ node_key = self.represent_data(item_key)
+ node_value = self.represent_data(item_value)
+ if not (isinstance(node_key, ScalarNode) and not node_key.style):
+ best_style = False
+ if not (isinstance(node_value, ScalarNode) and not node_value.style):
+ best_style = False
+ value.append((node_key, node_value))
+ if flow_style is None:
+ if self.default_flow_style is not None:
+ node.flow_style = self.default_flow_style
+ else:
+ node.flow_style = best_style
+ return node
+
+ def ignore_aliases(self, data):
+ return False
+
+class SafeRepresenter(BaseRepresenter):
+
+ def ignore_aliases(self, data):
+ if data is None:
+ return True
+ if isinstance(data, tuple) and data == ():
+ return True
+ if isinstance(data, (str, bytes, bool, int, float)):
+ return True
+
+ def represent_none(self, data):
+ return self.represent_scalar('tag:yaml.org,2002:null', 'null')
+
+ def represent_str(self, data):
+ return self.represent_scalar('tag:yaml.org,2002:str', data)
+
+ def represent_binary(self, data):
+ if hasattr(base64, 'encodebytes'):
+ data = base64.encodebytes(data).decode('ascii')
+ else:
+ data = base64.encodestring(data).decode('ascii')
+ return self.represent_scalar('tag:yaml.org,2002:binary', data, style='|')
+
+ def represent_bool(self, data):
+ if data:
+ value = 'true'
+ else:
+ value = 'false'
+ return self.represent_scalar('tag:yaml.org,2002:bool', value)
+
+ def represent_int(self, data):
+ return self.represent_scalar('tag:yaml.org,2002:int', str(data))
+
+ inf_value = 1e300
+ while repr(inf_value) != repr(inf_value*inf_value):
+ inf_value *= inf_value
+
+ def represent_float(self, data):
+ if data != data or (data == 0.0 and data == 1.0):
+ value = '.nan'
+ elif data == self.inf_value:
+ value = '.inf'
+ elif data == -self.inf_value:
+ value = '-.inf'
+ else:
+ value = repr(data).lower()
+ # Note that in some cases `repr(data)` represents a float number
+ # without the decimal parts. For instance:
+ # >>> repr(1e17)
+ # '1e17'
+ # Unfortunately, this is not a valid float representation according
+ # to the definition of the `!!float` tag. We fix this by adding
+ # '.0' before the 'e' symbol.
+ if '.' not in value and 'e' in value:
+ value = value.replace('e', '.0e', 1)
+ return self.represent_scalar('tag:yaml.org,2002:float', value)
+
+ def represent_list(self, data):
+ #pairs = (len(data) > 0 and isinstance(data, list))
+ #if pairs:
+ # for item in data:
+ # if not isinstance(item, tuple) or len(item) != 2:
+ # pairs = False
+ # break
+ #if not pairs:
+ return self.represent_sequence('tag:yaml.org,2002:seq', data)
+ #value = []
+ #for item_key, item_value in data:
+ # value.append(self.represent_mapping(u'tag:yaml.org,2002:map',
+ # [(item_key, item_value)]))
+ #return SequenceNode(u'tag:yaml.org,2002:pairs', value)
+
+ def represent_dict(self, data):
+ return self.represent_mapping('tag:yaml.org,2002:map', data)
+
+ def represent_set(self, data):
+ value = {}
+ for key in data:
+ value[key] = None
+ return self.represent_mapping('tag:yaml.org,2002:set', value)
+
+ def represent_date(self, data):
+ value = data.isoformat()
+ return self.represent_scalar('tag:yaml.org,2002:timestamp', value)
+
+ def represent_datetime(self, data):
+ value = data.isoformat(' ')
+ return self.represent_scalar('tag:yaml.org,2002:timestamp', value)
+
+ def represent_yaml_object(self, tag, data, cls, flow_style=None):
+ if hasattr(data, '__getstate__'):
+ state = data.__getstate__()
+ else:
+ state = data.__dict__.copy()
+ return self.represent_mapping(tag, state, flow_style=flow_style)
+
+ def represent_undefined(self, data):
+ raise RepresenterError("cannot represent an object: %s" % data)
+
+SafeRepresenter.add_representer(type(None),
+ SafeRepresenter.represent_none)
+
+SafeRepresenter.add_representer(str,
+ SafeRepresenter.represent_str)
+
+SafeRepresenter.add_representer(bytes,
+ SafeRepresenter.represent_binary)
+
+SafeRepresenter.add_representer(bool,
+ SafeRepresenter.represent_bool)
+
+SafeRepresenter.add_representer(int,
+ SafeRepresenter.represent_int)
+
+SafeRepresenter.add_representer(float,
+ SafeRepresenter.represent_float)
+
+SafeRepresenter.add_representer(list,
+ SafeRepresenter.represent_list)
+
+SafeRepresenter.add_representer(tuple,
+ SafeRepresenter.represent_list)
+
+SafeRepresenter.add_representer(dict,
+ SafeRepresenter.represent_dict)
+
+SafeRepresenter.add_representer(set,
+ SafeRepresenter.represent_set)
+
+SafeRepresenter.add_representer(datetime.date,
+ SafeRepresenter.represent_date)
+
+SafeRepresenter.add_representer(datetime.datetime,
+ SafeRepresenter.represent_datetime)
+
+SafeRepresenter.add_representer(None,
+ SafeRepresenter.represent_undefined)
+
+class Representer(SafeRepresenter):
+
+ def represent_complex(self, data):
+ if data.imag == 0.0:
+ data = '%r' % data.real
+ elif data.real == 0.0:
+ data = '%rj' % data.imag
+ elif data.imag > 0:
+ data = '%r+%rj' % (data.real, data.imag)
+ else:
+ data = '%r%rj' % (data.real, data.imag)
+ return self.represent_scalar('tag:yaml.org,2002:python/complex', data)
+
+ def represent_tuple(self, data):
+ return self.represent_sequence('tag:yaml.org,2002:python/tuple', data)
+
+ def represent_name(self, data):
+ name = '%s.%s' % (data.__module__, data.__name__)
+ return self.represent_scalar('tag:yaml.org,2002:python/name:'+name, '')
+
+ def represent_module(self, data):
+ return self.represent_scalar(
+ 'tag:yaml.org,2002:python/module:'+data.__name__, '')
+
+ def represent_object(self, data):
+ # We use __reduce__ API to save the data. data.__reduce__ returns
+ # a tuple of length 2-5:
+ # (function, args, state, listitems, dictitems)
+
+ # For reconstructing, we calls function(*args), then set its state,
+ # listitems, and dictitems if they are not None.
+
+ # A special case is when function.__name__ == '__newobj__'. In this
+ # case we create the object with args[0].__new__(*args).
+
+ # Another special case is when __reduce__ returns a string - we don't
+ # support it.
+
+ # We produce a !!python/object, !!python/object/new or
+ # !!python/object/apply node.
+
+ cls = type(data)
+ if cls in copyreg.dispatch_table:
+ reduce = copyreg.dispatch_table[cls](data)
+ elif hasattr(data, '__reduce_ex__'):
+ reduce = data.__reduce_ex__(2)
+ elif hasattr(data, '__reduce__'):
+ reduce = data.__reduce__()
+ else:
+ raise RepresenterError("cannot represent object: %r" % data)
+ reduce = (list(reduce)+[None]*5)[:5]
+ function, args, state, listitems, dictitems = reduce
+ args = list(args)
+ if state is None:
+ state = {}
+ if listitems is not None:
+ listitems = list(listitems)
+ if dictitems is not None:
+ dictitems = dict(dictitems)
+ if function.__name__ == '__newobj__':
+ function = args[0]
+ args = args[1:]
+ tag = 'tag:yaml.org,2002:python/object/new:'
+ newobj = True
+ else:
+ tag = 'tag:yaml.org,2002:python/object/apply:'
+ newobj = False
+ function_name = '%s.%s' % (function.__module__, function.__name__)
+ if not args and not listitems and not dictitems \
+ and isinstance(state, dict) and newobj:
+ return self.represent_mapping(
+ 'tag:yaml.org,2002:python/object:'+function_name, state)
+ if not listitems and not dictitems \
+ and isinstance(state, dict) and not state:
+ return self.represent_sequence(tag+function_name, args)
+ value = {}
+ if args:
+ value['args'] = args
+ if state or not isinstance(state, dict):
+ value['state'] = state
+ if listitems:
+ value['listitems'] = listitems
+ if dictitems:
+ value['dictitems'] = dictitems
+ return self.represent_mapping(tag+function_name, value)
+
+ def represent_ordered_dict(self, data):
+ # Provide uniform representation across different Python versions.
+ data_type = type(data)
+ tag = 'tag:yaml.org,2002:python/object/apply:%s.%s' \
+ % (data_type.__module__, data_type.__name__)
+ items = [[key, value] for key, value in data.items()]
+ return self.represent_sequence(tag, [items])
+
+Representer.add_representer(complex,
+ Representer.represent_complex)
+
+Representer.add_representer(tuple,
+ Representer.represent_tuple)
+
+Representer.add_representer(type,
+ Representer.represent_name)
+
+Representer.add_representer(collections.OrderedDict,
+ Representer.represent_ordered_dict)
+
+Representer.add_representer(types.FunctionType,
+ Representer.represent_name)
+
+Representer.add_representer(types.BuiltinFunctionType,
+ Representer.represent_name)
+
+Representer.add_representer(types.ModuleType,
+ Representer.represent_module)
+
+Representer.add_multi_representer(object,
+ Representer.represent_object)
+
diff --git a/lib/spack/external/yaml/lib3/yaml/resolver.py b/lib/spack/external/yaml/lib3/yaml/resolver.py
new file mode 100644
index 0000000000..02b82e73ee
--- /dev/null
+++ b/lib/spack/external/yaml/lib3/yaml/resolver.py
@@ -0,0 +1,227 @@
+
+__all__ = ['BaseResolver', 'Resolver']
+
+from .error import *
+from .nodes import *
+
+import re
+
+class ResolverError(YAMLError):
+ pass
+
+class BaseResolver:
+
+ DEFAULT_SCALAR_TAG = 'tag:yaml.org,2002:str'
+ DEFAULT_SEQUENCE_TAG = 'tag:yaml.org,2002:seq'
+ DEFAULT_MAPPING_TAG = 'tag:yaml.org,2002:map'
+
+ yaml_implicit_resolvers = {}
+ yaml_path_resolvers = {}
+
+ def __init__(self):
+ self.resolver_exact_paths = []
+ self.resolver_prefix_paths = []
+
+ @classmethod
+ def add_implicit_resolver(cls, tag, regexp, first):
+ if not 'yaml_implicit_resolvers' in cls.__dict__:
+ implicit_resolvers = {}
+ for key in cls.yaml_implicit_resolvers:
+ implicit_resolvers[key] = cls.yaml_implicit_resolvers[key][:]
+ cls.yaml_implicit_resolvers = implicit_resolvers
+ if first is None:
+ first = [None]
+ for ch in first:
+ cls.yaml_implicit_resolvers.setdefault(ch, []).append((tag, regexp))
+
+ @classmethod
+ def add_path_resolver(cls, tag, path, kind=None):
+ # Note: `add_path_resolver` is experimental. The API could be changed.
+ # `new_path` is a pattern that is matched against the path from the
+ # root to the node that is being considered. `node_path` elements are
+ # tuples `(node_check, index_check)`. `node_check` is a node class:
+ # `ScalarNode`, `SequenceNode`, `MappingNode` or `None`. `None`
+ # matches any kind of a node. `index_check` could be `None`, a boolean
+ # value, a string value, or a number. `None` and `False` match against
+ # any _value_ of sequence and mapping nodes. `True` matches against
+ # any _key_ of a mapping node. A string `index_check` matches against
+ # a mapping value that corresponds to a scalar key which content is
+ # equal to the `index_check` value. An integer `index_check` matches
+ # against a sequence value with the index equal to `index_check`.
+ if not 'yaml_path_resolvers' in cls.__dict__:
+ cls.yaml_path_resolvers = cls.yaml_path_resolvers.copy()
+ new_path = []
+ for element in path:
+ if isinstance(element, (list, tuple)):
+ if len(element) == 2:
+ node_check, index_check = element
+ elif len(element) == 1:
+ node_check = element[0]
+ index_check = True
+ else:
+ raise ResolverError("Invalid path element: %s" % element)
+ else:
+ node_check = None
+ index_check = element
+ if node_check is str:
+ node_check = ScalarNode
+ elif node_check is list:
+ node_check = SequenceNode
+ elif node_check is dict:
+ node_check = MappingNode
+ elif node_check not in [ScalarNode, SequenceNode, MappingNode] \
+ and not isinstance(node_check, str) \
+ and node_check is not None:
+ raise ResolverError("Invalid node checker: %s" % node_check)
+ if not isinstance(index_check, (str, int)) \
+ and index_check is not None:
+ raise ResolverError("Invalid index checker: %s" % index_check)
+ new_path.append((node_check, index_check))
+ if kind is str:
+ kind = ScalarNode
+ elif kind is list:
+ kind = SequenceNode
+ elif kind is dict:
+ kind = MappingNode
+ elif kind not in [ScalarNode, SequenceNode, MappingNode] \
+ and kind is not None:
+ raise ResolverError("Invalid node kind: %s" % kind)
+ cls.yaml_path_resolvers[tuple(new_path), kind] = tag
+
+ def descend_resolver(self, current_node, current_index):
+ if not self.yaml_path_resolvers:
+ return
+ exact_paths = {}
+ prefix_paths = []
+ if current_node:
+ depth = len(self.resolver_prefix_paths)
+ for path, kind in self.resolver_prefix_paths[-1]:
+ if self.check_resolver_prefix(depth, path, kind,
+ current_node, current_index):
+ if len(path) > depth:
+ prefix_paths.append((path, kind))
+ else:
+ exact_paths[kind] = self.yaml_path_resolvers[path, kind]
+ else:
+ for path, kind in self.yaml_path_resolvers:
+ if not path:
+ exact_paths[kind] = self.yaml_path_resolvers[path, kind]
+ else:
+ prefix_paths.append((path, kind))
+ self.resolver_exact_paths.append(exact_paths)
+ self.resolver_prefix_paths.append(prefix_paths)
+
+ def ascend_resolver(self):
+ if not self.yaml_path_resolvers:
+ return
+ self.resolver_exact_paths.pop()
+ self.resolver_prefix_paths.pop()
+
+ def check_resolver_prefix(self, depth, path, kind,
+ current_node, current_index):
+ node_check, index_check = path[depth-1]
+ if isinstance(node_check, str):
+ if current_node.tag != node_check:
+ return
+ elif node_check is not None:
+ if not isinstance(current_node, node_check):
+ return
+ if index_check is True and current_index is not None:
+ return
+ if (index_check is False or index_check is None) \
+ and current_index is None:
+ return
+ if isinstance(index_check, str):
+ if not (isinstance(current_index, ScalarNode)
+ and index_check == current_index.value):
+ return
+ elif isinstance(index_check, int) and not isinstance(index_check, bool):
+ if index_check != current_index:
+ return
+ return True
+
+ def resolve(self, kind, value, implicit):
+ if kind is ScalarNode and implicit[0]:
+ if value == '':
+ resolvers = self.yaml_implicit_resolvers.get('', [])
+ else:
+ resolvers = self.yaml_implicit_resolvers.get(value[0], [])
+ resolvers += self.yaml_implicit_resolvers.get(None, [])
+ for tag, regexp in resolvers:
+ if regexp.match(value):
+ return tag
+ implicit = implicit[1]
+ if self.yaml_path_resolvers:
+ exact_paths = self.resolver_exact_paths[-1]
+ if kind in exact_paths:
+ return exact_paths[kind]
+ if None in exact_paths:
+ return exact_paths[None]
+ if kind is ScalarNode:
+ return self.DEFAULT_SCALAR_TAG
+ elif kind is SequenceNode:
+ return self.DEFAULT_SEQUENCE_TAG
+ elif kind is MappingNode:
+ return self.DEFAULT_MAPPING_TAG
+
+class Resolver(BaseResolver):
+ pass
+
+Resolver.add_implicit_resolver(
+ 'tag:yaml.org,2002:bool',
+ re.compile(r'''^(?:yes|Yes|YES|no|No|NO
+ |true|True|TRUE|false|False|FALSE
+ |on|On|ON|off|Off|OFF)$''', re.X),
+ list('yYnNtTfFoO'))
+
+Resolver.add_implicit_resolver(
+ 'tag:yaml.org,2002:float',
+ re.compile(r'''^(?:[-+]?(?:[0-9][0-9_]*)\.[0-9_]*(?:[eE][-+][0-9]+)?
+ |\.[0-9_]+(?:[eE][-+][0-9]+)?
+ |[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*
+ |[-+]?\.(?:inf|Inf|INF)
+ |\.(?:nan|NaN|NAN))$''', re.X),
+ list('-+0123456789.'))
+
+Resolver.add_implicit_resolver(
+ 'tag:yaml.org,2002:int',
+ re.compile(r'''^(?:[-+]?0b[0-1_]+
+ |[-+]?0[0-7_]+
+ |[-+]?(?:0|[1-9][0-9_]*)
+ |[-+]?0x[0-9a-fA-F_]+
+ |[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$''', re.X),
+ list('-+0123456789'))
+
+Resolver.add_implicit_resolver(
+ 'tag:yaml.org,2002:merge',
+ re.compile(r'^(?:<<)$'),
+ ['<'])
+
+Resolver.add_implicit_resolver(
+ 'tag:yaml.org,2002:null',
+ re.compile(r'''^(?: ~
+ |null|Null|NULL
+ | )$''', re.X),
+ ['~', 'n', 'N', ''])
+
+Resolver.add_implicit_resolver(
+ 'tag:yaml.org,2002:timestamp',
+ re.compile(r'''^(?:[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9]
+ |[0-9][0-9][0-9][0-9] -[0-9][0-9]? -[0-9][0-9]?
+ (?:[Tt]|[ \t]+)[0-9][0-9]?
+ :[0-9][0-9] :[0-9][0-9] (?:\.[0-9]*)?
+ (?:[ \t]*(?:Z|[-+][0-9][0-9]?(?::[0-9][0-9])?))?)$''', re.X),
+ list('0123456789'))
+
+Resolver.add_implicit_resolver(
+ 'tag:yaml.org,2002:value',
+ re.compile(r'^(?:=)$'),
+ ['='])
+
+# The following resolver is only for documentation purposes. It cannot work
+# because plain scalars cannot start with '!', '&', or '*'.
+Resolver.add_implicit_resolver(
+ 'tag:yaml.org,2002:yaml',
+ re.compile(r'^(?:!|&|\*)$'),
+ list('!&*'))
+
diff --git a/lib/spack/external/yaml/lib3/yaml/scanner.py b/lib/spack/external/yaml/lib3/yaml/scanner.py
new file mode 100644
index 0000000000..c8d127b8ec
--- /dev/null
+++ b/lib/spack/external/yaml/lib3/yaml/scanner.py
@@ -0,0 +1,1444 @@
+
+# Scanner produces tokens of the following types:
+# STREAM-START
+# STREAM-END
+# DIRECTIVE(name, value)
+# DOCUMENT-START
+# DOCUMENT-END
+# BLOCK-SEQUENCE-START
+# BLOCK-MAPPING-START
+# BLOCK-END
+# FLOW-SEQUENCE-START
+# FLOW-MAPPING-START
+# FLOW-SEQUENCE-END
+# FLOW-MAPPING-END
+# BLOCK-ENTRY
+# FLOW-ENTRY
+# KEY
+# VALUE
+# ALIAS(value)
+# ANCHOR(value)
+# TAG(value)
+# SCALAR(value, plain, style)
+#
+# Read comments in the Scanner code for more details.
+#
+
+__all__ = ['Scanner', 'ScannerError']
+
+from .error import MarkedYAMLError
+from .tokens import *
+
+class ScannerError(MarkedYAMLError):
+ pass
+
+class SimpleKey:
+ # See below simple keys treatment.
+
+ def __init__(self, token_number, required, index, line, column, mark):
+ self.token_number = token_number
+ self.required = required
+ self.index = index
+ self.line = line
+ self.column = column
+ self.mark = mark
+
+class Scanner:
+
+ def __init__(self):
+ """Initialize the scanner."""
+ # It is assumed that Scanner and Reader will have a common descendant.
+ # Reader do the dirty work of checking for BOM and converting the
+ # input data to Unicode. It also adds NUL to the end.
+ #
+ # Reader supports the following methods
+ # self.peek(i=0) # peek the next i-th character
+ # self.prefix(l=1) # peek the next l characters
+ # self.forward(l=1) # read the next l characters and move the pointer.
+
+ # Had we reached the end of the stream?
+ self.done = False
+
+ # The number of unclosed '{' and '['. `flow_level == 0` means block
+ # context.
+ self.flow_level = 0
+
+ # List of processed tokens that are not yet emitted.
+ self.tokens = []
+
+ # Add the STREAM-START token.
+ self.fetch_stream_start()
+
+ # Number of tokens that were emitted through the `get_token` method.
+ self.tokens_taken = 0
+
+ # The current indentation level.
+ self.indent = -1
+
+ # Past indentation levels.
+ self.indents = []
+
+ # Variables related to simple keys treatment.
+
+ # A simple key is a key that is not denoted by the '?' indicator.
+ # Example of simple keys:
+ # ---
+ # block simple key: value
+ # ? not a simple key:
+ # : { flow simple key: value }
+ # We emit the KEY token before all keys, so when we find a potential
+ # simple key, we try to locate the corresponding ':' indicator.
+ # Simple keys should be limited to a single line and 1024 characters.
+
+ # Can a simple key start at the current position? A simple key may
+ # start:
+ # - at the beginning of the line, not counting indentation spaces
+ # (in block context),
+ # - after '{', '[', ',' (in the flow context),
+ # - after '?', ':', '-' (in the block context).
+ # In the block context, this flag also signifies if a block collection
+ # may start at the current position.
+ self.allow_simple_key = True
+
+ # Keep track of possible simple keys. This is a dictionary. The key
+ # is `flow_level`; there can be no more that one possible simple key
+ # for each level. The value is a SimpleKey record:
+ # (token_number, required, index, line, column, mark)
+ # A simple key may start with ALIAS, ANCHOR, TAG, SCALAR(flow),
+ # '[', or '{' tokens.
+ self.possible_simple_keys = {}
+
+ # Public methods.
+
+ def check_token(self, *choices):
+ # Check if the next token is one of the given types.
+ while self.need_more_tokens():
+ self.fetch_more_tokens()
+ if self.tokens:
+ if not choices:
+ return True
+ for choice in choices:
+ if isinstance(self.tokens[0], choice):
+ return True
+ return False
+
+ def peek_token(self):
+ # Return the next token, but do not delete if from the queue.
+ while self.need_more_tokens():
+ self.fetch_more_tokens()
+ if self.tokens:
+ return self.tokens[0]
+
+ def get_token(self):
+ # Return the next token.
+ while self.need_more_tokens():
+ self.fetch_more_tokens()
+ if self.tokens:
+ self.tokens_taken += 1
+ return self.tokens.pop(0)
+
+ # Private methods.
+
+ def need_more_tokens(self):
+ if self.done:
+ return False
+ if not self.tokens:
+ return True
+ # The current token may be a potential simple key, so we
+ # need to look further.
+ self.stale_possible_simple_keys()
+ if self.next_possible_simple_key() == self.tokens_taken:
+ return True
+
+ def fetch_more_tokens(self):
+
+ # Eat whitespaces and comments until we reach the next token.
+ self.scan_to_next_token()
+
+ # Remove obsolete possible simple keys.
+ self.stale_possible_simple_keys()
+
+ # Compare the current indentation and column. It may add some tokens
+ # and decrease the current indentation level.
+ self.unwind_indent(self.column)
+
+ # Peek the next character.
+ ch = self.peek()
+
+ # Is it the end of stream?
+ if ch == '\0':
+ return self.fetch_stream_end()
+
+ # Is it a directive?
+ if ch == '%' and self.check_directive():
+ return self.fetch_directive()
+
+ # Is it the document start?
+ if ch == '-' and self.check_document_start():
+ return self.fetch_document_start()
+
+ # Is it the document end?
+ if ch == '.' and self.check_document_end():
+ return self.fetch_document_end()
+
+ # TODO: support for BOM within a stream.
+ #if ch == '\uFEFF':
+ # return self.fetch_bom() <-- issue BOMToken
+
+ # Note: the order of the following checks is NOT significant.
+
+ # Is it the flow sequence start indicator?
+ if ch == '[':
+ return self.fetch_flow_sequence_start()
+
+ # Is it the flow mapping start indicator?
+ if ch == '{':
+ return self.fetch_flow_mapping_start()
+
+ # Is it the flow sequence end indicator?
+ if ch == ']':
+ return self.fetch_flow_sequence_end()
+
+ # Is it the flow mapping end indicator?
+ if ch == '}':
+ return self.fetch_flow_mapping_end()
+
+ # Is it the flow entry indicator?
+ if ch == ',':
+ return self.fetch_flow_entry()
+
+ # Is it the block entry indicator?
+ if ch == '-' and self.check_block_entry():
+ return self.fetch_block_entry()
+
+ # Is it the key indicator?
+ if ch == '?' and self.check_key():
+ return self.fetch_key()
+
+ # Is it the value indicator?
+ if ch == ':' and self.check_value():
+ return self.fetch_value()
+
+ # Is it an alias?
+ if ch == '*':
+ return self.fetch_alias()
+
+ # Is it an anchor?
+ if ch == '&':
+ return self.fetch_anchor()
+
+ # Is it a tag?
+ if ch == '!':
+ return self.fetch_tag()
+
+ # Is it a literal scalar?
+ if ch == '|' and not self.flow_level:
+ return self.fetch_literal()
+
+ # Is it a folded scalar?
+ if ch == '>' and not self.flow_level:
+ return self.fetch_folded()
+
+ # Is it a single quoted scalar?
+ if ch == '\'':
+ return self.fetch_single()
+
+ # Is it a double quoted scalar?
+ if ch == '\"':
+ return self.fetch_double()
+
+ # It must be a plain scalar then.
+ if self.check_plain():
+ return self.fetch_plain()
+
+ # No? It's an error. Let's produce a nice error message.
+ raise ScannerError("while scanning for the next token", None,
+ "found character %r that cannot start any token" % ch,
+ self.get_mark())
+
+ # Simple keys treatment.
+
+ def next_possible_simple_key(self):
+ # Return the number of the nearest possible simple key. Actually we
+ # don't need to loop through the whole dictionary. We may replace it
+ # with the following code:
+ # if not self.possible_simple_keys:
+ # return None
+ # return self.possible_simple_keys[
+ # min(self.possible_simple_keys.keys())].token_number
+ min_token_number = None
+ for level in self.possible_simple_keys:
+ key = self.possible_simple_keys[level]
+ if min_token_number is None or key.token_number < min_token_number:
+ min_token_number = key.token_number
+ return min_token_number
+
+ def stale_possible_simple_keys(self):
+ # Remove entries that are no longer possible simple keys. According to
+ # the YAML specification, simple keys
+ # - should be limited to a single line,
+ # - should be no longer than 1024 characters.
+ # Disabling this procedure will allow simple keys of any length and
+ # height (may cause problems if indentation is broken though).
+ for level in list(self.possible_simple_keys):
+ key = self.possible_simple_keys[level]
+ if key.line != self.line \
+ or self.index-key.index > 1024:
+ if key.required:
+ raise ScannerError("while scanning a simple key", key.mark,
+ "could not find expected ':'", self.get_mark())
+ del self.possible_simple_keys[level]
+
+ def save_possible_simple_key(self):
+ # The next token may start a simple key. We check if it's possible
+ # and save its position. This function is called for
+ # ALIAS, ANCHOR, TAG, SCALAR(flow), '[', and '{'.
+
+ # Check if a simple key is required at the current position.
+ required = not self.flow_level and self.indent == self.column
+
+ # The next token might be a simple key. Let's save it's number and
+ # position.
+ if self.allow_simple_key:
+ self.remove_possible_simple_key()
+ token_number = self.tokens_taken+len(self.tokens)
+ key = SimpleKey(token_number, required,
+ self.index, self.line, self.column, self.get_mark())
+ self.possible_simple_keys[self.flow_level] = key
+
+ def remove_possible_simple_key(self):
+ # Remove the saved possible key position at the current flow level.
+ if self.flow_level in self.possible_simple_keys:
+ key = self.possible_simple_keys[self.flow_level]
+
+ if key.required:
+ raise ScannerError("while scanning a simple key", key.mark,
+ "could not find expected ':'", self.get_mark())
+
+ del self.possible_simple_keys[self.flow_level]
+
+ # Indentation functions.
+
+ def unwind_indent(self, column):
+
+ ## In flow context, tokens should respect indentation.
+ ## Actually the condition should be `self.indent >= column` according to
+ ## the spec. But this condition will prohibit intuitively correct
+ ## constructions such as
+ ## key : {
+ ## }
+ #if self.flow_level and self.indent > column:
+ # raise ScannerError(None, None,
+ # "invalid intendation or unclosed '[' or '{'",
+ # self.get_mark())
+
+ # In the flow context, indentation is ignored. We make the scanner less
+ # restrictive then specification requires.
+ if self.flow_level:
+ return
+
+ # In block context, we may need to issue the BLOCK-END tokens.
+ while self.indent > column:
+ mark = self.get_mark()
+ self.indent = self.indents.pop()
+ self.tokens.append(BlockEndToken(mark, mark))
+
+ def add_indent(self, column):
+ # Check if we need to increase indentation.
+ if self.indent < column:
+ self.indents.append(self.indent)
+ self.indent = column
+ return True
+ return False
+
+ # Fetchers.
+
+ def fetch_stream_start(self):
+ # We always add STREAM-START as the first token and STREAM-END as the
+ # last token.
+
+ # Read the token.
+ mark = self.get_mark()
+
+ # Add STREAM-START.
+ self.tokens.append(StreamStartToken(mark, mark,
+ encoding=self.encoding))
+
+
+ def fetch_stream_end(self):
+
+ # Set the current intendation to -1.
+ self.unwind_indent(-1)
+
+ # Reset simple keys.
+ self.remove_possible_simple_key()
+ self.allow_simple_key = False
+ self.possible_simple_keys = {}
+
+ # Read the token.
+ mark = self.get_mark()
+
+ # Add STREAM-END.
+ self.tokens.append(StreamEndToken(mark, mark))
+
+ # The steam is finished.
+ self.done = True
+
+ def fetch_directive(self):
+
+ # Set the current intendation to -1.
+ self.unwind_indent(-1)
+
+ # Reset simple keys.
+ self.remove_possible_simple_key()
+ self.allow_simple_key = False
+
+ # Scan and add DIRECTIVE.
+ self.tokens.append(self.scan_directive())
+
+ def fetch_document_start(self):
+ self.fetch_document_indicator(DocumentStartToken)
+
+ def fetch_document_end(self):
+ self.fetch_document_indicator(DocumentEndToken)
+
+ def fetch_document_indicator(self, TokenClass):
+
+ # Set the current intendation to -1.
+ self.unwind_indent(-1)
+
+ # Reset simple keys. Note that there could not be a block collection
+ # after '---'.
+ self.remove_possible_simple_key()
+ self.allow_simple_key = False
+
+ # Add DOCUMENT-START or DOCUMENT-END.
+ start_mark = self.get_mark()
+ self.forward(3)
+ end_mark = self.get_mark()
+ self.tokens.append(TokenClass(start_mark, end_mark))
+
+    def fetch_flow_sequence_start(self):
+        self.fetch_flow_collection_start(FlowSequenceStartToken)
+
+    def fetch_flow_mapping_start(self):
+        self.fetch_flow_collection_start(FlowMappingStartToken)
+
+    def fetch_flow_collection_start(self, TokenClass):
+        # Shared handler for '[' and '{'.
+
+        # '[' and '{' may start a simple key.
+        self.save_possible_simple_key()
+
+        # Increase the flow level.
+        self.flow_level += 1
+
+        # Simple keys are allowed after '[' and '{'.
+        self.allow_simple_key = True
+
+        # Add FLOW-SEQUENCE-START or FLOW-MAPPING-START.
+        start_mark = self.get_mark()
+        self.forward()
+        end_mark = self.get_mark()
+        self.tokens.append(TokenClass(start_mark, end_mark))
+
+    def fetch_flow_sequence_end(self):
+        self.fetch_flow_collection_end(FlowSequenceEndToken)
+
+    def fetch_flow_mapping_end(self):
+        self.fetch_flow_collection_end(FlowMappingEndToken)
+
+    def fetch_flow_collection_end(self, TokenClass):
+        # Shared handler for ']' and '}'.
+
+        # Reset possible simple key on the current level.
+        self.remove_possible_simple_key()
+
+        # Decrease the flow level.
+        self.flow_level -= 1
+
+        # No simple keys after ']' or '}'.
+        self.allow_simple_key = False
+
+        # Add FLOW-SEQUENCE-END or FLOW-MAPPING-END.
+        start_mark = self.get_mark()
+        self.forward()
+        end_mark = self.get_mark()
+        self.tokens.append(TokenClass(start_mark, end_mark))
+
+    def fetch_flow_entry(self):
+        # Handler for ',' inside a flow collection.
+
+        # Simple keys are allowed after ','.
+        self.allow_simple_key = True
+
+        # Reset possible simple key on the current level.
+        self.remove_possible_simple_key()
+
+        # Add FLOW-ENTRY.
+        start_mark = self.get_mark()
+        self.forward()
+        end_mark = self.get_mark()
+        self.tokens.append(FlowEntryToken(start_mark, end_mark))
+
+    def fetch_block_entry(self):
+        # Handler for '-' starting a block sequence entry.
+
+        # Block context needs additional checks.
+        if not self.flow_level:
+
+            # Are we allowed to start a new entry?
+            if not self.allow_simple_key:
+                raise ScannerError(None, None,
+                        "sequence entries are not allowed here",
+                        self.get_mark())
+
+            # We may need to add BLOCK-SEQUENCE-START.
+            if self.add_indent(self.column):
+                mark = self.get_mark()
+                self.tokens.append(BlockSequenceStartToken(mark, mark))
+
+        # It's an error for the block entry to occur in the flow context,
+        # but we let the parser detect this.
+        else:
+            pass
+
+        # Simple keys are allowed after '-'.
+        self.allow_simple_key = True
+
+        # Reset possible simple key on the current level.
+        self.remove_possible_simple_key()
+
+        # Add BLOCK-ENTRY.
+        start_mark = self.get_mark()
+        self.forward()
+        end_mark = self.get_mark()
+        self.tokens.append(BlockEntryToken(start_mark, end_mark))
+
+    def fetch_key(self):
+        # Handler for the explicit '?' key indicator.
+
+        # Block context needs additional checks.
+        if not self.flow_level:
+
+            # Are we allowed to start a key (not necessarily a simple one)?
+            if not self.allow_simple_key:
+                raise ScannerError(None, None,
+                        "mapping keys are not allowed here",
+                        self.get_mark())
+
+            # We may need to add BLOCK-MAPPING-START.
+            if self.add_indent(self.column):
+                mark = self.get_mark()
+                self.tokens.append(BlockMappingStartToken(mark, mark))
+
+        # Simple keys are allowed after '?' in the block context.
+        self.allow_simple_key = not self.flow_level
+
+        # Reset possible simple key on the current level.
+        self.remove_possible_simple_key()
+
+        # Add KEY.
+        start_mark = self.get_mark()
+        self.forward()
+        end_mark = self.get_mark()
+        self.tokens.append(KeyToken(start_mark, end_mark))
+
+    def fetch_value(self):
+        # Handler for ':'. May retroactively insert a KEY token (and a
+        # BLOCK-MAPPING-START) for a pending simple key.
+
+        # Do we determine a simple key?
+        if self.flow_level in self.possible_simple_keys:
+
+            # Add KEY.
+            key = self.possible_simple_keys[self.flow_level]
+            del self.possible_simple_keys[self.flow_level]
+            self.tokens.insert(key.token_number-self.tokens_taken,
+                    KeyToken(key.mark, key.mark))
+
+            # If this key starts a new block mapping, we need to add
+            # BLOCK-MAPPING-START.
+            if not self.flow_level:
+                if self.add_indent(key.column):
+                    self.tokens.insert(key.token_number-self.tokens_taken,
+                            BlockMappingStartToken(key.mark, key.mark))
+
+            # There cannot be two simple keys one after another.
+            self.allow_simple_key = False
+
+        # It must be a part of a complex key.
+        else:
+
+            # Block context needs additional checks.
+            # (Do we really need them? They will be caught by the parser
+            # anyway.)
+            if not self.flow_level:
+
+                # We are allowed to start a complex value if and only if
+                # we can start a simple key.
+                if not self.allow_simple_key:
+                    raise ScannerError(None, None,
+                            "mapping values are not allowed here",
+                            self.get_mark())
+
+            # If this value starts a new block mapping, we need to add
+            # BLOCK-MAPPING-START.  It will be detected as an error later by
+            # the parser.
+            if not self.flow_level:
+                if self.add_indent(self.column):
+                    mark = self.get_mark()
+                    self.tokens.append(BlockMappingStartToken(mark, mark))
+
+        # Simple keys are allowed after ':' in the block context.
+        self.allow_simple_key = not self.flow_level
+
+        # Reset possible simple key on the current level.
+        self.remove_possible_simple_key()
+
+        # Add VALUE.
+        start_mark = self.get_mark()
+        self.forward()
+        end_mark = self.get_mark()
+        self.tokens.append(ValueToken(start_mark, end_mark))
+
+    def fetch_alias(self):
+        # Handler for '*'.
+
+        # ALIAS could be a simple key.
+        self.save_possible_simple_key()
+
+        # No simple keys after ALIAS.
+        self.allow_simple_key = False
+
+        # Scan and add ALIAS.
+        self.tokens.append(self.scan_anchor(AliasToken))
+
+    def fetch_anchor(self):
+        # Handler for '&'.
+
+        # ANCHOR could start a simple key.
+        self.save_possible_simple_key()
+
+        # No simple keys after ANCHOR.
+        self.allow_simple_key = False
+
+        # Scan and add ANCHOR.
+        self.tokens.append(self.scan_anchor(AnchorToken))
+
+    def fetch_tag(self):
+        # Handler for '!'.
+
+        # TAG could start a simple key.
+        self.save_possible_simple_key()
+
+        # No simple keys after TAG.
+        self.allow_simple_key = False
+
+        # Scan and add TAG.
+        self.tokens.append(self.scan_tag())
+
+    def fetch_literal(self):
+        self.fetch_block_scalar(style='|')
+
+    def fetch_folded(self):
+        self.fetch_block_scalar(style='>')
+
+    def fetch_block_scalar(self, style):
+        # Shared handler for '|' and '>' block scalars.
+
+        # A simple key may follow a block scalar.
+        self.allow_simple_key = True
+
+        # Reset possible simple key on the current level.
+        self.remove_possible_simple_key()
+
+        # Scan and add SCALAR.
+        self.tokens.append(self.scan_block_scalar(style))
+
+    def fetch_single(self):
+        self.fetch_flow_scalar(style='\'')
+
+    def fetch_double(self):
+        self.fetch_flow_scalar(style='"')
+
+    def fetch_flow_scalar(self, style):
+        # Shared handler for single- and double-quoted scalars.
+
+        # A flow scalar could be a simple key.
+        self.save_possible_simple_key()
+
+        # No simple keys after flow scalars.
+        self.allow_simple_key = False
+
+        # Scan and add SCALAR.
+        self.tokens.append(self.scan_flow_scalar(style))
+
+    def fetch_plain(self):
+        # Handler for an unquoted (plain) scalar.
+
+        # A plain scalar could be a simple key.
+        self.save_possible_simple_key()
+
+        # No simple keys after plain scalars. But note that `scan_plain` will
+        # change this flag if the scan is finished at the beginning of the
+        # line.
+        self.allow_simple_key = False
+
+        # Scan and add SCALAR. May change `allow_simple_key`.
+        self.tokens.append(self.scan_plain())
+
+    # Checkers.
+    # Each checker peeks at the upcoming characters (without consuming them)
+    # and reports whether the corresponding token can start here.
+
+    def check_directive(self):
+
+        # DIRECTIVE:        ^ '%' ...
+        # The '%' indicator is already checked.
+        if self.column == 0:
+            return True
+
+    def check_document_start(self):
+
+        # DOCUMENT-START:   ^ '---' (' '|'\n')
+        if self.column == 0:
+            if self.prefix(3) == '---'  \
+                    and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
+                return True
+
+    def check_document_end(self):
+
+        # DOCUMENT-END:     ^ '...' (' '|'\n')
+        if self.column == 0:
+            if self.prefix(3) == '...'  \
+                    and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
+                return True
+
+    def check_block_entry(self):
+
+        # BLOCK-ENTRY:      '-' (' '|'\n')
+        return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029'
+
+    def check_key(self):
+
+        # KEY(flow context):    '?'
+        if self.flow_level:
+            return True
+
+        # KEY(block context):   '?' (' '|'\n')
+        else:
+            return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029'
+
+    def check_value(self):
+
+        # VALUE(flow context):      ':'
+        if self.flow_level:
+            return True
+
+        # VALUE(block context):     ':' (' '|'\n')
+        else:
+            return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029'
+
+    def check_plain(self):
+
+        # A plain scalar may start with any non-space character except:
+        #   '-', '?', ':', ',', '[', ']', '{', '}',
+        #   '#', '&', '*', '!', '|', '>', '\'', '\"',
+        #   '%', '@', '`'.
+        #
+        # It may also start with
+        #   '-', '?', ':'
+        # if it is followed by a non-space character.
+        #
+        # Note that we limit the last rule to the block context (except the
+        # '-' character) because we want the flow context to be space
+        # independent.
+        ch = self.peek()
+        return ch not in '\0 \t\r\n\x85\u2028\u2029-?:,[]{}#&*!|>\'\"%@`'  \
+                or (self.peek(1) not in '\0 \t\r\n\x85\u2028\u2029'
+                        and (ch == '-' or (not self.flow_level and ch in '?:')))
+
+    # Scanners.
+
+    def scan_to_next_token(self):
+        # We ignore spaces, line breaks and comments.
+        # If we find a line break in the block context, we set the flag
+        # `allow_simple_key` on.
+        # The byte order mark is stripped if it's the first character in the
+        # stream. We do not yet support BOM inside the stream as the
+        # specification requires. Any such mark will be considered as a part
+        # of the document.
+        #
+        # TODO: We need to make tab handling rules more sane. A good rule is
+        #   Tabs cannot precede tokens
+        #   BLOCK-SEQUENCE-START, BLOCK-MAPPING-START, BLOCK-END,
+        #   KEY(block), VALUE(block), BLOCK-ENTRY
+        # So the checking code is
+        #   if <TAB>:
+        #       self.allow_simple_keys = False
+        # We also need to add the check for `allow_simple_keys == True` to
+        # `unwind_indent` before issuing BLOCK-END.
+        # Scanners for block, flow, and plain scalars need to be modified.
+
+        # Strip a leading BOM only at the very start of the stream.
+        if self.index == 0 and self.peek() == '\uFEFF':
+            self.forward()
+        found = False
+        while not found:
+            while self.peek() == ' ':
+                self.forward()
+            if self.peek() == '#':
+                while self.peek() not in '\0\r\n\x85\u2028\u2029':
+                    self.forward()
+            if self.scan_line_break():
+                if not self.flow_level:
+                    self.allow_simple_key = True
+            else:
+                found = True
+
+    def scan_directive(self):
+        # See the specification for details.
+        # Scans '%YAML <major>.<minor>' or '%TAG <handle> <prefix>'; unknown
+        # directive names are accepted with a None value.
+        start_mark = self.get_mark()
+        self.forward()
+        name = self.scan_directive_name(start_mark)
+        value = None
+        if name == 'YAML':
+            value = self.scan_yaml_directive_value(start_mark)
+            end_mark = self.get_mark()
+        elif name == 'TAG':
+            value = self.scan_tag_directive_value(start_mark)
+            end_mark = self.get_mark()
+        else:
+            end_mark = self.get_mark()
+            # Skip the rest of an unknown directive line.
+            while self.peek() not in '\0\r\n\x85\u2028\u2029':
+                self.forward()
+        self.scan_directive_ignored_line(start_mark)
+        return DirectiveToken(name, value, start_mark, end_mark)
+
+    def scan_directive_name(self, start_mark):
+        # See the specification for details.
+        # Directive names are restricted to alphanumerics plus '-' and '_'.
+        length = 0
+        ch = self.peek(length)
+        while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z'  \
+                or ch in '-_':
+            length += 1
+            ch = self.peek(length)
+        if not length:
+            raise ScannerError("while scanning a directive", start_mark,
+                    "expected alphabetic or numeric character, but found %r"
+                    % ch, self.get_mark())
+        value = self.prefix(length)
+        self.forward(length)
+        ch = self.peek()
+        if ch not in '\0 \r\n\x85\u2028\u2029':
+            raise ScannerError("while scanning a directive", start_mark,
+                    "expected alphabetic or numeric character, but found %r"
+                    % ch, self.get_mark())
+        return value
+
+    def scan_yaml_directive_value(self, start_mark):
+        # See the specification for details.
+        # Returns the version as a (major, minor) tuple of ints.
+        while self.peek() == ' ':
+            self.forward()
+        major = self.scan_yaml_directive_number(start_mark)
+        if self.peek() != '.':
+            raise ScannerError("while scanning a directive", start_mark,
+                    "expected a digit or '.', but found %r" % self.peek(),
+                    self.get_mark())
+        self.forward()
+        minor = self.scan_yaml_directive_number(start_mark)
+        if self.peek() not in '\0 \r\n\x85\u2028\u2029':
+            raise ScannerError("while scanning a directive", start_mark,
+                    "expected a digit or ' ', but found %r" % self.peek(),
+                    self.get_mark())
+        return (major, minor)
+
+    def scan_yaml_directive_number(self, start_mark):
+        # See the specification for details.
+        # Scans one decimal integer component of the version number.
+        ch = self.peek()
+        if not ('0' <= ch <= '9'):
+            raise ScannerError("while scanning a directive", start_mark,
+                    "expected a digit, but found %r" % ch, self.get_mark())
+        length = 0
+        while '0' <= self.peek(length) <= '9':
+            length += 1
+        value = int(self.prefix(length))
+        self.forward(length)
+        return value
+
+    def scan_tag_directive_value(self, start_mark):
+        # See the specification for details.
+        # Returns a (handle, prefix) pair for a '%TAG' directive.
+        while self.peek() == ' ':
+            self.forward()
+        handle = self.scan_tag_directive_handle(start_mark)
+        while self.peek() == ' ':
+            self.forward()
+        prefix = self.scan_tag_directive_prefix(start_mark)
+        return (handle, prefix)
+
+    def scan_tag_directive_handle(self, start_mark):
+        # See the specification for details.
+        value = self.scan_tag_handle('directive', start_mark)
+        ch = self.peek()
+        if ch != ' ':
+            raise ScannerError("while scanning a directive", start_mark,
+                    "expected ' ', but found %r" % ch, self.get_mark())
+        return value
+
+    def scan_tag_directive_prefix(self, start_mark):
+        # See the specification for details.
+        value = self.scan_tag_uri('directive', start_mark)
+        ch = self.peek()
+        if ch not in '\0 \r\n\x85\u2028\u2029':
+            raise ScannerError("while scanning a directive", start_mark,
+                    "expected ' ', but found %r" % ch, self.get_mark())
+        return value
+
+    def scan_directive_ignored_line(self, start_mark):
+        # See the specification for details.
+        # Skips trailing spaces and an optional comment up to the line break.
+        while self.peek() == ' ':
+            self.forward()
+        if self.peek() == '#':
+            while self.peek() not in '\0\r\n\x85\u2028\u2029':
+                self.forward()
+        ch = self.peek()
+        if ch not in '\0\r\n\x85\u2028\u2029':
+            raise ScannerError("while scanning a directive", start_mark,
+                    "expected a comment or a line break, but found %r"
+                    % ch, self.get_mark())
+        self.scan_line_break()
+
+    def scan_anchor(self, TokenClass):
+        # The specification does not restrict characters for anchors and
+        # aliases. This may lead to problems, for instance, the document:
+        #   [ *alias, value ]
+        # can be interpreted in two ways, as
+        #   [ "value" ]
+        # and
+        #   [ *alias , "value" ]
+        # Therefore we restrict aliases to numbers and ASCII letters.
+        start_mark = self.get_mark()
+        indicator = self.peek()
+        # 'name' is used only in error messages ('*' => alias, '&' => anchor).
+        if indicator == '*':
+            name = 'alias'
+        else:
+            name = 'anchor'
+        self.forward()
+        length = 0
+        ch = self.peek(length)
+        while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z'  \
+                or ch in '-_':
+            length += 1
+            ch = self.peek(length)
+        if not length:
+            raise ScannerError("while scanning an %s" % name, start_mark,
+                    "expected alphabetic or numeric character, but found %r"
+                    % ch, self.get_mark())
+        value = self.prefix(length)
+        self.forward(length)
+        ch = self.peek()
+        if ch not in '\0 \t\r\n\x85\u2028\u2029?:,]}%@`':
+            raise ScannerError("while scanning an %s" % name, start_mark,
+                    "expected alphabetic or numeric character, but found %r"
+                    % ch, self.get_mark())
+        end_mark = self.get_mark()
+        return TokenClass(value, start_mark, end_mark)
+
+    def scan_tag(self):
+        # See the specification for details.
+        # Handles the three tag forms: verbatim '!<...>', the bare '!', and
+        # '!suffix' / '!handle!suffix'.  Returns a TagToken whose value is a
+        # (handle, suffix) pair; handle is None for verbatim tags.
+        start_mark = self.get_mark()
+        ch = self.peek(1)
+        if ch == '<':
+            # Verbatim tag: '!<' URI '>'.
+            handle = None
+            self.forward(2)
+            suffix = self.scan_tag_uri('tag', start_mark)
+            if self.peek() != '>':
+                raise ScannerError("while parsing a tag", start_mark,
+                        "expected '>', but found %r" % self.peek(),
+                        self.get_mark())
+            self.forward()
+        elif ch in '\0 \t\r\n\x85\u2028\u2029':
+            # The non-specific tag '!'.
+            handle = None
+            suffix = '!'
+            self.forward()
+        else:
+            # Look ahead for a second '!' to decide between '!handle!suffix'
+            # and a plain '!suffix'.
+            length = 1
+            use_handle = False
+            while ch not in '\0 \r\n\x85\u2028\u2029':
+                if ch == '!':
+                    use_handle = True
+                    break
+                length += 1
+                ch = self.peek(length)
+            handle = '!'
+            if use_handle:
+                handle = self.scan_tag_handle('tag', start_mark)
+            else:
+                handle = '!'
+                self.forward()
+            suffix = self.scan_tag_uri('tag', start_mark)
+        ch = self.peek()
+        if ch not in '\0 \r\n\x85\u2028\u2029':
+            raise ScannerError("while scanning a tag", start_mark,
+                    "expected ' ', but found %r" % ch, self.get_mark())
+        value = (handle, suffix)
+        end_mark = self.get_mark()
+        return TagToken(value, start_mark, end_mark)
+
+    def scan_block_scalar(self, style):
+        # See the specification for details.
+        # Scans a literal ('|') or folded ('>') block scalar, honoring the
+        # chomping ('+'/'-') and explicit indentation indicators.
+
+        if style == '>':
+            folded = True
+        else:
+            folded = False
+
+        chunks = []
+        start_mark = self.get_mark()
+
+        # Scan the header.
+        self.forward()
+        chomping, increment = self.scan_block_scalar_indicators(start_mark)
+        self.scan_block_scalar_ignored_line(start_mark)
+
+        # Determine the indentation level and go to the first non-empty line.
+        min_indent = self.indent+1
+        if min_indent < 1:
+            min_indent = 1
+        if increment is None:
+            # No explicit indicator: detect the indentation from the content.
+            breaks, max_indent, end_mark = self.scan_block_scalar_indentation()
+            indent = max(min_indent, max_indent)
+        else:
+            indent = min_indent+increment-1
+            breaks, end_mark = self.scan_block_scalar_breaks(indent)
+        line_break = ''
+
+        # Scan the inner part of the block scalar.
+        while self.column == indent and self.peek() != '\0':
+            chunks.extend(breaks)
+            leading_non_space = self.peek() not in ' \t'
+            length = 0
+            while self.peek(length) not in '\0\r\n\x85\u2028\u2029':
+                length += 1
+            chunks.append(self.prefix(length))
+            self.forward(length)
+            line_break = self.scan_line_break()
+            breaks, end_mark = self.scan_block_scalar_breaks(indent)
+            if self.column == indent and self.peek() != '\0':
+
+                # Unfortunately, folding rules are ambiguous.
+                #
+                # This is the folding according to the specification:
+
+                if folded and line_break == '\n'    \
+                        and leading_non_space and self.peek() not in ' \t':
+                    if not breaks:
+                        chunks.append(' ')
+                else:
+                    chunks.append(line_break)
+
+                # This is Clark Evans's interpretation (also in the spec
+                # examples):
+                #
+                #if folded and line_break == '\n':
+                #    if not breaks:
+                #        if self.peek() not in ' \t':
+                #            chunks.append(' ')
+                #        else:
+                #            chunks.append(line_break)
+                #else:
+                #    chunks.append(line_break)
+            else:
+                break
+
+        # Chomp the tail: keep the final break unless stripping ('-'), and
+        # keep trailing empty lines only when keeping ('+').
+        if chomping is not False:
+            chunks.append(line_break)
+        if chomping is True:
+            chunks.extend(breaks)
+
+        # We are done.
+        return ScalarToken(''.join(chunks), False, start_mark, end_mark,
+                style)
+
+    def scan_block_scalar_indicators(self, start_mark):
+        # See the specification for details.
+        # The chomping ('+'/'-') and indentation (1-9) indicators may appear
+        # in either order; both are optional.  Returns (chomping, increment)
+        # where chomping is True/False/None and increment is an int or None.
+        chomping = None
+        increment = None
+        ch = self.peek()
+        if ch in '+-':
+            if ch == '+':
+                chomping = True
+            else:
+                chomping = False
+            self.forward()
+            ch = self.peek()
+            if ch in '0123456789':
+                increment = int(ch)
+                if increment == 0:
+                    raise ScannerError("while scanning a block scalar", start_mark,
+                            "expected indentation indicator in the range 1-9, but found 0",
+                            self.get_mark())
+                self.forward()
+        elif ch in '0123456789':
+            increment = int(ch)
+            if increment == 0:
+                raise ScannerError("while scanning a block scalar", start_mark,
+                        "expected indentation indicator in the range 1-9, but found 0",
+                        self.get_mark())
+            self.forward()
+            ch = self.peek()
+            if ch in '+-':
+                if ch == '+':
+                    chomping = True
+                else:
+                    chomping = False
+                self.forward()
+        ch = self.peek()
+        if ch not in '\0 \r\n\x85\u2028\u2029':
+            raise ScannerError("while scanning a block scalar", start_mark,
+                    "expected chomping or indentation indicators, but found %r"
+                    % ch, self.get_mark())
+        return chomping, increment
+
+    def scan_block_scalar_ignored_line(self, start_mark):
+        # See the specification for details.
+        # Consumes the rest of the block scalar header line (spaces and an
+        # optional comment) up to and including the line break.
+        while self.peek() == ' ':
+            self.forward()
+        if self.peek() == '#':
+            while self.peek() not in '\0\r\n\x85\u2028\u2029':
+                self.forward()
+        ch = self.peek()
+        if ch not in '\0\r\n\x85\u2028\u2029':
+            raise ScannerError("while scanning a block scalar", start_mark,
+                    "expected a comment or a line break, but found %r" % ch,
+                    self.get_mark())
+        self.scan_line_break()
+
+    def scan_block_scalar_indentation(self):
+        # See the specification for details.
+        # Skips leading empty lines and measures the largest indentation
+        # seen; returns (collected line breaks, max_indent, end_mark).
+        chunks = []
+        max_indent = 0
+        end_mark = self.get_mark()
+        while self.peek() in ' \r\n\x85\u2028\u2029':
+            if self.peek() != ' ':
+                chunks.append(self.scan_line_break())
+                end_mark = self.get_mark()
+            else:
+                self.forward()
+                if self.column > max_indent:
+                    max_indent = self.column
+        return chunks, max_indent, end_mark
+
+    def scan_block_scalar_breaks(self, indent):
+        # See the specification for details.
+        # Consumes empty lines (indented up to `indent`); returns the
+        # collected line breaks and the end mark.
+        chunks = []
+        end_mark = self.get_mark()
+        while self.column < indent and self.peek() == ' ':
+            self.forward()
+        while self.peek() in '\r\n\x85\u2028\u2029':
+            chunks.append(self.scan_line_break())
+            end_mark = self.get_mark()
+            while self.column < indent and self.peek() == ' ':
+                self.forward()
+        return chunks, end_mark
+
+    def scan_flow_scalar(self, style):
+        # See the specification for details.
+        # Note that we loosen indentation rules for quoted scalars. Quoted
+        # scalars don't need to adhere to indentation because " and ' clearly
+        # mark the beginning and the end of them. Therefore we are less
+        # restrictive than the specification requires. We only need to check
+        # that document separators are not included in scalars.
+        if style == '"':
+            double = True
+        else:
+            double = False
+        chunks = []
+        start_mark = self.get_mark()
+        quote = self.peek()
+        self.forward()
+        # Alternate between runs of non-space content and folded whitespace
+        # until the closing quote.
+        chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark))
+        while self.peek() != quote:
+            chunks.extend(self.scan_flow_scalar_spaces(double, start_mark))
+            chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark))
+        self.forward()
+        end_mark = self.get_mark()
+        return ScalarToken(''.join(chunks), False, start_mark, end_mark,
+                style)
+
+    # Single-character escape sequences recognized in double-quoted scalars.
+    ESCAPE_REPLACEMENTS = {
+        '0':    '\0',
+        'a':    '\x07',
+        'b':    '\x08',
+        't':    '\x09',
+        '\t':   '\x09',
+        'n':    '\x0A',
+        'v':    '\x0B',
+        'f':    '\x0C',
+        'r':    '\x0D',
+        'e':    '\x1B',
+        ' ':    '\x20',
+        '\"':   '\"',
+        '\\':   '\\',
+        'N':    '\x85',
+        '_':    '\xA0',
+        'L':    '\u2028',
+        'P':    '\u2029',
+    }
+
+    # Numeric escapes: escape letter -> number of hex digits that follow.
+    ESCAPE_CODES = {
+        'x':    2,
+        'u':    4,
+        'U':    8,
+    }
+
+    def scan_flow_scalar_non_spaces(self, double, start_mark):
+        # See the specification for details.
+        # Collects non-whitespace content, resolving '' in single-quoted and
+        # backslash escapes in double-quoted scalars.
+        chunks = []
+        while True:
+            length = 0
+            while self.peek(length) not in '\'\"\\\0 \t\r\n\x85\u2028\u2029':
+                length += 1
+            if length:
+                chunks.append(self.prefix(length))
+                self.forward(length)
+            ch = self.peek()
+            if not double and ch == '\'' and self.peek(1) == '\'':
+                # '' inside a single-quoted scalar is an escaped quote.
+                chunks.append('\'')
+                self.forward(2)
+            elif (double and ch == '\'') or (not double and ch in '\"\\'):
+                # These characters are literal in the other quoting style.
+                chunks.append(ch)
+                self.forward()
+            elif double and ch == '\\':
+                self.forward()
+                ch = self.peek()
+                if ch in self.ESCAPE_REPLACEMENTS:
+                    chunks.append(self.ESCAPE_REPLACEMENTS[ch])
+                    self.forward()
+                elif ch in self.ESCAPE_CODES:
+                    # \xXX, \uXXXX or \UXXXXXXXX numeric escape.
+                    length = self.ESCAPE_CODES[ch]
+                    self.forward()
+                    for k in range(length):
+                        if self.peek(k) not in '0123456789ABCDEFabcdef':
+                            raise ScannerError("while scanning a double-quoted scalar", start_mark,
+                                    "expected escape sequence of %d hexdecimal numbers, but found %r" %
+                                        (length, self.peek(k)), self.get_mark())
+                    code = int(self.prefix(length), 16)
+                    chunks.append(chr(code))
+                    self.forward(length)
+                elif ch in '\r\n\x85\u2028\u2029':
+                    # Escaped line break: fold the following breaks.
+                    self.scan_line_break()
+                    chunks.extend(self.scan_flow_scalar_breaks(double, start_mark))
+                else:
+                    raise ScannerError("while scanning a double-quoted scalar", start_mark,
+                            "found unknown escape character %r" % ch, self.get_mark())
+            else:
+                return chunks
+
+    def scan_flow_scalar_spaces(self, double, start_mark):
+        # See the specification for details.
+        # Collects whitespace between content runs, folding line breaks
+        # according to the flow-scalar folding rules.
+        chunks = []
+        length = 0
+        while self.peek(length) in ' \t':
+            length += 1
+        whitespaces = self.prefix(length)
+        self.forward(length)
+        ch = self.peek()
+        if ch == '\0':
+            raise ScannerError("while scanning a quoted scalar", start_mark,
+                    "found unexpected end of stream", self.get_mark())
+        elif ch in '\r\n\x85\u2028\u2029':
+            line_break = self.scan_line_break()
+            breaks = self.scan_flow_scalar_breaks(double, start_mark)
+            if line_break != '\n':
+                chunks.append(line_break)
+            elif not breaks:
+                # A single '\n' folds to a space.
+                chunks.append(' ')
+            chunks.extend(breaks)
+        else:
+            chunks.append(whitespaces)
+        return chunks
+
+    def scan_flow_scalar_breaks(self, double, start_mark):
+        # See the specification for details.
+        # Consumes consecutive empty lines; rejects document separators.
+        chunks = []
+        while True:
+            # Instead of checking indentation, we check for document
+            # separators.
+            prefix = self.prefix(3)
+            if (prefix == '---' or prefix == '...')   \
+                    and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
+                raise ScannerError("while scanning a quoted scalar", start_mark,
+                        "found unexpected document separator", self.get_mark())
+            while self.peek() in ' \t':
+                self.forward()
+            if self.peek() in '\r\n\x85\u2028\u2029':
+                chunks.append(self.scan_line_break())
+            else:
+                return chunks
+
+    def scan_plain(self):
+        # See the specification for details.
+        # We add an additional restriction for the flow context:
+        #   plain scalars in the flow context cannot contain ',', ':' and '?'.
+        # We also keep track of the `allow_simple_key` flag here.
+        # Indentation rules are loosened for the flow context.
+        chunks = []
+        start_mark = self.get_mark()
+        end_mark = start_mark
+        indent = self.indent+1
+        # We allow zero indentation for scalars, but then we need to check for
+        # document separators at the beginning of the line.
+        #if indent == 0:
+        #    indent = 1
+        spaces = []
+        while True:
+            length = 0
+            if self.peek() == '#':
+                break
+            # Measure the next run of content characters.
+            while True:
+                ch = self.peek(length)
+                if ch in '\0 \t\r\n\x85\u2028\u2029'    \
+                        or (not self.flow_level and ch == ':' and
+                            self.peek(length+1) in '\0 \t\r\n\x85\u2028\u2029') \
+                        or (self.flow_level and ch in ',:?[]{}'):
+                    break
+                length += 1
+            # It's not clear what we should do with ':' in the flow context.
+            if (self.flow_level and ch == ':'
+                    and self.peek(length+1) not in '\0 \t\r\n\x85\u2028\u2029,[]{}'):
+                self.forward(length)
+                raise ScannerError("while scanning a plain scalar", start_mark,
+                    "found unexpected ':'", self.get_mark(),
+                    "Please check http://pyyaml.org/wiki/YAMLColonInFlowContext for details.")
+            if length == 0:
+                break
+            self.allow_simple_key = False
+            chunks.extend(spaces)
+            chunks.append(self.prefix(length))
+            self.forward(length)
+            end_mark = self.get_mark()
+            spaces = self.scan_plain_spaces(indent, start_mark)
+            if not spaces or self.peek() == '#' \
+                    or (not self.flow_level and self.column < indent):
+                break
+        return ScalarToken(''.join(chunks), True, start_mark, end_mark)
+
+    def scan_plain_spaces(self, indent, start_mark):
+        # See the specification for details.
+        # The specification is really confusing about tabs in plain scalars.
+        # We just forbid them completely. Do not use tabs in YAML!
+        # Returns the folded whitespace chunks, or None if a document
+        # separator terminates the scalar.
+        chunks = []
+        length = 0
+        while self.peek(length) in ' ':
+            length += 1
+        whitespaces = self.prefix(length)
+        self.forward(length)
+        ch = self.peek()
+        if ch in '\r\n\x85\u2028\u2029':
+            line_break = self.scan_line_break()
+            # A line break in the block context re-enables simple keys.
+            self.allow_simple_key = True
+            prefix = self.prefix(3)
+            if (prefix == '---' or prefix == '...')   \
+                    and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
+                return
+            breaks = []
+            while self.peek() in ' \r\n\x85\u2028\u2029':
+                if self.peek() == ' ':
+                    self.forward()
+                else:
+                    breaks.append(self.scan_line_break())
+                    prefix = self.prefix(3)
+                    if (prefix == '---' or prefix == '...')   \
+                            and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029':
+                        return
+            if line_break != '\n':
+                chunks.append(line_break)
+            elif not breaks:
+                chunks.append(' ')
+            chunks.extend(breaks)
+        elif whitespaces:
+            chunks.append(whitespaces)
+        return chunks
+
+    def scan_tag_handle(self, name, start_mark):
+        # See the specification for details.
+        # For some strange reasons, the specification does not allow '_' in
+        # tag handles. I have allowed it anyway.
+        # `name` is 'directive' or 'tag', used only in error messages.
+        ch = self.peek()
+        if ch != '!':
+            raise ScannerError("while scanning a %s" % name, start_mark,
+                    "expected '!', but found %r" % ch, self.get_mark())
+        length = 1
+        ch = self.peek(length)
+        if ch != ' ':
+            while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z'  \
+                    or ch in '-_':
+                length += 1
+                ch = self.peek(length)
+            if ch != '!':
+                self.forward(length)
+                raise ScannerError("while scanning a %s" % name, start_mark,
+                        "expected '!', but found %r" % ch, self.get_mark())
+            length += 1
+        value = self.prefix(length)
+        self.forward(length)
+        return value
+
+    def scan_tag_uri(self, name, start_mark):
+        # See the specification for details.
+        # Note: we do not check if URI is well-formed.
+        chunks = []
+        length = 0
+        ch = self.peek(length)
+        while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z'  \
+                or ch in '-;/?:@&=+$,_.!~*\'()[]%':
+            if ch == '%':
+                # Flush collected characters, then decode the %-escapes.
+                chunks.append(self.prefix(length))
+                self.forward(length)
+                length = 0
+                chunks.append(self.scan_uri_escapes(name, start_mark))
+            else:
+                length += 1
+            ch = self.peek(length)
+        if length:
+            chunks.append(self.prefix(length))
+            self.forward(length)
+            length = 0
+        if not chunks:
+            raise ScannerError("while parsing a %s" % name, start_mark,
+                    "expected URI, but found %r" % ch, self.get_mark())
+        return ''.join(chunks)
+
+    def scan_uri_escapes(self, name, start_mark):
+        # See the specification for details.
+        # Decodes a run of %XX escapes as UTF-8 bytes.
+        codes = []
+        mark = self.get_mark()
+        while self.peek() == '%':
+            self.forward()
+            for k in range(2):
+                if self.peek(k) not in '0123456789ABCDEFabcdef':
+                    raise ScannerError("while scanning a %s" % name, start_mark,
+                            "expected URI escape sequence of 2 hexdecimal numbers, but found %r"
+                            % self.peek(k), self.get_mark())
+            codes.append(int(self.prefix(2), 16))
+            self.forward(2)
+        try:
+            value = bytes(codes).decode('utf-8')
+        except UnicodeDecodeError as exc:
+            raise ScannerError("while scanning a %s" % name, start_mark, str(exc), mark)
+        return value
+
+    def scan_line_break(self):
+        # Transforms:
+        #   '\r\n'      :   '\n'
+        #   '\r'        :   '\n'
+        #   '\n'        :   '\n'
+        #   '\x85'      :   '\n'
+        #   '\u2028'    :   '\u2028'
+        #   '\u2029     :   '\u2029'
+        #   default     :   ''
+        ch = self.peek()
+        if ch in '\r\n\x85':
+            if self.prefix(2) == '\r\n':
+                self.forward(2)
+            else:
+                self.forward()
+            return '\n'
+        elif ch in '\u2028\u2029':
+            self.forward()
+            return ch
+        return ''
+
+#try:
+# import psyco
+# psyco.bind(Scanner)
+#except ImportError:
+# pass
+
diff --git a/lib/spack/external/yaml/lib3/yaml/serializer.py b/lib/spack/external/yaml/lib3/yaml/serializer.py
new file mode 100644
index 0000000000..fe911e67ae
--- /dev/null
+++ b/lib/spack/external/yaml/lib3/yaml/serializer.py
@@ -0,0 +1,111 @@
+
+__all__ = ['Serializer', 'SerializerError']
+
+from .error import YAMLError
+from .events import *
+from .nodes import *
+
+class SerializerError(YAMLError):
+    pass
+
+class Serializer:
+    # Walks a node graph and emits the corresponding event stream,
+    # assigning anchors to nodes that are referenced more than once.
+
+    ANCHOR_TEMPLATE = 'id%03d'
+
+    def __init__(self, encoding=None,
+            explicit_start=None, explicit_end=None, version=None, tags=None):
+        self.use_encoding = encoding
+        self.use_explicit_start = explicit_start
+        self.use_explicit_end = explicit_end
+        self.use_version = version
+        self.use_tags = tags
+        self.serialized_nodes = {}
+        self.anchors = {}
+        self.last_anchor_id = 0
+        # None -> never opened; False -> open; True -> closed.
+        self.closed = None
+
+    def open(self):
+        if self.closed is None:
+            self.emit(StreamStartEvent(encoding=self.use_encoding))
+            self.closed = False
+        elif self.closed:
+            raise SerializerError("serializer is closed")
+        else:
+            raise SerializerError("serializer is already opened")
+
+    def close(self):
+        if self.closed is None:
+            raise SerializerError("serializer is not opened")
+        elif not self.closed:
+            self.emit(StreamEndEvent())
+            self.closed = True
+
+    #def __del__(self):
+    #    self.close()
+
+    def serialize(self, node):
+        # Emit one document containing the given node graph.
+        if self.closed is None:
+            raise SerializerError("serializer is not opened")
+        elif self.closed:
+            raise SerializerError("serializer is closed")
+        self.emit(DocumentStartEvent(explicit=self.use_explicit_start,
+            version=self.use_version, tags=self.use_tags))
+        self.anchor_node(node)
+        self.serialize_node(node, None, None)
+        self.emit(DocumentEndEvent(explicit=self.use_explicit_end))
+        self.serialized_nodes = {}
+        self.anchors = {}
+        self.last_anchor_id = 0
+
+    def anchor_node(self, node):
+        # First visit records the node with no anchor; a second visit means
+        # the node is shared and gets a generated anchor.
+        if node in self.anchors:
+            if self.anchors[node] is None:
+                self.anchors[node] = self.generate_anchor(node)
+        else:
+            self.anchors[node] = None
+            if isinstance(node, SequenceNode):
+                for item in node.value:
+                    self.anchor_node(item)
+            elif isinstance(node, MappingNode):
+                for key, value in node.value:
+                    self.anchor_node(key)
+                    self.anchor_node(value)
+
+    def generate_anchor(self, node):
+        self.last_anchor_id += 1
+        return self.ANCHOR_TEMPLATE % self.last_anchor_id
+
+    def serialize_node(self, node, parent, index):
+        # Emit events for `node`; an already-serialized node becomes an
+        # alias event referencing its anchor.
+        alias = self.anchors[node]
+        if node in self.serialized_nodes:
+            self.emit(AliasEvent(alias))
+        else:
+            self.serialized_nodes[node] = True
+            self.descend_resolver(parent, index)
+            if isinstance(node, ScalarNode):
+                detected_tag = self.resolve(ScalarNode, node.value, (True, False))
+                default_tag = self.resolve(ScalarNode, node.value, (False, True))
+                implicit = (node.tag == detected_tag), (node.tag == default_tag)
+                self.emit(ScalarEvent(alias, node.tag, implicit, node.value,
+                    style=node.style))
+            elif isinstance(node, SequenceNode):
+                implicit = (node.tag
+                            == self.resolve(SequenceNode, node.value, True))
+                self.emit(SequenceStartEvent(alias, node.tag, implicit,
+                    flow_style=node.flow_style))
+                index = 0
+                for item in node.value:
+                    self.serialize_node(item, node, index)
+                    index += 1
+                self.emit(SequenceEndEvent())
+            elif isinstance(node, MappingNode):
+                implicit = (node.tag
+                            == self.resolve(MappingNode, node.value, True))
+                self.emit(MappingStartEvent(alias, node.tag, implicit,
+                    flow_style=node.flow_style))
+                for key, value in node.value:
+                    self.serialize_node(key, node, None)
+                    self.serialize_node(value, node, key)
+                self.emit(MappingEndEvent())
+            self.ascend_resolver()
+
diff --git a/lib/spack/external/yaml/lib3/yaml/tokens.py b/lib/spack/external/yaml/lib3/yaml/tokens.py
new file mode 100644
index 0000000000..4d0b48a394
--- /dev/null
+++ b/lib/spack/external/yaml/lib3/yaml/tokens.py
@@ -0,0 +1,104 @@
+
+class Token(object):
+ def __init__(self, start_mark, end_mark):
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ def __repr__(self):
+ attributes = [key for key in self.__dict__
+ if not key.endswith('_mark')]
+ attributes.sort()
+ arguments = ', '.join(['%s=%r' % (key, getattr(self, key))
+ for key in attributes])
+ return '%s(%s)' % (self.__class__.__name__, arguments)
+
+#class BOMToken(Token):
+# id = '<byte order mark>'
+
+class DirectiveToken(Token):
+ id = '<directive>'
+ def __init__(self, name, value, start_mark, end_mark):
+ self.name = name
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+
+class DocumentStartToken(Token):
+ id = '<document start>'
+
+class DocumentEndToken(Token):
+ id = '<document end>'
+
+class StreamStartToken(Token):
+ id = '<stream start>'
+ def __init__(self, start_mark=None, end_mark=None,
+ encoding=None):
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.encoding = encoding
+
+class StreamEndToken(Token):
+ id = '<stream end>'
+
+class BlockSequenceStartToken(Token):
+ id = '<block sequence start>'
+
+class BlockMappingStartToken(Token):
+ id = '<block mapping start>'
+
+class BlockEndToken(Token):
+ id = '<block end>'
+
+class FlowSequenceStartToken(Token):
+ id = '['
+
+class FlowMappingStartToken(Token):
+ id = '{'
+
+class FlowSequenceEndToken(Token):
+ id = ']'
+
+class FlowMappingEndToken(Token):
+ id = '}'
+
+class KeyToken(Token):
+ id = '?'
+
+class ValueToken(Token):
+ id = ':'
+
+class BlockEntryToken(Token):
+ id = '-'
+
+class FlowEntryToken(Token):
+ id = ','
+
+class AliasToken(Token):
+ id = '<alias>'
+ def __init__(self, value, start_mark, end_mark):
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+
+class AnchorToken(Token):
+ id = '<anchor>'
+ def __init__(self, value, start_mark, end_mark):
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+
+class TagToken(Token):
+ id = '<tag>'
+ def __init__(self, value, start_mark, end_mark):
+ self.value = value
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+
+class ScalarToken(Token):
+ id = '<scalar>'
+ def __init__(self, value, plain, start_mark, end_mark, style=None):
+ self.value = value
+ self.plain = plain
+ self.start_mark = start_mark
+ self.end_mark = end_mark
+ self.style = style
+
diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py
index f456a5edf1..8922010e70 100644
--- a/lib/spack/llnl/util/filesystem.py
+++ b/lib/spack/llnl/util/filesystem.py
@@ -175,9 +175,9 @@ def change_sed_delimiter(old_delim, new_delim, *filenames):
def set_install_permissions(path):
"""Set appropriate permissions on the installed file."""
if os.path.isdir(path):
- os.chmod(path, 0755)
+ os.chmod(path, 0o755)
else:
- os.chmod(path, 0644)
+ os.chmod(path, 0o644)
def copy_mode(src, dest):
diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py
index d9fef42e53..ec4c25fead 100644
--- a/lib/spack/llnl/util/lang.py
+++ b/lib/spack/llnl/util/lang.py
@@ -27,11 +27,18 @@ import re
import functools
import collections
import inspect
+from six import string_types
# Ignore emacs backups when listing modules
ignore_modules = [r'^\.#', '~$']
+class classproperty(property):
+ """classproperty decorator: like property but for classmethods."""
+ def __get__(self, cls, owner):
+ return self.fget.__get__(None, owner)()
+
+
def index_by(objects, *funcs):
"""Create a hierarchy of dictionaries by splitting the supplied
set of objects on unique values of the supplied functions.
@@ -80,7 +87,7 @@ def index_by(objects, *funcs):
return objects
f = funcs[0]
- if isinstance(f, basestring):
+ if isinstance(f, str):
f = lambda x: getattr(x, funcs[0])
elif isinstance(f, tuple):
f = lambda x: tuple(getattr(x, p) for p in funcs[0])
@@ -326,7 +333,7 @@ def match_predicate(*args):
"""
def match(string):
for arg in args:
- if isinstance(arg, basestring):
+ if isinstance(arg, string_types):
if re.search(arg, string):
return True
elif isinstance(arg, list) or isinstance(arg, tuple):
diff --git a/lib/spack/llnl/util/tty/__init__.py b/lib/spack/llnl/util/tty/__init__.py
index f73d96a4e4..f78d889037 100644
--- a/lib/spack/llnl/util/tty/__init__.py
+++ b/lib/spack/llnl/util/tty/__init__.py
@@ -29,7 +29,7 @@ import fcntl
import termios
import struct
import traceback
-from StringIO import StringIO
+from six import StringIO
from llnl.util.tty.color import *
@@ -93,7 +93,7 @@ def msg(message, *args, **kwargs):
else:
cwrite("@*b{%s==>} %s" % (st_text, cescape(message)))
for arg in args:
- print indent + str(arg)
+ print(indent + str(arg))
def info(message, *args, **kwargs):
@@ -201,7 +201,7 @@ def get_yes_or_no(prompt, **kwargs):
if not ans:
result = default_value
if result is None:
- print "Please enter yes or no."
+ print("Please enter yes or no.")
else:
if ans == 'y' or ans == 'yes':
result = True
@@ -239,7 +239,7 @@ def hline(label=None, **kwargs):
out.write(label)
out.write(suffix)
- print out.getvalue()
+ print(out.getvalue())
def terminal_size():
diff --git a/lib/spack/llnl/util/tty/colify.py b/lib/spack/llnl/util/tty/colify.py
index 67acdfa517..83de530ef1 100644
--- a/lib/spack/llnl/util/tty/colify.py
+++ b/lib/spack/llnl/util/tty/colify.py
@@ -25,9 +25,11 @@
"""
Routines for printing columnar output. See colify() for more information.
"""
+from __future__ import division
+
import os
import sys
-from StringIO import StringIO
+from six import StringIO
from llnl.util.tty import terminal_size
from llnl.util.tty.color import clen, cextra
@@ -64,18 +66,18 @@ def config_variable_cols(elts, console_width, padding, cols=0):
# Get a bound on the most columns we could possibly have.
# 'clen' ignores length of ansi color sequences.
lengths = [clen(e) for e in elts]
- max_cols = max(1, console_width / (min(lengths) + padding))
+ max_cols = max(1, console_width // (min(lengths) + padding))
max_cols = min(len(elts), max_cols)
# Range of column counts to try. If forced, use the supplied value.
- col_range = [cols] if cols else xrange(1, max_cols + 1)
+ col_range = [cols] if cols else range(1, max_cols + 1)
# Determine the most columns possible for the console width.
configs = [ColumnConfig(c) for c in col_range]
for i, length in enumerate(lengths):
for conf in configs:
if conf.valid:
- col = i / ((len(elts) + conf.cols - 1) / conf.cols)
+ col = i // ((len(elts) + conf.cols - 1) // conf.cols)
p = padding if col < (conf.cols - 1) else 0
if conf.widths[col] < (length + p):
@@ -107,7 +109,7 @@ def config_uniform_cols(elts, console_width, padding, cols=0):
# 'clen' ignores length of ansi color sequences.
max_len = max(clen(e) for e in elts) + padding
if cols == 0:
- cols = max(1, console_width / max_len)
+ cols = max(1, console_width // max_len)
cols = min(len(elts), cols)
config = ColumnConfig(cols)
@@ -193,12 +195,12 @@ def colify(elts, **options):
raise ValueError("method must be one of: " + allowed_methods)
cols = config.cols
- rows = (len(elts) + cols - 1) / cols
+ rows = (len(elts) + cols - 1) // cols
rows_last_col = len(elts) % rows
- for row in xrange(rows):
+ for row in range(rows):
output.write(" " * indent)
- for col in xrange(cols):
+ for col in range(cols):
elt = col * rows + row
width = config.widths[col] + cextra(elts[elt])
if col < cols - 1:
@@ -233,7 +235,7 @@ def colify_table(table, **options):
columns = len(table[0])
def transpose():
- for i in xrange(columns):
+ for i in range(columns):
for row in table:
yield row[i]
diff --git a/lib/spack/llnl/util/tty/log.py b/lib/spack/llnl/util/tty/log.py
index b1d45214ab..50e07c0b97 100644
--- a/lib/spack/llnl/util/tty/log.py
+++ b/lib/spack/llnl/util/tty/log.py
@@ -165,8 +165,12 @@ class log_output(object):
self.p.join(60.0) # 1 minute to join the child
def _spawn_writing_daemon(self, read, input_stream):
- # Parent: read from child, skip the with block.
- read_file = os.fdopen(read, 'r', 0)
+ # This is the Parent: read from child, skip the with block.
+
+ # Use line buffering (3rd param = 1) since Python 3 has a bug
+ # that prevents unbuffered text I/O.
+ read_file = os.fdopen(read, 'r', 1)
+
with open(self.filename, 'w') as log_file:
with keyboard_input(input_stream):
while True:
diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py
index 6a28fbb2b0..345a804dfe 100644
--- a/lib/spack/spack/__init__.py
+++ b/lib/spack/spack/__init__.py
@@ -78,7 +78,6 @@ import spack.error
import spack.config
import spack.fetch_strategy
from spack.file_cache import FileCache
-from spack.package_prefs import PreferredPackages
from spack.abi import ABI
from spack.concretize import DefaultConcretizer
from spack.version import Version
@@ -96,7 +95,7 @@ spack_version = Version("0.10.0")
try:
repo = spack.repository.RepoPath()
sys.meta_path.append(repo)
-except spack.error.SpackError, e:
+except spack.error.SpackError as e:
tty.die('while initializing Spack RepoPath:', e.message)
@@ -162,6 +161,7 @@ from spack.build_systems.autotools import AutotoolsPackage
from spack.build_systems.cmake import CMakePackage
from spack.build_systems.python import PythonPackage
from spack.build_systems.r import RPackage
+from spack.build_systems.perl import PerlPackage
__all__ += [
'run_before',
@@ -172,7 +172,8 @@ __all__ += [
'AutotoolsPackage',
'MakefilePackage',
'PythonPackage',
- 'RPackage'
+ 'RPackage',
+ 'PerlPackage'
]
from spack.version import Version, ver
diff --git a/lib/spack/spack/architecture.py b/lib/spack/spack/architecture.py
index e44e0dc109..bace3c49f6 100644
--- a/lib/spack/spack/architecture.py
+++ b/lib/spack/spack/architecture.py
@@ -287,7 +287,7 @@ class OperatingSystem(object):
# ensure all the version calls we made are cached in the parent
# process, as well. This speeds up Spack a lot.
- clist = reduce(lambda x, y: x + y, compiler_lists)
+ clist = [comp for cl in compiler_lists for comp in cl]
return clist
def find_compiler(self, cmp_cls, *path):
diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py
index 3e6dc12b35..a20a7b4db8 100644
--- a/lib/spack/spack/build_environment.py
+++ b/lib/spack/spack/build_environment.py
@@ -57,6 +57,7 @@ import os
import shutil
import sys
import traceback
+from six import iteritems
import llnl.util.lang as lang
import llnl.util.tty as tty
@@ -310,7 +311,7 @@ def set_build_environment_variables(pkg, env, dirty=False):
environment = compiler.environment
if 'set' in environment:
env_to_set = environment['set']
- for key, value in env_to_set.iteritems():
+ for key, value in iteritems(env_to_set):
env.set('SPACK_ENV_SET_%s' % key, value)
env.set('%s' % key, value)
# Let shell know which variables to set
@@ -322,8 +323,9 @@ def set_build_environment_variables(pkg, env, dirty=False):
env.set('SPACK_COMPILER_EXTRA_RPATHS', extra_rpaths)
# Add bin directories from dependencies to the PATH for the build.
- bin_dirs = reversed(filter(os.path.isdir, [
- '%s/bin' % d.prefix for d in pkg.spec.dependencies(deptype='build')]))
+ bin_dirs = reversed(
+ [d.prefix.bin for d in pkg.spec.dependencies(deptype='build')
+ if os.path.isdir(d.prefix.bin)])
bin_dirs = filter_system_bin_paths(bin_dirs)
for item in bin_dirs:
env.prepend_path('PATH', item)
diff --git a/lib/spack/spack/build_systems/autotools.py b/lib/spack/spack/build_systems/autotools.py
index a11a84acd0..ffd00e7f69 100644
--- a/lib/spack/spack/build_systems/autotools.py
+++ b/lib/spack/spack/build_systems/autotools.py
@@ -49,7 +49,8 @@ class AutotoolsPackage(PackageBase):
4. :py:meth:`~.AutotoolsPackage.install`
They all have sensible defaults and for many packages the only thing
- necessary will be to override the helper method :py:meth:`.configure_args`.
+ necessary will be to override the helper method
+ :py:meth:`~.AutotoolsPackage.configure_args`.
For a finer tuning you may also override:
+-----------------------------------------------+--------------------+
@@ -145,7 +146,7 @@ class AutotoolsPackage(PackageBase):
if config_guess is not None:
try:
check_call([config_guess], stdout=PIPE, stderr=PIPE)
- mod = stat(my_config_guess).st_mode & 0777 | S_IWUSR
+ mod = stat(my_config_guess).st_mode & 0o777 | S_IWUSR
os.chmod(my_config_guess, mod)
shutil.copyfile(config_guess, my_config_guess)
return True
@@ -234,7 +235,7 @@ class AutotoolsPackage(PackageBase):
appropriately, otherwise raises an error.
:raises RuntimeError: if a configure script is not found in
- :py:meth:`~.configure_directory`
+ :py:meth:`~.AutotoolsPackage.configure_directory`
"""
# Check if a configure script is there. If not raise a RuntimeError.
if not os.path.exists(self.configure_abs_path):
@@ -255,7 +256,8 @@ class AutotoolsPackage(PackageBase):
return []
def configure(self, spec, prefix):
- """Runs configure with the arguments specified in :py:meth:`.configure_args`
+ """Runs configure with the arguments specified in
+ :py:meth:`~.AutotoolsPackage.configure_args`
and an appropriately set prefix.
"""
options = ['--prefix={0}'.format(prefix)] + self.configure_args()
diff --git a/lib/spack/spack/build_systems/perl.py b/lib/spack/spack/build_systems/perl.py
new file mode 100644
index 0000000000..78184c85dc
--- /dev/null
+++ b/lib/spack/spack/build_systems/perl.py
@@ -0,0 +1,117 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+import inspect
+import os
+
+from llnl.util.filesystem import join_path
+from spack.directives import extends
+from spack.package import PackageBase, run_after
+from spack.util.executable import Executable
+
+
+class PerlPackage(PackageBase):
+ """Specialized class for packages that are built using Perl.
+
+ This class provides four phases that can be overridden if required:
+
+ 1. :py:meth:`~.PerlPackage.configure`
+ 2. :py:meth:`~.PerlPackage.build`
+ 3. :py:meth:`~.PerlPackage.check`
+ 4. :py:meth:`~.PerlPackage.install`
+
+ The default methods use, in order of preference:
+ (1) Makefile.PL,
+ (2) Build.PL.
+
+ Some packages may need to override
+ :py:meth:`~.PerlPackage.configure_args`,
+ which produces a list of arguments for
+ :py:meth:`~.PerlPackage.configure`.
+ Arguments should not include the installation base directory.
+ """
+ #: Phases of a Perl package
+ phases = ['configure', 'build', 'install']
+
+ #: This attribute is used in UI queries that need to know the build
+ #: system base class
+ build_system_class = 'PerlPackage'
+
+ #: Callback names for build-time test
+ build_time_test_callbacks = ['check']
+
+ extends('perl')
+
+ def configure_args(self):
+ """Produces a list containing the arguments that must be passed to
+ :py:meth:`~.PerlPackage.configure`. Arguments should not include
+ the installation base directory, which is prepended automatically.
+
+ :return: list of arguments for Makefile.PL or Build.PL
+ """
+ return []
+
+ def configure(self, spec, prefix):
+ """Runs Makefile.PL or Build.PL with arguments consisting of
+ an appropriate installation base directory followed by the
+ list returned by :py:meth:`~.PerlPackage.configure_args`.
+
+ :raise RuntimeError: if neither Makefile.PL nor Build.PL exists
+ """
+ if os.path.isfile('Makefile.PL'):
+ self.build_method = 'Makefile.PL'
+ self.build_executable = inspect.getmodule(self).make
+ elif os.path.isfile('Build.PL'):
+ self.build_method = 'Build.PL'
+ self.build_executable = Executable(
+ join_path(self.stage.source_path, 'Build'))
+ else:
+ raise RuntimeError('Unknown build_method for perl package')
+
+ if self.build_method == 'Makefile.PL':
+ options = ['Makefile.PL', 'INSTALL_BASE={0}'.format(prefix)]
+ elif self.build_method == 'Build.PL':
+ options = ['Build.PL', '--install_base', prefix]
+ options += self.configure_args()
+
+ inspect.getmodule(self).perl(*options)
+
+ def build(self, spec, prefix):
+ """Builds a Perl package."""
+ self.build_executable()
+
+ # Ensure that tests run after build (if requested):
+ run_after('build')(PackageBase._run_default_build_time_test_callbacks)
+
+ def check(self):
+ """Runs built-in tests of a Perl package."""
+ self.build_executable('test')
+
+ def install(self, spec, prefix):
+ """Installs a Perl package."""
+ self.build_executable('install')
+
+ # Check that self.prefix is there after installation
+ run_after('install')(PackageBase.sanity_check_prefix)
diff --git a/lib/spack/spack/build_systems/python.py b/lib/spack/spack/build_systems/python.py
index d2ee72925d..2c8ccebae6 100644
--- a/lib/spack/spack/build_systems/python.py
+++ b/lib/spack/spack/build_systems/python.py
@@ -24,6 +24,7 @@
##############################################################################
import inspect
+import os
from spack.directives import extends
from spack.package import PackageBase, run_after
@@ -91,10 +92,26 @@ class PythonPackage(PackageBase):
# Default phases
phases = ['build', 'install']
+ # Name of modules that the Python package provides
+ # This is used to test whether or not the installation succeeded
+ # These names generally come from running:
+ #
+ # >>> import setuptools
+ # >>> setuptools.find_packages()
+ #
+ # in the source tarball directory
+ import_modules = []
+
# To be used in UI queries that require to know which
# build-system class we are using
build_system_class = 'PythonPackage'
+ #: Callback names for build-time test
+ build_time_test_callbacks = ['test']
+
+ #: Callback names for install-time test
+ install_time_test_callbacks = ['import_module_test']
+
extends('python')
def setup_file(self):
@@ -106,19 +123,38 @@ class PythonPackage(PackageBase):
"""The directory containing the ``setup.py`` file."""
return self.stage.source_path
- def python(self, *args):
- inspect.getmodule(self).python(*args)
+ def python(self, *args, **kwargs):
+ inspect.getmodule(self).python(*args, **kwargs)
- def setup_py(self, *args):
+ def setup_py(self, *args, **kwargs):
setup = self.setup_file()
with working_dir(self.build_directory):
- self.python(setup, '--no-user-cfg', *args)
+ self.python(setup, '--no-user-cfg', *args, **kwargs)
+
+ def _setup_command_available(self, command):
+ """Determines whether or not a setup.py command exists.
+
+ :param str command: The command to look for
+ :return: True if the command is found, else False
+ :rtype: bool
+ """
+ kwargs = {
+ 'output': os.devnull,
+ 'error': os.devnull,
+ 'fail_on_error': False
+ }
+
+ python = inspect.getmodule(self).python
+ setup = self.setup_file()
+
+ python(setup, '--no-user-cfg', command, '--help', **kwargs)
+ return python.returncode == 0
# The following phases and their descriptions come from:
# $ python setup.py --help-commands
- # Only standard commands are included here, but some packages
- # define extra commands as well
+
+ # Standard commands
def build(self, spec, prefix):
"""Build everything needed to install."""
@@ -306,5 +342,37 @@ class PythonPackage(PackageBase):
"""Arguments to pass to check."""
return []
+ # Testing
+
+ def test(self):
+ """Run unit tests after in-place build.
+
+ These tests are only run if the package actually has a 'test' command.
+ """
+ if self._setup_command_available('test'):
+ args = self.test_args(self.spec, self.prefix)
+
+ self.setup_py('test', *args)
+
+ def test_args(self, spec, prefix):
+ """Arguments to pass to test."""
+ return []
+
+ run_after('build')(PackageBase._run_default_build_time_test_callbacks)
+
+ def import_module_test(self):
+ """Attempts to import the module that was just installed.
+
+ This test is only run if the package overrides
+ :py:attr:`import_modules` with a list of module names."""
+
+ # Make sure we are importing the installed modules,
+ # not the ones in the current directory
+ with working_dir('..'):
+ for module in self.import_modules:
+ self.python('-c', 'import {0}'.format(module))
+
+ run_after('install')(PackageBase._run_default_install_time_test_callbacks)
+
# Check that self.prefix is there after installation
run_after('install')(PackageBase.sanity_check_prefix)
diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py
index 3a42510245..622ef4d96c 100644
--- a/lib/spack/spack/cmd/__init__.py
+++ b/lib/spack/spack/cmd/__init__.py
@@ -22,6 +22,8 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+from __future__ import print_function
+
import os
import re
import sys
@@ -186,7 +188,7 @@ def display_specs(specs, **kwargs):
# Traverse the index and print out each package
for i, (architecture, compiler) in enumerate(sorted(index)):
if i > 0:
- print
+ print()
header = "%s{%s} / %s{%s}" % (spack.spec.architecture_color,
architecture, spack.spec.compiler_color,
@@ -205,7 +207,7 @@ def display_specs(specs, **kwargs):
for abbrv, spec in zip(abbreviated, specs):
prefix = gray_hash(spec, hlen) if hashes else ''
- print prefix + (format % (abbrv, spec.prefix))
+ print(prefix + (format % (abbrv, spec.prefix)))
elif mode == 'deps':
for spec in specs:
diff --git a/lib/spack/spack/cmd/arch.py b/lib/spack/spack/cmd/arch.py
index 5b9daf9dea..1079e7f215 100644
--- a/lib/spack/spack/cmd/arch.py
+++ b/lib/spack/spack/cmd/arch.py
@@ -22,6 +22,8 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+from __future__ import print_function
+
import spack.architecture as architecture
description = "print architecture information about this machine"
@@ -36,6 +38,6 @@ def setup_parser(subparser):
def arch(parser, args):
if args.platform:
- print architecture.platform()
+ print(architecture.platform())
else:
- print architecture.sys_type()
+ print(architecture.sys_type())
diff --git a/lib/spack/spack/cmd/build.py b/lib/spack/spack/cmd/build.py
index 6a90af907d..90157a85af 100644
--- a/lib/spack/spack/cmd/build.py
+++ b/lib/spack/spack/cmd/build.py
@@ -31,7 +31,8 @@ description = 'stops at build stage when installing a package, if possible'
build_system_to_phase = {
CMakePackage: 'build',
AutotoolsPackage: 'build',
- PythonPackage: 'build'
+ PythonPackage: 'build',
+ PerlPackage: 'build'
}
diff --git a/lib/spack/spack/cmd/common/arguments.py b/lib/spack/spack/cmd/common/arguments.py
index a8bdcf692f..3115501439 100644
--- a/lib/spack/spack/cmd/common/arguments.py
+++ b/lib/spack/spack/cmd/common/arguments.py
@@ -81,7 +81,7 @@ _arguments['constraint'] = Args(
_arguments['module_type'] = Args(
'-m', '--module-type',
choices=spack.modules.module_types.keys(),
- default=spack.modules.module_types.keys()[0],
+ default=list(spack.modules.module_types.keys())[0],
help='type of module files [default: %(default)s]')
_arguments['yes_to_all'] = Args(
diff --git a/lib/spack/spack/cmd/compiler.py b/lib/spack/spack/cmd/compiler.py
index 22f3b3f26a..6067d44c5e 100644
--- a/lib/spack/spack/cmd/compiler.py
+++ b/lib/spack/spack/cmd/compiler.py
@@ -22,8 +22,11 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+from __future__ import print_function
+
import argparse
import sys
+from six import iteritems
import llnl.util.tty as tty
import spack.compilers
@@ -142,36 +145,36 @@ def compiler_info(args):
tty.error("No compilers match spec %s" % cspec)
else:
for c in compilers:
- print str(c.spec) + ":"
- print "\tpaths:"
+ print(str(c.spec) + ":")
+ print("\tpaths:")
for cpath in ['cc', 'cxx', 'f77', 'fc']:
- print "\t\t%s = %s" % (cpath, getattr(c, cpath, None))
+ print("\t\t%s = %s" % (cpath, getattr(c, cpath, None)))
if c.flags:
- print "\tflags:"
- for flag, flag_value in c.flags.iteritems():
- print "\t\t%s = %s" % (flag, flag_value)
+ print("\tflags:")
+ for flag, flag_value in iteritems(c.flags):
+ print("\t\t%s = %s" % (flag, flag_value))
if len(c.environment) != 0:
if len(c.environment['set']) != 0:
- print "\tenvironment:"
- print "\t set:"
- for key, value in c.environment['set'].iteritems():
- print "\t %s = %s" % (key, value)
+ print("\tenvironment:")
+ print("\t set:")
+ for key, value in iteritems(c.environment['set']):
+ print("\t %s = %s" % (key, value))
if c.extra_rpaths:
- print "\tExtra rpaths:"
+ print("\tExtra rpaths:")
for extra_rpath in c.extra_rpaths:
- print "\t\t%s" % extra_rpath
- print "\tmodules = %s" % c.modules
- print "\toperating system = %s" % c.operating_system
+ print("\t\t%s" % extra_rpath)
+ print("\tmodules = %s" % c.modules)
+ print("\toperating system = %s" % c.operating_system)
def compiler_list(args):
tty.msg("Available compilers")
index = index_by(spack.compilers.all_compilers(scope=args.scope),
lambda c: (c.spec.name, c.operating_system, c.target))
- ordered_sections = sorted(index.items(), key=lambda (k, v): k)
+ ordered_sections = sorted(index.items(), key=lambda item: item[0])
for i, (key, compilers) in enumerate(ordered_sections):
if i >= 1:
- print
+ print()
name, os, target = key
os_str = os
if target:
diff --git a/lib/spack/spack/cmd/configure.py b/lib/spack/spack/cmd/configure.py
index 7b1ef04522..037705f480 100644
--- a/lib/spack/spack/cmd/configure.py
+++ b/lib/spack/spack/cmd/configure.py
@@ -22,7 +22,6 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-
import argparse
import llnl.util.tty as tty
@@ -36,7 +35,8 @@ description = 'stops at configuration stage when installing a package, if possib
build_system_to_phase = {
CMakePackage: 'cmake',
- AutotoolsPackage: 'configure'
+ AutotoolsPackage: 'configure',
+ PerlPackage: 'configure'
}
diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py
index 14b213a756..906c7e1aec 100644
--- a/lib/spack/spack/cmd/create.py
+++ b/lib/spack/spack/cmd/create.py
@@ -31,13 +31,13 @@ import llnl.util.tty as tty
import spack
import spack.cmd
import spack.cmd.checksum
-import spack.url
import spack.util.web
from llnl.util.filesystem import mkdirp
from spack.repository import Repo
from spack.spec import Spec
from spack.util.executable import which
from spack.util.naming import *
+from spack.url import *
description = "create a new package file"
@@ -268,6 +268,45 @@ class RPackageTemplate(PackageTemplate):
super(RPackageTemplate, self).__init__(name, *args)
+class PerlmakePackageTemplate(PackageTemplate):
+ """Provides appropriate overrides for Perl extensions
+ that come with a Makefile.PL"""
+ base_class_name = 'PerlPackage'
+
+ dependencies = """\
+ # FIXME: Add dependencies if required:
+ # depends_on('perl-foo')
+ # depends_on('barbaz', type=('build', 'link', 'run'))"""
+
+ body = """\
+ # FIXME: If non-standard arguments are used for configure step:
+ # def configure_args(self):
+ # return ['my', 'configure', 'args']
+
+ # FIXME: in unusual cases, it may be necessary to override methods for
+ # configure(), build(), check() or install()."""
+
+ def __init__(self, name, *args):
+ # If the user provided `--name perl-cpp`, don't rename it perl-perl-cpp
+ if not name.startswith('perl-'):
+ # Make it more obvious that we are renaming the package
+ tty.msg("Changing package name from {0} to perl-{0}".format(name))
+ name = 'perl-{0}'.format(name)
+
+ super(PerlmakePackageTemplate, self).__init__(name, *args)
+
+
+class PerlbuildPackageTemplate(PerlmakePackageTemplate):
+ """Provides appropriate overrides for Perl extensions
+ that come with a Build.PL instead of a Makefile.PL"""
+ dependencies = """\
+ depends_on('perl-module-build', type='build')
+
+ # FIXME: Add additional dependencies if required:
+ # depends_on('perl-foo')
+ # depends_on('barbaz', type=('build', 'link', 'run'))"""
+
+
class OctavePackageTemplate(PackageTemplate):
"""Provides appropriate overrides for octave packages"""
@@ -305,6 +344,8 @@ templates = {
'bazel': BazelPackageTemplate,
'python': PythonPackageTemplate,
'r': RPackageTemplate,
+ 'perlmake': PerlmakePackageTemplate,
+ 'perlbuild': PerlbuildPackageTemplate,
'octave': OctavePackageTemplate,
'generic': PackageTemplate
}
@@ -341,6 +382,10 @@ class BuildSystemGuesser:
can take a peek at the fetched tarball and discern the build system it uses
"""
+ def __init__(self):
+ """Sets the default build system."""
+ self.build_system = 'generic'
+
def __call__(self, stage, url):
"""Try to guess the type of build system used by a project based on
the contents of its archive or the URL it was downloaded from."""
@@ -363,7 +408,9 @@ class BuildSystemGuesser:
(r'/SConstruct$', 'scons'),
(r'/setup.py$', 'python'),
(r'/NAMESPACE$', 'r'),
- (r'/WORKSPACE$', 'bazel')
+ (r'/WORKSPACE$', 'bazel'),
+ (r'/Build.PL$', 'perlbuild'),
+ (r'/Makefile.PL$', 'perlmake'),
]
# Peek inside the compressed file.
@@ -384,14 +431,11 @@ class BuildSystemGuesser:
# Determine the build system based on the files contained
# in the archive.
- build_system = 'generic'
for pattern, bs in clues:
if any(re.search(pattern, l) for l in lines):
- build_system = bs
+ self.build_system = bs
break
- self.build_system = build_system
-
def get_name(args):
"""Get the name of the package based on the supplied arguments.
@@ -415,9 +459,9 @@ def get_name(args):
elif args.url:
# Try to guess the package name based on the URL
try:
- name = spack.url.parse_name(args.url)
+ name = parse_name(args.url)
tty.msg("This looks like a URL for {0}".format(name))
- except spack.url.UndetectableNameError:
+ except UndetectableNameError:
tty.die("Couldn't guess a name for this package.",
" Please report this bug. In the meantime, try running:",
" `spack create --name <name> <url>`")
@@ -472,11 +516,16 @@ def get_versions(args, name):
if args.url:
# Find available versions
- url_dict = spack.util.web.find_versions_of_archive(args.url)
+ try:
+ url_dict = spack.util.web.find_versions_of_archive(args.url)
+ except UndetectableVersionError:
+ # Use fake versions
+ tty.warn("Couldn't detect version in: {0}".format(args.url))
+ return versions, guesser
if not url_dict:
# If no versions were found, revert to what the user provided
- version = spack.url.parse_version(args.url)
+ version = parse_version(args.url)
url_dict = {version: args.url}
versions = spack.cmd.checksum.get_checksums(
@@ -568,6 +617,7 @@ def create(parser, args):
url = get_url(args)
versions, guesser = get_versions(args, name)
build_system = get_build_system(args, guesser)
+ name = simplify_name(name)
# Create the package template object
PackageClass = templates[build_system]
diff --git a/lib/spack/spack/cmd/dependents.py b/lib/spack/spack/cmd/dependents.py
index 42181b5502..c752ffb943 100644
--- a/lib/spack/spack/cmd/dependents.py
+++ b/lib/spack/spack/cmd/dependents.py
@@ -50,4 +50,4 @@ def dependents(parser, args):
if deps:
spack.cmd.display_specs(deps)
else:
- print "No dependents"
+ print("No dependents")
diff --git a/lib/spack/spack/cmd/env.py b/lib/spack/spack/cmd/env.py
index 49fc48700c..ed18940ac0 100644
--- a/lib/spack/spack/cmd/env.py
+++ b/lib/spack/spack/cmd/env.py
@@ -22,8 +22,11 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+from __future__ import print_function
+
import os
import argparse
+
import llnl.util.tty as tty
import spack.cmd
import spack.build_environment as build_env
@@ -64,7 +67,7 @@ def env(parser, args):
if not cmd:
# If no command act like the "env" command and print out env vars.
for key, val in os.environ.items():
- print "%s=%s" % (key, val)
+ print("%s=%s" % (key, val))
else:
# Otherwise execute the command with the new environment
diff --git a/lib/spack/spack/cmd/flake8.py b/lib/spack/spack/cmd/flake8.py
index d5ed9adf18..a6dc941190 100644
--- a/lib/spack/spack/cmd/flake8.py
+++ b/lib/spack/spack/cmd/flake8.py
@@ -22,6 +22,8 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+from __future__ import print_function
+
import re
import os
import sys
@@ -68,7 +70,7 @@ exemptions = {
# exemptions applied to all files.
r'.py$': {
# Exempt lines with URLs from overlong line errors.
- 501: [r'(https?|file)\:']
+ 501: [r'(https?|ftp|file)\:']
},
}
@@ -127,7 +129,7 @@ def filter_file(source, dest, output=False):
for code, patterns in errors.items():
for pattern in patterns:
if pattern.search(line):
- line += (" # NOQA: ignore=%d" % code)
+ line += (" # NOQA: E%d" % code)
break
oline = line + '\n'
@@ -175,12 +177,12 @@ def flake8(parser, args):
file_list = changed_files()
shutil.copy('.flake8', os.path.join(temp, '.flake8'))
- print '======================================================='
- print 'flake8: running flake8 code checks on spack.'
- print
- print 'Modified files:'
+ print('=======================================================')
+ print('flake8: running flake8 code checks on spack.')
+ print()
+ print('Modified files:')
for filename in file_list:
- print " %s" % filename.strip()
+ print(" %s" % filename.strip())
print('=======================================================')
# filter files into a temporary directory with exemptions added.
@@ -196,7 +198,7 @@ def flake8(parser, args):
if args.root_relative:
# print results relative to repo root.
- print output
+ print(output)
else:
# print results relative to current working directory
def cwd_relative(path):
@@ -204,16 +206,16 @@ def flake8(parser, args):
os.path.join(spack.prefix, path.group(1)), os.getcwd())
for line in output.split('\n'):
- print re.sub(r'^(.*): \[', cwd_relative, line)
+ print(re.sub(r'^(.*): \[', cwd_relative, line))
if flake8.returncode != 0:
- print "Flake8 found errors."
+ print("Flake8 found errors.")
sys.exit(1)
else:
- print "Flake8 checks were clean."
+ print("Flake8 checks were clean.")
finally:
if args.keep_temp:
- print "temporary files are in ", temp
+ print("temporary files are in ", temp)
else:
shutil.rmtree(temp, ignore_errors=True)
diff --git a/lib/spack/spack/cmd/graph.py b/lib/spack/spack/cmd/graph.py
index 414b6d78ec..ee401d8fb7 100644
--- a/lib/spack/spack/cmd/graph.py
+++ b/lib/spack/spack/cmd/graph.py
@@ -22,8 +22,9 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import argparse
+from __future__ import print_function
+import argparse
import llnl.util.tty as tty
import spack
@@ -96,5 +97,5 @@ def graph(parser, args):
elif specs: # ascii is default: user doesn't need to provide it explicitly
graph_ascii(specs[0], debug=spack.debug, deptype=deptype)
for spec in specs[1:]:
- print # extra line bt/w independent graphs
+ print() # extra line between independent graphs
graph_ascii(spec, debug=spack.debug)
diff --git a/lib/spack/spack/cmd/info.py b/lib/spack/spack/cmd/info.py
index 1dd0ee4e78..799471ffcc 100644
--- a/lib/spack/spack/cmd/info.py
+++ b/lib/spack/spack/cmd/info.py
@@ -22,7 +22,10 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+from __future__ import print_function
+
import textwrap
+
from llnl.util.tty.colify import *
import spack
import spack.fetch_strategy as fs
@@ -50,12 +53,12 @@ def print_text_info(pkg):
"""Print out a plain text description of a package."""
header = "{0}: ".format(pkg.build_system_class)
- print header, pkg.name
+ print(header, pkg.name)
whitespaces = ''.join([' '] * (len(header) - len("Homepage: ")))
- print "Homepage:", whitespaces, pkg.homepage
+ print("Homepage:", whitespaces, pkg.homepage)
- print
- print "Safe versions: "
+ print()
+ print("Safe versions: ")
if not pkg.versions:
print(" None")
@@ -63,20 +66,20 @@ def print_text_info(pkg):
pad = padder(pkg.versions, 4)
for v in reversed(sorted(pkg.versions)):
f = fs.for_package_version(pkg, v)
- print " %s%s" % (pad(v), str(f))
+ print(" %s%s" % (pad(v), str(f)))
- print
- print "Variants:"
+ print()
+ print("Variants:")
if not pkg.variants:
- print " None"
+ print(" None")
else:
pad = padder(pkg.variants, 4)
maxv = max(len(v) for v in sorted(pkg.variants))
fmt = "%%-%ss%%-10s%%s" % (maxv + 4)
- print " " + fmt % ('Name', 'Default', 'Description')
- print
+ print(" " + fmt % ('Name', 'Default', 'Description'))
+ print()
for name in sorted(pkg.variants):
v = pkg.variants[name]
default = 'on' if v.default else 'off'
@@ -85,26 +88,26 @@ def print_text_info(pkg):
lines[1:] = [" " + (" " * maxv) + l for l in lines[1:]]
desc = "\n".join(lines)
- print " " + fmt % (name, default, desc)
+ print(" " + fmt % (name, default, desc))
- print
- print "Installation Phases:"
+ print()
+ print("Installation Phases:")
phase_str = ''
for phase in pkg.phases:
phase_str += " {0}".format(phase)
- print phase_str
+ print(phase_str)
for deptype in ('build', 'link', 'run'):
- print
- print "%s Dependencies:" % deptype.capitalize()
+ print()
+ print("%s Dependencies:" % deptype.capitalize())
deps = sorted(pkg.dependencies_of_type(deptype))
if deps:
colify(deps, indent=4)
else:
- print " None"
+ print(" None")
- print
- print "Virtual Packages: "
+ print()
+ print("Virtual Packages: ")
if pkg.provided:
inverse_map = {}
for spec, whens in pkg.provided.items():
@@ -113,17 +116,17 @@ def print_text_info(pkg):
inverse_map[when] = set()
inverse_map[when].add(spec)
for when, specs in reversed(sorted(inverse_map.items())):
- print " %s provides %s" % (
- when, ', '.join(str(s) for s in specs))
+ print(" %s provides %s" % (
+ when, ', '.join(str(s) for s in specs)))
else:
- print " None"
+ print(" None")
- print
- print "Description:"
+ print()
+ print("Description:")
if pkg.__doc__:
- print pkg.format_doc(indent=4)
+ print(pkg.format_doc(indent=4))
else:
- print " None"
+ print(" None")
def info(parser, args):
diff --git a/lib/spack/spack/cmd/list.py b/lib/spack/spack/cmd/list.py
index b5b699dccd..bcfb092945 100644
--- a/lib/spack/spack/cmd/list.py
+++ b/lib/spack/spack/cmd/list.py
@@ -22,12 +22,14 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+from __future__ import print_function
+
import argparse
import cgi
import fnmatch
import re
import sys
-from StringIO import StringIO
+from six import StringIO
import llnl.util.tty as tty
import spack
@@ -123,42 +125,42 @@ def rst(pkgs):
pkgs = [spack.repo.get(name) for name in pkg_names]
print('.. _package-list:')
- print('')
+ print()
print('============')
print('Package List')
print('============')
- print('')
+ print()
print('This is a list of things you can install using Spack. It is')
print('automatically generated based on the packages in the latest Spack')
print('release.')
- print('')
+ print()
print('Spack currently has %d mainline packages:' % len(pkgs))
- print('')
+ print()
print(rst_table('`%s`_' % p for p in pkg_names))
- print('')
+ print()
# Output some text for each package.
for pkg in pkgs:
print('-----')
- print('')
+ print()
print('.. _%s:' % pkg.name)
- print('')
+ print()
# Must be at least 2 long, breaks for single letter packages like R.
print('-' * max(len(pkg.name), 2))
print(pkg.name)
print('-' * max(len(pkg.name), 2))
- print('')
+ print()
print('Homepage:')
print(' * `%s <%s>`__' % (cgi.escape(pkg.homepage), pkg.homepage))
- print('')
+ print()
print('Spack package:')
print(' * `%s/package.py <%s>`__' % (pkg.name, github_url(pkg)))
- print('')
+ print()
if pkg.versions:
print('Versions:')
print(' ' + ', '.join(str(v) for v in
reversed(sorted(pkg.versions))))
- print('')
+ print()
for deptype in spack.alldeps:
deps = pkg.dependencies_of_type(deptype)
@@ -166,11 +168,11 @@ def rst(pkgs):
print('%s Dependencies' % deptype.capitalize())
print(' ' + ', '.join('%s_' % d if d in pkg_names
else d for d in deps))
- print('')
+ print()
print('Description:')
print(pkg.format_doc(indent=2))
- print('')
+ print()
def list(parser, args):
diff --git a/lib/spack/spack/cmd/location.py b/lib/spack/spack/cmd/location.py
index c82b7072f9..d1a7825630 100644
--- a/lib/spack/spack/cmd/location.py
+++ b/lib/spack/spack/cmd/location.py
@@ -22,8 +22,9 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import argparse
+from __future__ import print_function
+import argparse
import llnl.util.tty as tty
import spack
@@ -70,16 +71,16 @@ def setup_parser(subparser):
def location(parser, args):
if args.module_dir:
- print spack.module_path
+ print(spack.module_path)
elif args.spack_root:
- print spack.prefix
+ print(spack.prefix)
elif args.packages:
- print spack.repo.first_repo().root
+ print(spack.repo.first_repo().root)
elif args.stages:
- print spack.stage_path
+ print(spack.stage_path)
else:
specs = spack.cmd.parse_specs(args.spec)
@@ -91,14 +92,14 @@ def location(parser, args):
if args.install_dir:
# install_dir command matches against installed specs.
spec = spack.cmd.disambiguate_spec(specs[0])
- print spec.prefix
+ print(spec.prefix)
else:
spec = specs[0]
if args.package_dir:
# This one just needs the spec name.
- print spack.repo.dirname_for_package_name(spec.name)
+ print(spack.repo.dirname_for_package_name(spec.name))
else:
# These versions need concretized specs.
@@ -106,11 +107,11 @@ def location(parser, args):
pkg = spack.repo.get(spec)
if args.stage_dir:
- print pkg.stage.path
+ print(pkg.stage.path)
else: # args.build_dir is the default.
if not pkg.stage.source_path:
tty.die("Build directory does not exist yet. "
"Run this to create it:",
"spack stage " + " ".join(args.spec))
- print pkg.stage.source_path
+ print(pkg.stage.source_path)
diff --git a/lib/spack/spack/cmd/md5.py b/lib/spack/spack/cmd/md5.py
index 7940d1327b..fc205cc693 100644
--- a/lib/spack/spack/cmd/md5.py
+++ b/lib/spack/spack/cmd/md5.py
@@ -25,7 +25,7 @@
import argparse
import hashlib
import os
-from urlparse import urlparse
+from six.moves.urllib.parse import urlparse
import llnl.util.tty as tty
import spack.util.crypto
diff --git a/lib/spack/spack/cmd/mirror.py b/lib/spack/spack/cmd/mirror.py
index 2db75a0b1f..528fcbfc3f 100644
--- a/lib/spack/spack/cmd/mirror.py
+++ b/lib/spack/spack/cmd/mirror.py
@@ -141,7 +141,7 @@ def mirror_list(args):
fmt = "%%-%ds%%s" % (max_len + 4)
for name in mirrors:
- print fmt % (name, mirrors[name])
+ print(fmt % (name, mirrors[name]))
def _read_specs_from_file(filename):
@@ -152,7 +152,7 @@ def _read_specs_from_file(filename):
s = Spec(string)
s.package
specs.append(s)
- except SpackError, e:
+ except SpackError as e:
tty.die("Parse error in %s, line %d:" % (args.file, i + 1),
">>> " + string, str(e))
return specs
diff --git a/lib/spack/spack/cmd/pkg.py b/lib/spack/spack/cmd/pkg.py
index 45104a9ff2..12dcb81792 100644
--- a/lib/spack/spack/cmd/pkg.py
+++ b/lib/spack/spack/cmd/pkg.py
@@ -22,6 +22,8 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+from __future__ import print_function
+
import os
import argparse
@@ -71,13 +73,16 @@ def setup_parser(subparser):
help="revision to compare to rev1 (default is HEAD)")
-def get_git():
+def get_git(fatal=True):
# cd to spack prefix to do git operations
os.chdir(spack.prefix)
# If this is a non-git version of spack, give up.
if not os.path.isdir('.git'):
- tty.die("No git repo in %s. Can't use 'spack pkg'" % spack.prefix)
+ if fatal:
+ tty.die("No git repo in %s. Can't use 'spack pkg'" % spack.prefix)
+ else:
+ return None
return which("git", required=True)
@@ -118,13 +123,13 @@ def pkg_diff(args):
u1, u2 = diff_packages(args.rev1, args.rev2)
if u1:
- print "%s:" % args.rev1
+ print("%s:" % args.rev1)
colify(sorted(u1), indent=4)
if u1:
- print
+ print()
if u2:
- print "%s:" % args.rev2
+ print("%s:" % args.rev2)
colify(sorted(u2), indent=4)
diff --git a/lib/spack/spack/cmd/repo.py b/lib/spack/spack/cmd/repo.py
index 1881654cac..dd75f148c2 100644
--- a/lib/spack/spack/cmd/repo.py
+++ b/lib/spack/spack/cmd/repo.py
@@ -22,6 +22,8 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+from __future__ import print_function
+
import os
import llnl.util.tty as tty
@@ -161,7 +163,7 @@ def repo_list(args):
max_ns_len = max(len(r.namespace) for r in repos)
for repo in repos:
fmt = "%%-%ds%%s" % (max_ns_len + 4)
- print fmt % (repo.namespace, repo.root)
+ print(fmt % (repo.namespace, repo.root))
def repo(parser, args):
diff --git a/lib/spack/spack/cmd/spec.py b/lib/spack/spack/cmd/spec.py
index 9eea404bc7..d89707f230 100644
--- a/lib/spack/spack/cmd/spec.py
+++ b/lib/spack/spack/cmd/spec.py
@@ -22,8 +22,9 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import argparse
+from __future__ import print_function
+import argparse
import spack
import spack.cmd
import spack.cmd.common.arguments as arguments
@@ -69,20 +70,20 @@ def spec(parser, args):
# With -y, just print YAML to output.
if args.yaml:
spec.concretize()
- print spec.to_yaml()
+ print(spec.to_yaml())
continue
# Print some diagnostic info by default.
- print "Input spec"
- print "--------------------------------"
- print spec.tree(**kwargs)
+ print("Input spec")
+ print("--------------------------------")
+ print(spec.tree(**kwargs))
- print "Normalized"
- print "--------------------------------"
+ print("Normalized")
+ print("--------------------------------")
spec.normalize()
- print spec.tree(**kwargs)
+ print(spec.tree(**kwargs))
- print "Concretized"
- print "--------------------------------"
+ print("Concretized")
+ print("--------------------------------")
spec.concretize()
- print spec.tree(**kwargs)
+ print(spec.tree(**kwargs))
diff --git a/lib/spack/spack/cmd/test.py b/lib/spack/spack/cmd/test.py
index c569a1bc88..9384e3a9e6 100644
--- a/lib/spack/spack/cmd/test.py
+++ b/lib/spack/spack/cmd/test.py
@@ -22,12 +22,14 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+from __future__ import print_function
+
import sys
import os
import re
import argparse
import pytest
-from StringIO import StringIO
+from six import StringIO
from llnl.util.filesystem import *
from llnl.util.tty.colify import colify
@@ -79,7 +81,7 @@ def do_list(args, unknown_args):
output_lines.append(
os.path.basename(name).replace('.py', ''))
else:
- print indent + name
+ print(indent + name)
if args.list:
colify(output_lines)
diff --git a/lib/spack/spack/cmd/url.py b/lib/spack/spack/cmd/url.py
index 6823f0febd..1128e08a43 100644
--- a/lib/spack/spack/cmd/url.py
+++ b/lib/spack/spack/cmd/url.py
@@ -31,6 +31,7 @@ import spack
from llnl.util import tty
from spack.url import *
from spack.util.web import find_versions_of_archive
+from spack.util.naming import simplify_name
description = "debugging tool for url parsing"
@@ -66,19 +67,26 @@ def setup_parser(subparser):
'-n', '--incorrect-name', action='store_true',
help='only list urls for which the name was incorrectly parsed')
excl_args.add_argument(
+ '-N', '--correct-name', action='store_true',
+ help='only list urls for which the name was correctly parsed')
+ excl_args.add_argument(
'-v', '--incorrect-version', action='store_true',
help='only list urls for which the version was incorrectly parsed')
+ excl_args.add_argument(
+ '-V', '--correct-version', action='store_true',
+ help='only list urls for which the version was correctly parsed')
- # Test
+ # Summary
sp.add_parser(
- 'test', help='print a summary of how well we are parsing package urls')
+ 'summary',
+ help='print a summary of how well we are parsing package urls')
def url(parser, args):
action = {
- 'parse': url_parse,
- 'list': url_list,
- 'test': url_test
+ 'parse': url_parse,
+ 'list': url_list,
+ 'summary': url_summary
}
action[args.subcommand](args)
@@ -116,6 +124,10 @@ def url_parse(args):
tty.msg('Spidering for versions:')
versions = find_versions_of_archive(url)
+ if not versions:
+ print(' Found no versions for {0}'.format(name))
+ return
+
max_len = max(len(str(v)) for v in versions)
for v in sorted(versions):
@@ -145,7 +157,7 @@ def url_list(args):
return len(urls)
-def url_test(args):
+def url_summary(args):
# Collect statistics on how many URLs were correctly parsed
total_urls = 0
correct_names = 0
@@ -205,19 +217,19 @@ def url_test(args):
correct_versions, total_urls, correct_versions / total_urls))
print()
- tty.msg('Statistics on name regular expresions:')
+ tty.msg('Statistics on name regular expressions:')
print()
- print(' Index Count Regular Expresion')
+ print(' Index Count Regular Expression')
for ni in name_regex_dict:
print(' {0:>3}: {1:>6} r{2!r}'.format(
ni, name_count_dict[ni], name_regex_dict[ni]))
print()
- tty.msg('Statistics on version regular expresions:')
+ tty.msg('Statistics on version regular expressions:')
print()
- print(' Index Count Regular Expresion')
+ print(' Index Count Regular Expression')
for vi in version_regex_dict:
print(' {0:>3}: {1:>6} r{2!r}'.format(
vi, version_count_dict[vi], version_regex_dict[vi]))
@@ -257,22 +269,38 @@ def url_list_parsing(args, urls, url, pkg):
:rtype: set
"""
if url:
- if args.incorrect_name:
- # Only add URLs whose name was incorrectly parsed
+ if args.correct_name or args.incorrect_name:
+ # Attempt to parse the name
try:
name = parse_name(url)
- if not name_parsed_correctly(pkg, name):
+ if (args.correct_name and
+ name_parsed_correctly(pkg, name)):
+ # Add correctly parsed URLs
+ urls.add(url)
+ elif (args.incorrect_name and
+ not name_parsed_correctly(pkg, name)):
+ # Add incorrectly parsed URLs
urls.add(url)
except UndetectableNameError:
- urls.add(url)
- elif args.incorrect_version:
- # Only add URLs whose version was incorrectly parsed
+ if args.incorrect_name:
+ # Add incorrectly parsed URLs
+ urls.add(url)
+ elif args.correct_version or args.incorrect_version:
+ # Attempt to parse the version
try:
version = parse_version(url)
- if not version_parsed_correctly(pkg, version):
+ if (args.correct_version and
+ version_parsed_correctly(pkg, version)):
+ # Add correctly parsed URLs
+ urls.add(url)
+ elif (args.incorrect_version and
+ not version_parsed_correctly(pkg, version)):
+ # Add incorrectly parsed URLs
urls.add(url)
except UndetectableVersionError:
- urls.add(url)
+ if args.incorrect_version:
+ # Add incorrectly parsed URLs
+ urls.add(url)
else:
urls.add(url)
@@ -289,6 +317,8 @@ def name_parsed_correctly(pkg, name):
"""
pkg_name = pkg.name
+ name = simplify_name(name)
+
# After determining a name, `spack create` determines a build system.
# Some build systems prepend a special string to the front of the name.
# Since this can't be guessed from the URL, it would be unfair to say
@@ -311,9 +341,33 @@ def version_parsed_correctly(pkg, version):
:returns: True if the name was correctly parsed, else False
:rtype: bool
"""
+ version = remove_separators(version)
+
# If the version parsed from the URL is listed in a version()
# directive, we assume it was correctly parsed
for pkg_version in pkg.versions:
- if str(pkg_version) == str(version):
+ pkg_version = remove_separators(pkg_version)
+ if pkg_version == version:
return True
return False
+
+
+def remove_separators(version):
+ """Removes separator characters ('.', '_', and '-') from a version.
+
+ A version like 1.2.3 may be displayed as 1_2_3 in the URL.
+ Make sure 1.2.3, 1-2-3, 1_2_3, and 123 are considered equal.
+ Unfortunately, this also means that 1.23 and 12.3 are equal.
+
+ :param version: A version
+ :type version: str or Version
+ :returns: The version with all separator characters removed
+ :rtype: str
+ """
+ version = str(version)
+
+ version = version.replace('.', '')
+ version = version.replace('_', '')
+ version = version.replace('-', '')
+
+ return version
diff --git a/lib/spack/spack/cmd/versions.py b/lib/spack/spack/cmd/versions.py
index dacca2489b..a6f6805fb0 100644
--- a/lib/spack/spack/cmd/versions.py
+++ b/lib/spack/spack/cmd/versions.py
@@ -22,6 +22,8 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+from __future__ import print_function
+
from llnl.util.tty.colify import colify
import llnl.util.tty as tty
import spack
@@ -47,10 +49,10 @@ def versions(parser, args):
tty.msg("Remote versions (not yet checksummed):")
if not remote_versions:
if not fetched_versions:
- print " Found no versions for %s" % pkg.name
+ print(" Found no versions for %s" % pkg.name)
tty.debug("Check the list_url and list_depth attribute on the "
"package to help Spack find versions.")
else:
- print " Found no unckecksummed versions for %s" % pkg.name
+ print(" Found no unchecksummed versions for %s" % pkg.name)
else:
colify(sorted(remote_versions, reverse=True), indent=2)
diff --git a/lib/spack/spack/compiler.py b/lib/spack/spack/compiler.py
index 9e9c7cbcb4..90af900d0d 100644
--- a/lib/spack/spack/compiler.py
+++ b/lib/spack/spack/compiler.py
@@ -265,11 +265,11 @@ class Compiler(object):
full_path, prefix, suffix = key
version = detect_version(full_path)
return (version, prefix, suffix, full_path)
- except ProcessError, e:
+ except ProcessError as e:
tty.debug(
"Couldn't get version for compiler %s" % full_path, e)
return None
- except Exception, e:
+ except Exception as e:
# Catching "Exception" here is fine because it just
# means something went wrong running a candidate executable.
tty.debug("Error while executing candidate compiler %s"
diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py
index a16caa3a6c..585df23320 100644
--- a/lib/spack/spack/compilers/__init__.py
+++ b/lib/spack/spack/compilers/__init__.py
@@ -335,7 +335,7 @@ def get_compiler_duplicates(compiler_spec, arch_spec):
scope_to_compilers[scope] = compilers
cfg_file_to_duplicates = dict()
- for scope, compilers in scope_to_compilers.iteritems():
+ for scope, compilers in scope_to_compilers.items():
config_file = config_scopes[scope].get_section_filename('compilers')
cfg_file_to_duplicates[config_file] = compilers
@@ -401,7 +401,7 @@ class CompilerDuplicateError(spack.error.SpackError):
config_file_to_duplicates = get_compiler_duplicates(
compiler_spec, arch_spec)
duplicate_table = list(
- (x, len(y)) for x, y in config_file_to_duplicates.iteritems())
+ (x, len(y)) for x, y in config_file_to_duplicates.items())
descriptor = lambda num: 'time' if num == 1 else 'times'
duplicate_msg = (
lambda cfgfile, count: "{0}: {1} {2}".format(
diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py
index 126db8b780..2a5ce65fa4 100644
--- a/lib/spack/spack/concretize.py
+++ b/lib/spack/spack/concretize.py
@@ -34,86 +34,78 @@ TODO: make this customizable and allow users to configure
concretization policies.
"""
from __future__ import print_function
+from six import iteritems
+from spack.version import *
+from itertools import chain
+from ordereddict_backport import OrderedDict
+from functools_backport import reverse_order
+
import spack
import spack.spec
import spack.compilers
import spack.architecture
import spack.error
-from spack.version import *
-from functools import partial
-from itertools import chain
from spack.package_prefs import *
class DefaultConcretizer(object):
-
"""This class doesn't have any state, it just provides some methods for
concretization. You can subclass it to override just some of the
default concretization strategies, or you can override all of them.
"""
-
def _valid_virtuals_and_externals(self, spec):
"""Returns a list of candidate virtual dep providers and external
- packages that coiuld be used to concretize a spec."""
+ packages that could be used to concretize a spec.
+
+ Preferred specs come first in the list.
+ """
# First construct a list of concrete candidates to replace spec with.
candidates = [spec]
+ pref_key = lambda spec: 0 # no-op pref key
+
if spec.virtual:
- providers = spack.repo.providers_for(spec)
- if not providers:
- raise UnsatisfiableProviderSpecError(providers[0], spec)
- spec_w_preferred_providers = find_spec(
- spec,
- lambda x: pkgsort().spec_has_preferred_provider(
- x.name, spec.name))
- if not spec_w_preferred_providers:
- spec_w_preferred_providers = spec
- provider_cmp = partial(pkgsort().provider_compare,
- spec_w_preferred_providers.name,
- spec.name)
- candidates = sorted(providers, cmp=provider_cmp)
+ candidates = spack.repo.providers_for(spec)
+ if not candidates:
+ raise UnsatisfiableProviderSpecError(candidates[0], spec)
+
+ # Find nearest spec in the DAG (up then down) that has prefs.
+ spec_w_prefs = find_spec(
+ spec, lambda p: PackagePrefs.has_preferred_providers(
+ p.name, spec.name),
+ spec) # default to spec itself.
+
+ # Create a key to sort candidates by the prefs we found
+ pref_key = PackagePrefs(spec_w_prefs.name, 'providers', spec.name)
# For each candidate package, if it has externals, add those
# to the usable list. if it's not buildable, then *only* add
# the externals.
- usable = []
+ #
+ # Use an OrderedDict to avoid duplicates (use it like a set)
+ usable = OrderedDict()
for cspec in candidates:
if is_spec_buildable(cspec):
- usable.append(cspec)
+ usable[cspec] = True
+
externals = spec_externals(cspec)
for ext in externals:
if ext.satisfies(spec):
- usable.append(ext)
+ usable[ext] = True
# If nothing is in the usable list now, it's because we aren't
# allowed to build anything.
if not usable:
raise NoBuildError(spec)
- def cmp_externals(a, b):
- if a.name != b.name and (not a.external or a.external_module and
- not b.external and b.external_module):
- # We're choosing between different providers, so
- # maintain order from provider sort
- index_of_a = next(i for i in range(0, len(candidates))
- if a.satisfies(candidates[i]))
- index_of_b = next(i for i in range(0, len(candidates))
- if b.satisfies(candidates[i]))
- return index_of_a - index_of_b
-
- result = cmp_specs(a, b)
- if result != 0:
- return result
-
- # prefer external packages to internal packages.
- if a.external is None or b.external is None:
- return -cmp(a.external, b.external)
- else:
- return cmp(a.external, b.external)
-
- usable.sort(cmp=cmp_externals)
- return usable
+ # Use a sort key to order the results
+ return sorted(usable, key=lambda spec: (
+ not (spec.external or spec.external_module), # prefer externals
+ pref_key(spec), # respect prefs
+ spec.name, # group by name
+ reverse_order(spec.versions), # latest version
+ spec # natural order
+ ))
- # XXX(deptypes): Look here.
def choose_virtual_or_external(self, spec):
"""Given a list of candidate virtual and external packages, try to
find one that is most ABI compatible.
@@ -124,25 +116,16 @@ class DefaultConcretizer(object):
# Find the nearest spec in the dag that has a compiler. We'll
# use that spec to calibrate compiler compatibility.
- abi_exemplar = find_spec(spec, lambda x: x.compiler)
- if not abi_exemplar:
- abi_exemplar = spec.root
-
- # Make a list including ABI compatibility of specs with the exemplar.
- strict = [spack.abi.compatible(c, abi_exemplar) for c in candidates]
- loose = [spack.abi.compatible(c, abi_exemplar, loose=True)
- for c in candidates]
- keys = zip(strict, loose, candidates)
+ abi_exemplar = find_spec(spec, lambda x: x.compiler, spec.root)
# Sort candidates from most to least compatibility.
- # Note:
- # 1. We reverse because True > False.
- # 2. Sort is stable, so c's keep their order.
- keys.sort(key=lambda k: k[:2], reverse=True)
-
- # Pull the candidates back out and return them in order
- candidates = [c for s, l, c in keys]
- return candidates
+ # We reverse because True > False.
+ # Sort is stable, so candidates keep their order.
+ return sorted(candidates,
+ reverse=True,
+ key=lambda spec: (
+ spack.abi.compatible(spec, abi_exemplar, loose=True),
+ spack.abi.compatible(spec, abi_exemplar)))
def concretize_version(self, spec):
"""If the spec is already concrete, return. Otherwise take
@@ -162,26 +145,12 @@ class DefaultConcretizer(object):
if spec.versions.concrete:
return False
- # If there are known available versions, return the most recent
- # version that satisfies the spec
+ # List of versions we could consider, in sorted order
pkg = spec.package
+ usable = [v for v in pkg.versions
+ if any(v.satisfies(sv) for sv in spec.versions)]
- # ---------- Produce prioritized list of versions
- # Get list of preferences from packages.yaml
- preferred = pkgsort()
- # NOTE: pkgsort() == spack.package_prefs.PreferredPackages()
-
- yaml_specs = [
- x[0] for x in
- preferred._spec_for_pkgname(spec.name, 'version', None)]
- n = len(yaml_specs)
- yaml_index = dict(
- [(spc, n - index) for index, spc in enumerate(yaml_specs)])
-
- # List of versions we could consider, in sorted order
- unsorted_versions = [
- v for v in pkg.versions
- if any(v.satisfies(sv) for sv in spec.versions)]
+ yaml_prefs = PackagePrefs(spec.name, 'version')
# The keys below show the order of precedence of factors used
# to select a version when concretizing. The item with
@@ -189,12 +158,11 @@ class DefaultConcretizer(object):
#
# NOTE: When COMPARING VERSIONS, the '@develop' version is always
# larger than other versions. BUT when CONCRETIZING,
- # the largest NON-develop version is selected by
- # default.
- keys = [(
+ # the largest NON-develop version is selected by default.
+ keyfn = lambda v: (
# ------- Special direction from the user
# Respect order listed in packages.yaml
- yaml_index.get(v, -1),
+ -yaml_prefs(v),
# The preferred=True flag (packages or packages.yaml or both?)
pkg.versions.get(Version(v)).get('preferred', False),
@@ -209,15 +177,11 @@ class DefaultConcretizer(object):
# a) develop > everything (disabled by "not v.isdevelop() above)
# b) numeric > non-numeric
# c) Numeric or string comparison
- v) for v in unsorted_versions]
- keys.sort(reverse=True)
+ v)
+ usable.sort(key=keyfn, reverse=True)
- # List of versions in complete sorted order
- valid_versions = [x[-1] for x in keys]
- # --------------------------
-
- if valid_versions:
- spec.versions = ver([valid_versions[0]])
+ if usable:
+ spec.versions = ver([usable[0]])
else:
# We don't know of any SAFE versions that match the given
# spec. Grab the spec's versions and grab the highest
@@ -241,7 +205,7 @@ class DefaultConcretizer(object):
def concretize_architecture(self, spec):
"""If the spec is empty provide the defaults of the platform. If the
- architecture is not a basestring, then check if either the platform,
+ architecture is not a string type, then check if either the platform,
target or operating system are concretized. If any of the fields are
changed then return True. If everything is concretized (i.e the
architecture attribute is a namedtuple of classes) then return False.
@@ -258,16 +222,20 @@ class DefaultConcretizer(object):
spec.architecture = spack.spec.ArchSpec(sys_arch)
spec_changed = True
- default_archs = [root_arch, sys_arch]
- while not spec.architecture.concrete and default_archs:
- arch = default_archs.pop(0)
+ default_archs = list(x for x in [root_arch, sys_arch] if x)
+ for arch in default_archs:
+ if spec.architecture.concrete:
+ break
- replacement_fields = [k for k, v in arch.to_cmp_dict().iteritems()
+ replacement_fields = [k for k, v in iteritems(arch.to_cmp_dict())
if v and not getattr(spec.architecture, k)]
for field in replacement_fields:
setattr(spec.architecture, field, getattr(arch, field))
spec_changed = True
+ if not spec.architecture.concrete:
+ raise InsufficientArchitectureInfoError(spec, default_archs)
+
return spec_changed
def concretize_variants(self, spec):
@@ -276,16 +244,15 @@ class DefaultConcretizer(object):
the package specification.
"""
changed = False
- preferred_variants = pkgsort().spec_preferred_variants(
- spec.package_class.name)
+ preferred_variants = PackagePrefs.preferred_variants(spec.name)
for name, variant in spec.package_class.variants.items():
if name not in spec.variants:
changed = True
if name in preferred_variants:
spec.variants[name] = preferred_variants.get(name)
else:
- spec.variants[name] = \
- spack.spec.VariantSpec(name, variant.default)
+ spec.variants[name] = spack.spec.VariantSpec(
+ name, variant.default)
return changed
def concretize_compiler(self, spec):
@@ -327,12 +294,9 @@ class DefaultConcretizer(object):
spec.compiler, spec.architecture)
return False
- # Find the another spec that has a compiler, or the root if none do
+ # Find another spec that has a compiler, or the root if none do
other_spec = spec if spec.compiler else find_spec(
- spec, lambda x: x.compiler)
-
- if not other_spec:
- other_spec = spec.root
+ spec, lambda x: x.compiler, spec.root)
other_compiler = other_spec.compiler
assert(other_spec)
@@ -351,9 +315,9 @@ class DefaultConcretizer(object):
if not compiler_list:
# No compiler with a satisfactory spec was found
raise UnavailableCompilerVersionError(other_compiler)
- cmp_compilers = partial(
- pkgsort().compiler_compare, other_spec.name)
- matches = sorted(compiler_list, cmp=cmp_compilers)
+
+ ppk = PackagePrefs(other_spec.name, 'compiler')
+ matches = sorted(compiler_list, key=ppk)
# copy concrete version into other_compiler
try:
@@ -418,7 +382,7 @@ class DefaultConcretizer(object):
return ret
-def find_spec(spec, condition):
+def find_spec(spec, condition, default=None):
"""Searches the dag from spec in an intelligent order and looks
for a spec that matches a condition"""
# First search parents, then search children
@@ -445,7 +409,7 @@ def find_spec(spec, condition):
if condition(spec):
return spec
- return None # Nothing matched the condition.
+ return default # Nothing matched the condition; return default.
def _compiler_concretization_failure(compiler_spec, arch):
@@ -464,7 +428,7 @@ def _compiler_concretization_failure(compiler_spec, arch):
class NoCompilersForArchError(spack.error.SpackError):
def __init__(self, arch, available_os_targets):
err_msg = ("No compilers found"
- " for operating system %s and target %s."
+ " for operating system %s and target %s."
"\nIf previous installations have succeeded, the"
" operating system may have been updated." %
(arch.platform_os, arch.target))
@@ -483,7 +447,6 @@ class NoCompilersForArchError(spack.error.SpackError):
class UnavailableCompilerVersionError(spack.error.SpackError):
-
"""Raised when there is no available compiler that satisfies a
compiler spec."""
@@ -498,7 +461,6 @@ class UnavailableCompilerVersionError(spack.error.SpackError):
class NoValidVersionError(spack.error.SpackError):
-
"""Raised when there is no way to have a concrete version for a
particular spec."""
@@ -508,6 +470,17 @@ class NoValidVersionError(spack.error.SpackError):
% (spec.name, spec.versions))
+class InsufficientArchitectureInfoError(spack.error.SpackError):
+
+ """Raised when details on architecture cannot be collected from the
+ system"""
+
+ def __init__(self, spec, archs):
+ super(InsufficientArchitectureInfoError, self).__init__(
+ "Cannot determine necessary architecture information for '%s': %s"
+ % (spec.name, str(archs)))
+
+
class NoBuildError(spack.error.SpackError):
"""Raised when a package is configured with the buildable option False, but
no satisfactory external versions can be found"""
diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py
index 56c6421457..7c3d614aee 100644
--- a/lib/spack/spack/config.py
+++ b/lib/spack/spack/config.py
@@ -52,6 +52,8 @@ import copy
import os
import re
import sys
+from six import string_types
+from six import iteritems
import yaml
import jsonschema
@@ -108,7 +110,7 @@ def extend_with_default(validator_class):
"patternProperties"]
def set_defaults(validator, properties, instance, schema):
- for property, subschema in properties.iteritems():
+ for property, subschema in iteritems(properties):
if "default" in subschema:
instance.setdefault(property, subschema["default"])
for err in validate_properties(
@@ -116,10 +118,10 @@ def extend_with_default(validator_class):
yield err
def set_pp_defaults(validator, properties, instance, schema):
- for property, subschema in properties.iteritems():
+ for property, subschema in iteritems(properties):
if "default" in subschema:
if isinstance(instance, dict):
- for key, val in instance.iteritems():
+ for key, val in iteritems(instance):
if re.match(property, key) and val is None:
instance[key] = subschema["default"]
@@ -306,8 +308,8 @@ def _mark_overrides(data):
elif isinstance(data, dict):
marked = {}
- for key, val in data.iteritems():
- if isinstance(key, basestring) and key.endswith(':'):
+ for key, val in iteritems(data):
+ if isinstance(key, string_types) and key.endswith(':'):
key = syaml.syaml_str(key[:-1])
key.override = True
marked[key] = _mark_overrides(val)
@@ -348,7 +350,7 @@ def _merge_yaml(dest, source):
# Source dict is merged into dest.
elif they_are(dict):
- for sk, sv in source.iteritems():
+ for sk, sv in iteritems(source):
if override(sk) or sk not in dest:
# if sk ended with ::, or if it's new, completely override
dest[sk] = copy.copy(sv)
diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py
index d3fc03fb40..c63da4cf2e 100644
--- a/lib/spack/spack/database.py
+++ b/lib/spack/spack/database.py
@@ -41,6 +41,8 @@ filesystem.
"""
import os
import socket
+from six import string_types
+from six import iteritems
from yaml.error import MarkedYAMLError, YAMLError
@@ -260,7 +262,7 @@ class Database(object):
raise ValueError("Invalid database format: %s" % format)
try:
- if isinstance(stream, basestring):
+ if isinstance(stream, string_types):
with open(stream, 'r') as f:
fdata = load(f)
else:
@@ -511,7 +513,7 @@ class Database(object):
new_spec, path, installed, ref_count=0, explicit=explicit)
# Connect dependencies from the DB to the new copy.
- for name, dep in spec.dependencies_dict(_tracked_deps).iteritems():
+ for name, dep in iteritems(spec.dependencies_dict(_tracked_deps)):
dkey = dep.spec.dag_hash()
new_spec._add_dependency(self._data[dkey].spec, dep.deptypes)
self._data[dkey].ref_count += 1
@@ -619,13 +621,12 @@ class Database(object):
Return the specs of all packages that extend
the given spec
"""
- for s in self.query():
+ for spec in self.query():
try:
- if s.package.extends(extendee_spec):
- yield s.package
- except spack.repository.UnknownPackageError:
+ spack.store.layout.check_activated(extendee_spec, spec)
+ yield spec.package
+ except spack.directory_layout.NoSuchExtensionError:
continue
- # skips unknown packages
# TODO: conditional way to do this instead of catching exceptions
def query(self, query_spec=any, known=any, installed=True, explicit=any):
diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py
index 58eabb9e3b..e2219d1f49 100644
--- a/lib/spack/spack/directives.py
+++ b/lib/spack/spack/directives.py
@@ -51,6 +51,7 @@ import functools
import inspect
import os.path
import re
+from six import string_types
import llnl.util.lang
import spack
@@ -174,7 +175,7 @@ class DirectiveMetaMixin(type):
"""
global __all__
- if isinstance(dicts, basestring):
+ if isinstance(dicts, string_types):
dicts = (dicts, )
if not isinstance(dicts, collections.Sequence):
message = "dicts arg must be list, tuple, or string. Found {0}"
@@ -262,6 +263,33 @@ def _depends_on(pkg, spec, when=None, type=None):
conditions[when_spec] = dep_spec
+@directive('conflicts')
+def conflicts(conflict_spec, when=None):
+ """Allows a package to define a conflict, i.e. a concretized configuration
+ that is known to be non-valid.
+
+ For example a package that is known not to be buildable with intel
+ compilers can declare:
+
+ conflicts('%intel')
+
+ To express the same constraint only when the 'foo' variant is activated:
+
+ conflicts('%intel', when='+foo')
+
+ :param conflict_spec: constraint defining the known conflict
+ :param when: optional constraint that triggers the conflict
+ """
+ def _execute(pkg):
+ # If when is not specified the conflict always holds
+ condition = pkg.name if when is None else when
+ when_spec = parse_anonymous_spec(condition, pkg.name)
+
+ when_spec_list = pkg.conflicts.setdefault(conflict_spec, [])
+ when_spec_list.append(when_spec)
+ return _execute
+
+
@directive(('dependencies', 'dependency_types'))
def depends_on(spec, when=None, type=None):
"""Creates a dict of deps with specs defining when they apply.
diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py
index b84ee3be5b..9d09875484 100644
--- a/lib/spack/spack/directory_layout.py
+++ b/lib/spack/spack/directory_layout.py
@@ -23,7 +23,6 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
-import exceptions
import shutil
import glob
import tempfile
@@ -138,7 +137,7 @@ class DirectoryLayout(object):
if os.path.exists(path):
try:
shutil.rmtree(path)
- except exceptions.OSError as e:
+ except OSError as e:
raise RemoveFailedError(spec, path, e)
path = os.path.dirname(path)
diff --git a/lib/spack/spack/environment.py b/lib/spack/spack/environment.py
index 1333054518..eadfa45efb 100644
--- a/lib/spack/spack/environment.py
+++ b/lib/spack/spack/environment.py
@@ -291,7 +291,7 @@ class EnvironmentModifications(object):
shell_options = '{shell_options}'.format(**info)
source_file = '{source_command} {file} {concatenate_on_success}'
- dump_cmd = "import os, json; print json.dumps(dict(os.environ))"
+ dump_cmd = "import os, json; print(json.dumps(dict(os.environ)))"
dump_environment = 'python -c "%s"' % dump_cmd
# Construct the command that will be executed
@@ -310,7 +310,7 @@ class EnvironmentModifications(object):
proc.wait()
if proc.returncode != 0:
raise RuntimeError('sourcing files returned a non-zero exit code')
- output = ''.join([line for line in proc.stdout])
+ output = ''.join([line.decode('utf-8') for line in proc.stdout])
# Construct a dictionaries of the environment before and after
# sourcing the files, so that we can diff them.
diff --git a/lib/spack/spack/error.py b/lib/spack/spack/error.py
index b6261a05f4..cd1ae5b25c 100644
--- a/lib/spack/spack/error.py
+++ b/lib/spack/spack/error.py
@@ -22,8 +22,11 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+from __future__ import print_function
+
import os
import sys
+
import llnl.util.tty as tty
import spack
import inspect
diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py
index d510db568f..38752b3fc1 100644
--- a/lib/spack/spack/fetch_strategy.py
+++ b/lib/spack/spack/fetch_strategy.py
@@ -46,6 +46,9 @@ import re
import shutil
import copy
from functools import wraps
+from six import string_types
+from six import with_metaclass
+
import llnl.util.tty as tty
from llnl.util.filesystem import *
import spack
@@ -74,21 +77,19 @@ def _needs_stage(fun):
return wrapper
-class FetchStrategy(object):
+class FSMeta(type):
+ """This metaclass registers all fetch strategies in a list."""
+ def __init__(cls, name, bases, dict):
+ type.__init__(cls, name, bases, dict)
+ if cls.enabled:
+ all_strategies.append(cls)
+
+class FetchStrategy(with_metaclass(FSMeta, object)):
"""Superclass of all fetch strategies."""
enabled = False # Non-abstract subclasses should be enabled.
required_attributes = None # Attributes required in version() args.
- class __metaclass__(type):
-
- """This metaclass registers all fetch strategies in a list."""
-
- def __init__(cls, name, bases, dict):
- type.__init__(cls, name, bases, dict)
- if cls.enabled:
- all_strategies.append(cls)
-
def __init__(self):
# The stage is initialized late, so that fetch strategies can be
# constructed at package construction time. This is where things
@@ -319,7 +320,7 @@ class URLFetchStrategy(FetchStrategy):
# top-level directory. We ignore hidden files to accomodate
# these "semi-exploding" tarballs.
files = os.listdir(tarball_container)
- non_hidden = filter(lambda f: not f.startswith('.'), files)
+ non_hidden = [f for f in files if not f.startswith('.')]
if len(non_hidden) == 1:
expanded_dir = os.path.join(tarball_container, non_hidden[0])
if os.path.isdir(expanded_dir):
@@ -461,7 +462,7 @@ class VCSFetchStrategy(FetchStrategy):
patterns = kwargs.get('exclude', None)
if patterns is not None:
- if isinstance(patterns, basestring):
+ if isinstance(patterns, string_types):
patterns = [patterns]
for p in patterns:
tar.add_default_arg('--exclude=%s' % p)
diff --git a/lib/spack/spack/graph.py b/lib/spack/spack/graph.py
index 91230263f1..04e6cc7fca 100644
--- a/lib/spack/spack/graph.py
+++ b/lib/spack/spack/graph.py
@@ -63,6 +63,7 @@ can take a number of specs as input.
"""
from heapq import *
+from six import iteritems
from llnl.util.lang import *
from llnl.util.tty.color import *
@@ -562,7 +563,7 @@ def graph_dot(specs, deptype=None, static=False, out=None):
continue
# Add edges for each depends_on in the package.
- for dep_name, dep in spec.package.dependencies.iteritems():
+ for dep_name, dep in iteritems(spec.package.dependencies):
deps.add((spec.name, dep_name))
# If the package provides something, add an edge for that.
diff --git a/lib/spack/spack/hooks/case_consistency.py b/lib/spack/spack/hooks/case_consistency.py
index faf38f7ae3..2b88291666 100644
--- a/lib/spack/spack/hooks/case_consistency.py
+++ b/lib/spack/spack/hooks/case_consistency.py
@@ -23,6 +23,7 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from __future__ import absolute_import
+
import os
import re
import platform
@@ -30,6 +31,7 @@ import platform
from llnl.util.filesystem import *
import spack
+from spack.cmd.pkg import get_git
from spack.util.executable import *
@@ -60,8 +62,8 @@ def git_case_consistency_check(path):
"""
with working_dir(path):
# Don't bother fixing case if Spack isn't in a git repository
- git = which('git')
- if not git:
+ git = get_git(fatal=False)
+ if git is None:
return
try:
diff --git a/lib/spack/spack/hooks/module_file_generation.py b/lib/spack/spack/hooks/module_file_generation.py
index 445cea4e91..ff9617ff1c 100644
--- a/lib/spack/spack/hooks/module_file_generation.py
+++ b/lib/spack/spack/hooks/module_file_generation.py
@@ -23,15 +23,16 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import spack.modules
+from six import iteritems
def post_install(pkg):
- for item, cls in spack.modules.module_types.iteritems():
+ for item, cls in iteritems(spack.modules.module_types):
generator = cls(pkg.spec)
generator.write()
def post_uninstall(pkg):
- for item, cls in spack.modules.module_types.iteritems():
+ for item, cls in iteritems(spack.modules.module_types):
generator = cls(pkg.spec)
generator.remove()
diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py
index a6ffded935..8c702f1111 100644
--- a/lib/spack/spack/modules.py
+++ b/lib/spack/spack/modules.py
@@ -46,6 +46,8 @@ import os.path
import re
import string
import textwrap
+from six import iteritems
+from six import with_metaclass
import llnl.util.tty as tty
from llnl.util.filesystem import join_path, mkdirp
@@ -213,7 +215,7 @@ def parse_config_options(module_generator):
for x in arglist:
yield (x, )
else:
- for x in arglist.iteritems():
+ for x in iteritems(arglist):
yield x
for method, arglist in environment_actions.items():
@@ -246,17 +248,17 @@ def format_env_var_name(name):
return name.replace('-', '_').upper()
-class EnvModule(object):
- name = 'env_module'
- formats = {}
+class ModuleMeta(type):
+    """Metaclass registers modules in the module_types dict."""
+ def __init__(cls, name, bases, dict):
+ type.__init__(cls, name, bases, dict)
+ if cls.name != 'env_module' and cls.name in _module_config['enable']:
+ module_types[cls.name] = cls
- class __metaclass__(type):
- def __init__(cls, name, bases, dict):
- type.__init__(cls, name, bases, dict)
- if cls.name != 'env_module' and cls.name in _module_config[
- 'enable']:
- module_types[cls.name] = cls
+class EnvModule(with_metaclass(ModuleMeta, object)):
+ name = 'env_module'
+ formats = {}
def __init__(self, spec=None):
self.spec = spec
diff --git a/lib/spack/spack/operating_systems/cnl.py b/lib/spack/spack/operating_systems/cnl.py
index 7acab1cbcb..b5c759bbcb 100644
--- a/lib/spack/spack/operating_systems/cnl.py
+++ b/lib/spack/spack/operating_systems/cnl.py
@@ -54,7 +54,7 @@ class Cnl(OperatingSystem):
# ensure all the version calls we made are cached in the parent
# process, as well. This speeds up Spack a lot.
- clist = reduce(lambda x, y: x + y, compiler_lists)
+ clist = [comp for cl in compiler_lists for comp in cl]
return clist
def find_compiler(self, cmp_cls, *paths):
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index 80d65bd739..177b4c908b 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -42,6 +42,9 @@ import re
import sys
import textwrap
import time
+from six import StringIO
+from six import string_types
+from six import with_metaclass
import llnl.util.lock
import llnl.util.tty as tty
@@ -56,7 +59,7 @@ import spack.mirror
import spack.repository
import spack.url
import spack.util.web
-from StringIO import StringIO
+
from llnl.util.filesystem import *
from llnl.util.lang import *
from llnl.util.link_tree import LinkTree
@@ -238,7 +241,7 @@ def on_package_attributes(**attr_dict):
return _execute_under_condition
-class PackageBase(object):
+class PackageBase(with_metaclass(PackageMeta, object)):
"""This is the superclass for all spack packages.
***The Package class***
@@ -475,7 +478,6 @@ class PackageBase(object):
Package creators override functions like install() (all of them do this),
clean() (some of them do this), and others to provide custom behavior.
"""
- __metaclass__ = PackageMeta
#
# These are default values for instance variables.
#
@@ -568,7 +570,7 @@ class PackageBase(object):
self.list_url = None
if not hasattr(self, 'list_depth'):
- self.list_depth = 1
+ self.list_depth = 0
# Set default licensing information
if not hasattr(self, 'license_required'):
@@ -964,6 +966,10 @@ class PackageBase(object):
self.stage.expand_archive()
self.stage.chdir_to_source()
+ def patch(self):
+ """Default patch implementation is a no-op."""
+ pass
+
def do_patch(self):
"""Calls do_stage(), then applied patches to the expanded tarball if they
haven't been applied already."""
@@ -1115,6 +1121,13 @@ class PackageBase(object):
finally:
self.prefix_lock.release_write()
+ @contextlib.contextmanager
+ def _stage_and_write_lock(self):
+ """Prefix lock nested in a stage."""
+ with self.stage:
+ with self._prefix_write_lock():
+ yield
+
def do_install(self,
keep_prefix=False,
keep_stage=False,
@@ -1233,7 +1246,7 @@ class PackageBase(object):
self.stage.keep = keep_stage
- with contextlib.nested(self.stage, self._prefix_write_lock()):
+ with self._stage_and_write_lock():
# Run the pre-install hook in the child process after
# the directory is created.
spack.hooks.pre_install(self)
@@ -1265,9 +1278,10 @@ class PackageBase(object):
input_stream=input_stream
)
with redirection_context as log_redirection:
- for phase_name, phase in zip(self.phases, self._InstallPhase_phases): # NOQA: ignore=E501
+ for phase_name, phase in zip(
+ self.phases, self._InstallPhase_phases):
tty.msg(
- 'Executing phase : \'{0}\''.format(phase_name) # NOQA: ignore=E501
+ 'Executing phase : \'{0}\''.format(phase_name)
)
# Redirect stdout and stderr to daemon pipe
with log_redirection:
@@ -1355,7 +1369,7 @@ class PackageBase(object):
"""This function checks whether install succeeded."""
def check_paths(path_list, filetype, predicate):
- if isinstance(path_list, basestring):
+ if isinstance(path_list, string_types):
path_list = [path_list]
for path in path_list:
@@ -1676,9 +1690,7 @@ class PackageBase(object):
try:
return spack.util.web.find_versions_of_archive(
- *self.all_urls,
- list_url=self.list_url,
- list_depth=self.list_depth)
+ self.all_urls, self.list_url, self.list_depth)
except spack.error.NoNetworkConnectionError as e:
tty.die("Package.fetch_versions couldn't connect to:", e.url,
e.message)
diff --git a/lib/spack/spack/package_prefs.py b/lib/spack/spack/package_prefs.py
index 63f90d9b50..f9dac2bef0 100644
--- a/lib/spack/spack/package_prefs.py
+++ b/lib/spack/spack/package_prefs.py
@@ -22,12 +22,25 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+from six import string_types
+from six import iteritems
+
+from llnl.util.lang import classproperty
import spack
import spack.error
from spack.version import *
+_lesser_spec_types = {'compiler': spack.spec.CompilerSpec,
+ 'version': VersionList}
+
+
+def _spec_type(component):
+ """Map from component name to spec type for package prefs."""
+ return _lesser_spec_types.get(component, spack.spec.Spec)
+
+
def get_packages_config():
"""Wrapper around get_packages_config() to validate semantics."""
config = spack.config.get_config('packages')
@@ -49,177 +62,141 @@ def get_packages_config():
return config
-class PreferredPackages(object):
- def __init__(self):
- self.preferred = get_packages_config()
- self._spec_for_pkgname_cache = {}
+class PackagePrefs(object):
+ """Defines the sort order for a set of specs.
+
+    Spack's package preference implementation uses PackagePrefs objects to
+ define sort order. The PackagePrefs class looks at Spack's
+ packages.yaml configuration and, when called on a spec, returns a key
+ that can be used to sort that spec in order of the user's
+ preferences.
+
+ You can use it like this:
+
+ # key function sorts CompilerSpecs for `mpich` in order of preference
+ kf = PackagePrefs('mpich', 'compiler')
+ compiler_list.sort(key=kf)
+
+ Or like this:
+
+ # key function to sort VersionLists for OpenMPI in order of preference.
+ kf = PackagePrefs('openmpi', 'version')
+ version_list.sort(key=kf)
- # Given a package name, sort component (e.g, version, compiler, ...), and
- # a second_key (used by providers), return the list
- def _order_for_package(self, pkgname, component, second_key,
- test_all=True):
+ Optionally, you can sort in order of preferred virtual dependency
+ providers. To do that, provide 'providers' and a third argument
+ denoting the virtual package (e.g., ``mpi``):
+
+ kf = PackagePrefs('trilinos', 'providers', 'mpi')
+ provider_spec_list.sort(key=kf)
+
+ """
+ _packages_config_cache = None
+ _spec_cache = {}
+
+ def __init__(self, pkgname, component, vpkg=None):
+ self.pkgname = pkgname
+ self.component = component
+ self.vpkg = vpkg
+
+ def __call__(self, spec):
+ """Return a key object (an index) that can be used to sort spec.
+
+ Sort is done in package order. We don't cache the result of
+ this function as Python's sort functions already ensure that the
+ key function is called at most once per sorted element.
+ """
+ spec_order = self._specs_for_pkg(
+ self.pkgname, self.component, self.vpkg)
+
+ # integer is the index of the first spec in order that satisfies
+ # spec, or it's a number larger than any position in the order.
+ return next(
+ (i for i, s in enumerate(spec_order) if spec.satisfies(s)),
+ len(spec_order))
+
+ @classproperty
+ @classmethod
+ def _packages_config(cls):
+ if cls._packages_config_cache is None:
+ cls._packages_config_cache = get_packages_config()
+ return cls._packages_config_cache
+
+ @classmethod
+ def _order_for_package(cls, pkgname, component, vpkg=None, all=True):
+ """Given a package name, sort component (e.g, version, compiler, ...),
+ and an optional vpkg, return the list from the packages config.
+ """
pkglist = [pkgname]
- if test_all:
+ if all:
pkglist.append('all')
+
for pkg in pkglist:
- order = self.preferred.get(pkg, {}).get(component, {})
- if isinstance(order, dict) and second_key:
- order = order.get(second_key, {})
+ pkg_entry = cls._packages_config.get(pkg)
+ if not pkg_entry:
+ continue
+
+ order = pkg_entry.get(component)
if not order:
continue
- return [str(s).strip() for s in order]
+
+ # vpkg is one more level
+ if vpkg is not None:
+ order = order.get(vpkg)
+
+ if order:
+ return [str(s).strip() for s in order]
+
return []
- # A generic sorting function. Given a package name and sort
- # component, return less-than-0, 0, or greater-than-0 if
- # a is respectively less-than, equal to, or greater than b.
- def _component_compare(self, pkgname, component, a, b,
- reverse_natural_compare, second_key):
- if a is None:
- return -1
- if b is None:
- return 1
- orderlist = self._order_for_package(pkgname, component, second_key)
- a_in_list = str(a) in orderlist
- b_in_list = str(b) in orderlist
- if a_in_list and not b_in_list:
- return -1
- elif b_in_list and not a_in_list:
- return 1
-
- cmp_a = None
- cmp_b = None
- reverse = None
- if not a_in_list and not b_in_list:
- cmp_a = a
- cmp_b = b
- reverse = -1 if reverse_natural_compare else 1
- else:
- cmp_a = orderlist.index(str(a))
- cmp_b = orderlist.index(str(b))
- reverse = 1
-
- if cmp_a < cmp_b:
- return -1 * reverse
- elif cmp_a > cmp_b:
- return 1 * reverse
- else:
- return 0
-
- # A sorting function for specs. Similar to component_compare, but
- # a and b are considered to match entries in the sorting list if they
- # satisfy the list component.
- def _spec_compare(self, pkgname, component, a, b,
- reverse_natural_compare, second_key):
- if not a or (not a.concrete and not second_key):
- return -1
- if not b or (not b.concrete and not second_key):
- return 1
- specs = self._spec_for_pkgname(pkgname, component, second_key)
- a_index = None
- b_index = None
- reverse = -1 if reverse_natural_compare else 1
- for i, cspec in enumerate(specs):
- if a_index is None and (cspec.satisfies(a) or a.satisfies(cspec)):
- a_index = i
- if b_index:
- break
- if b_index is None and (cspec.satisfies(b) or b.satisfies(cspec)):
- b_index = i
- if a_index:
- break
-
- if a_index is not None and b_index is None:
- return -1
- elif a_index is None and b_index is not None:
- return 1
- elif a_index is not None and b_index == a_index:
- return -1 * cmp(a, b)
- elif (a_index is not None and b_index is not None and
- a_index != b_index):
- return cmp(a_index, b_index)
- else:
- return cmp(a, b) * reverse
-
- # Given a sort order specified by the pkgname/component/second_key, return
- # a list of CompilerSpecs, VersionLists, or Specs for that sorting list.
- def _spec_for_pkgname(self, pkgname, component, second_key):
- key = (pkgname, component, second_key)
- if key not in self._spec_for_pkgname_cache:
- pkglist = self._order_for_package(pkgname, component, second_key)
- if component == 'compiler':
- self._spec_for_pkgname_cache[key] = \
- [spack.spec.CompilerSpec(s) for s in pkglist]
- elif component == 'version':
- self._spec_for_pkgname_cache[key] = \
- [VersionList(s) for s in pkglist]
- else:
- self._spec_for_pkgname_cache[key] = \
- [spack.spec.Spec(s) for s in pkglist]
- return self._spec_for_pkgname_cache[key]
-
- def provider_compare(self, pkgname, provider_str, a, b):
- """Return less-than-0, 0, or greater than 0 if a is respecively
- less-than, equal-to, or greater-than b. A and b are possible
- implementations of provider_str. One provider is less-than another
- if it is preferred over the other. For example,
- provider_compare('scorep', 'mpi', 'mvapich', 'openmpi') would
- return -1 if mvapich should be preferred over openmpi for scorep."""
- return self._spec_compare(pkgname, 'providers', a, b, False,
- provider_str)
-
- def spec_has_preferred_provider(self, pkgname, provider_str):
- """Return True iff the named package has a list of preferred
- providers"""
- return bool(self._order_for_package(pkgname, 'providers',
- provider_str, False))
-
- def spec_preferred_variants(self, pkgname):
- """Return a VariantMap of preferred variants and their values"""
- for pkg in (pkgname, 'all'):
- variants = self.preferred.get(pkg, {}).get('variants', '')
+ @classmethod
+ def _specs_for_pkg(cls, pkgname, component, vpkg=None):
+        """Given a sort order specified by the pkgname/component/vpkg,
+ return a list of CompilerSpecs, VersionLists, or Specs for
+ that sorting list.
+ """
+ key = (pkgname, component, vpkg)
+
+ specs = cls._spec_cache.get(key)
+ if specs is None:
+ pkglist = cls._order_for_package(pkgname, component, vpkg)
+ spec_type = _spec_type(component)
+ specs = [spec_type(s) for s in pkglist]
+ cls._spec_cache[key] = specs
+
+ return specs
+
+ @classmethod
+ def clear_caches(cls):
+ cls._packages_config_cache = None
+ cls._spec_cache = {}
+
+ @classmethod
+ def has_preferred_providers(cls, pkgname, vpkg):
+        """Whether the specified package has preferred providers for a vpkg."""
+ return bool(cls._order_for_package(pkgname, 'providers', vpkg, False))
+
+ @classmethod
+ def preferred_variants(cls, pkg_name):
+ """Return a VariantMap of preferred variants/values for a spec."""
+ for pkg in (pkg_name, 'all'):
+ variants = cls._packages_config.get(pkg, {}).get('variants', '')
if variants:
break
- if not isinstance(variants, basestring):
+
+ # allow variants to be list or string
+ if not isinstance(variants, string_types):
variants = " ".join(variants)
- pkg = spack.repo.get(pkgname)
- spec = spack.spec.Spec("%s %s" % (pkgname, variants))
+
# Only return variants that are actually supported by the package
+ pkg = spack.repo.get(pkg_name)
+ spec = spack.spec.Spec("%s %s" % (pkg_name, variants))
return dict((name, variant) for name, variant in spec.variants.items()
if name in pkg.variants)
- def version_compare(self, pkgname, a, b):
- """Return less-than-0, 0, or greater than 0 if version a of pkgname is
- respectively less-than, equal-to, or greater-than version b of
- pkgname. One version is less-than another if it is preferred over
- the other."""
- return self._spec_compare(pkgname, 'version', a, b, True, None)
-
- def variant_compare(self, pkgname, a, b):
- """Return less-than-0, 0, or greater than 0 if variant a of pkgname is
- respectively less-than, equal-to, or greater-than variant b of
- pkgname. One variant is less-than another if it is preferred over
- the other."""
- return self._component_compare(pkgname, 'variant', a, b, False, None)
-
- def architecture_compare(self, pkgname, a, b):
- """Return less-than-0, 0, or greater than 0 if architecture a of pkgname
- is respectively less-than, equal-to, or greater-than architecture b
- of pkgname. One architecture is less-than another if it is preferred
- over the other."""
- return self._component_compare(pkgname, 'architecture', a, b,
- False, None)
-
- def compiler_compare(self, pkgname, a, b):
- """Return less-than-0, 0, or greater than 0 if compiler a of pkgname is
- respecively less-than, equal-to, or greater-than compiler b of
- pkgname. One compiler is less-than another if it is preferred over
- the other."""
- return self._spec_compare(pkgname, 'compiler', a, b, False, None)
-
def spec_externals(spec):
- """Return a list of external specs (with external directory path filled in),
+ """Return a list of external specs (w/external directory path filled in),
one for each known external installation."""
# break circular import.
from spack.build_environment import get_path_from_module
@@ -233,7 +210,7 @@ def spec_externals(spec):
if (not pkg_paths) and (not pkg_modules):
return []
- for external_spec, path in pkg_paths.iteritems():
+ for external_spec, path in iteritems(pkg_paths):
if not path:
# skip entries without paths (avoid creating extra Specs)
continue
@@ -242,7 +219,7 @@ def spec_externals(spec):
if external_spec.satisfies(spec):
external_specs.append(external_spec)
- for external_spec, module in pkg_modules.iteritems():
+ for external_spec, module in iteritems(pkg_modules):
if not module:
continue
@@ -253,7 +230,8 @@ def spec_externals(spec):
if external_spec.satisfies(spec):
external_specs.append(external_spec)
- return external_specs
+ # defensively copy returned specs
+ return [s.copy() for s in external_specs]
def is_spec_buildable(spec):
@@ -266,50 +244,5 @@ def is_spec_buildable(spec):
return allpkgs[spec.name]['buildable']
-def cmp_specs(lhs, rhs):
- # Package name sort order is not configurable, always goes alphabetical
- if lhs.name != rhs.name:
- return cmp(lhs.name, rhs.name)
-
- # Package version is second in compare order
- pkgname = lhs.name
- if lhs.versions != rhs.versions:
- return pkgsort().version_compare(
- pkgname, lhs.versions, rhs.versions)
-
- # Compiler is third
- if lhs.compiler != rhs.compiler:
- return pkgsort().compiler_compare(
- pkgname, lhs.compiler, rhs.compiler)
-
- # Variants
- if lhs.variants != rhs.variants:
- return pkgsort().variant_compare(
- pkgname, lhs.variants, rhs.variants)
-
- # Architecture
- if lhs.architecture != rhs.architecture:
- return pkgsort().architecture_compare(
- pkgname, lhs.architecture, rhs.architecture)
-
- # Dependency is not configurable
- lhash, rhash = hash(lhs), hash(rhs)
- if lhash != rhash:
- return -1 if lhash < rhash else 1
-
- # Equal specs
- return 0
-
-
-_pkgsort = None
-
-
-def pkgsort():
- global _pkgsort
- if _pkgsort is None:
- _pkgsort = PreferredPackages()
- return _pkgsort
-
-
class VirtualInPackagesYAMLError(spack.error.SpackError):
"""Raised when a disallowed virtual is found in packages.yaml"""
diff --git a/lib/spack/spack/package_test.py b/lib/spack/spack/package_test.py
index e366b5f0e5..54f424d45e 100644
--- a/lib/spack/spack/package_test.py
+++ b/lib/spack/spack/package_test.py
@@ -45,15 +45,15 @@ def compile_c_and_execute(source_file, include_flags, link_flags):
def compare_output(current_output, blessed_output):
"""Compare blessed and current output of executables."""
if not (current_output == blessed_output):
- print "Produced output does not match expected output."
- print "Expected output:"
- print '-' * 80
- print blessed_output
- print '-' * 80
- print "Produced output:"
- print '-' * 80
- print current_output
- print '-' * 80
+ print("Produced output does not match expected output.")
+ print("Expected output:")
+ print('-' * 80)
+ print(blessed_output)
+ print('-' * 80)
+ print("Produced output:")
+ print('-' * 80)
+ print(current_output)
+ print('-' * 80)
raise RuntimeError("Ouput check failed.",
"See spack_output.log for details")
diff --git a/lib/spack/spack/parse.py b/lib/spack/spack/parse.py
index e116175823..880bb09b4e 100644
--- a/lib/spack/spack/parse.py
+++ b/lib/spack/spack/parse.py
@@ -25,6 +25,8 @@
import re
import shlex
import itertools
+from six import string_types
+
import spack.error
@@ -46,9 +48,8 @@ class Token:
def is_a(self, type):
return self.type == type
- def __cmp__(self, other):
- return cmp((self.type, self.value),
- (other.type, other.value))
+ def __eq__(self, other):
+ return (self.type == other.type) and (self.value == other.value)
class Lexer(object):
@@ -118,7 +119,7 @@ class Parser(object):
def gettok(self):
"""Puts the next token in the input stream into self.next."""
try:
- self.next = self.tokens.next()
+ self.next = next(self.tokens)
except StopIteration:
self.next = None
@@ -159,7 +160,7 @@ class Parser(object):
sys.exit(1)
def setup(self, text):
- if isinstance(text, basestring):
+ if isinstance(text, string_types):
text = shlex.split(text)
self.text = text
self.push_tokens(self.lexer.lex(text))
diff --git a/lib/spack/spack/provider_index.py b/lib/spack/spack/provider_index.py
index 0e771c6255..8d64d100b1 100644
--- a/lib/spack/spack/provider_index.py
+++ b/lib/spack/spack/provider_index.py
@@ -26,6 +26,7 @@
The ``virtual`` module contains utility classes for virtual dependencies.
"""
from itertools import product as iproduct
+from six import iteritems
from pprint import pformat
import spack.util.spack_yaml as syaml
@@ -97,7 +98,7 @@ class ProviderIndex(object):
assert(not spec.virtual)
pkg = spec.package
- for provided_spec, provider_specs in pkg.provided.iteritems():
+ for provided_spec, provider_specs in iteritems(pkg.provided):
for provider_spec in provider_specs:
# TODO: fix this comment.
# We want satisfaction other than flags
@@ -145,8 +146,8 @@ class ProviderIndex(object):
if p_spec.satisfies(vspec, deps=False):
providers.update(spec_set)
- # Return providers in order
- return sorted(providers)
+ # Return providers in order. Defensively copy.
+ return sorted(s.copy() for s in providers)
# TODO: this is pretty darned nasty, and inefficient, but there
# are not that many vdeps in most specs.
@@ -201,7 +202,7 @@ class ProviderIndex(object):
def from_yaml(stream):
try:
yfile = syaml.load(stream)
- except MarkedYAMLError, e:
+ except MarkedYAMLError as e:
raise spack.spec.SpackYAMLError(
"error parsing YAML ProviderIndex cache:", str(e))
@@ -288,7 +289,7 @@ def _transform(providers, transform_fun, out_mapping_type=dict):
"""
def mapiter(mappings):
if isinstance(mappings, dict):
- return mappings.iteritems()
+ return iteritems(mappings)
else:
return iter(mappings)
diff --git a/lib/spack/spack/repository.py b/lib/spack/spack/repository.py
index 1536ecb0e6..5486f7a9a4 100644
--- a/lib/spack/spack/repository.py
+++ b/lib/spack/spack/repository.py
@@ -26,7 +26,6 @@ import os
import stat
import shutil
import errno
-import exceptions
import sys
import inspect
import imp
@@ -558,7 +557,7 @@ class Repo(object):
return yaml_data['repo']
- except exceptions.IOError:
+ except IOError:
tty.die("Error reading %s when opening %s"
% (self.config_file, self.root))
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index b2fae9fd8e..534bc6c2d3 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -96,15 +96,16 @@ specs to avoid ambiguity. Both are provided because ~ can cause shell
expansion when it is the first character in an id typed on the command line.
"""
import base64
+import sys
import collections
-import csv
import ctypes
import hashlib
import itertools
from operator import attrgetter
+from six import StringIO
+from six import string_types
+from six import iteritems
-import cStringIO
-import llnl.util.tty as tty
import spack
import spack.architecture
import spack.compilers as compilers
@@ -113,7 +114,7 @@ import spack.parse
import spack.store
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
-from cStringIO import StringIO
+
from llnl.util.filesystem import find_libraries
from llnl.util.lang import *
from llnl.util.tty.color import *
@@ -157,6 +158,7 @@ __all__ = [
'UnsatisfiableDependencySpecError',
'AmbiguousHashError',
'InvalidHashError',
+ 'NoSuchHashError',
'RedundantSpecError']
# Valid pattern for an identifier in Spack
@@ -222,7 +224,7 @@ def canonical_deptype(deptype):
if deptype is None:
return alldeps
- elif isinstance(deptype, str):
+ elif isinstance(deptype, string_types):
return special_types.get(deptype, (deptype,))
elif isinstance(deptype, (tuple, list)):
@@ -270,7 +272,7 @@ class ArchSpec(object):
spec_like = args[0]
if isinstance(spec_like, ArchSpec):
self._dup(spec_like)
- elif isinstance(spec_like, basestring):
+ elif isinstance(spec_like, string_types):
spec_fields = spec_like.split("-")
if len(spec_fields) == 3:
@@ -391,7 +393,7 @@ class ArchSpec(object):
raise UnsatisfiableArchitectureSpecError(self, other)
constrained = False
- for attr, svalue in self.to_cmp_dict().iteritems():
+ for attr, svalue in iteritems(self.to_cmp_dict()):
ovalue = getattr(other, attr)
if svalue is None and ovalue is not None:
setattr(self, attr, ovalue)
@@ -406,7 +408,7 @@ class ArchSpec(object):
@property
def concrete(self):
- return all(v for k, v in self.to_cmp_dict().iteritems())
+ return all(v for k, v in iteritems(self.to_cmp_dict()))
def to_cmp_dict(self):
"""Returns a dictionary that can be used for field comparison."""
@@ -464,7 +466,7 @@ class CompilerSpec(object):
arg = args[0]
# If there is one argument, it's either another CompilerSpec
# to copy or a string to parse
- if isinstance(arg, basestring):
+ if isinstance(arg, string_types):
c = SpecParser().parse_compiler(arg)
self.name = c.name
self.versions = c.versions
@@ -579,8 +581,11 @@ class DependencySpec(object):
self.deptypes = tuple(sorted(set(deptypes)))
def update_deptypes(self, deptypes):
- deptypes = tuple(sorted(set(deptypes)))
+ deptypes = set(deptypes)
+ deptypes.update(self.deptypes)
+ deptypes = tuple(sorted(deptypes))
changed = self.deptypes != deptypes
+
self.deptypes = deptypes
return changed
@@ -728,11 +733,10 @@ class FlagMap(HashableMap):
return clone
def _cmp_key(self):
- return tuple((k, tuple(v)) for k, v in sorted(self.iteritems()))
+ return tuple((k, tuple(v)) for k, v in sorted(iteritems(self)))
def __str__(self):
- sorted_keys = filter(
- lambda flag: self[flag] != [], sorted(self.keys()))
+ sorted_keys = [k for k in sorted(self.keys()) if self[k] != []]
cond_symbol = ' ' if len(sorted_keys) > 0 else ''
return cond_symbol + ' '.join(
str(key) + '=\"' + ' '.join(
@@ -918,7 +922,7 @@ class Spec(object):
return
# Parse if the spec_like is a string.
- if not isinstance(spec_like, basestring):
+ if not isinstance(spec_like, string_types):
raise TypeError("Can't make spec out of %s" % type(spec_like))
spec_list = SpecParser().parse(spec_like)
@@ -1018,9 +1022,9 @@ class Spec(object):
if name in self.variants:
raise DuplicateVariantError(
"Cannot specify variant '%s' twice" % name)
- if isinstance(value, basestring) and value.upper() == 'TRUE':
+ if isinstance(value, string_types) and value.upper() == 'TRUE':
value = True
- elif isinstance(value, basestring) and value.upper() == 'FALSE':
+ elif isinstance(value, string_types) and value.upper() == 'FALSE':
value = False
self.variants[name] = VariantSpec(name, value)
@@ -1056,7 +1060,7 @@ class Spec(object):
new_vals = tuple(kwargs.get(arg, None) for arg in arch_attrs)
self.architecture = ArchSpec(*new_vals)
else:
- new_attrvals = [(a, v) for a, v in kwargs.iteritems()
+ new_attrvals = [(a, v) for a, v in iteritems(kwargs)
if a in arch_attrs]
for new_attr, new_value in new_attrvals:
if getattr(self.architecture, new_attr):
@@ -1219,7 +1223,7 @@ class Spec(object):
# get initial values for kwargs
depth = kwargs.get('depth', False)
key_fun = kwargs.get('key', id)
- if isinstance(key_fun, basestring):
+ if isinstance(key_fun, string_types):
key_fun = attrgetter(key_fun)
yield_root = kwargs.get('root', True)
cover = kwargs.get('cover', 'nodes')
@@ -1314,8 +1318,12 @@ class Spec(object):
else:
yaml_text = syaml.dump(
self.to_node_dict(), default_flow_style=True, width=maxint)
- sha = hashlib.sha1(yaml_text)
+ sha = hashlib.sha1(yaml_text.encode('utf-8'))
+
b32_hash = base64.b32encode(sha.digest()).lower()
+ if sys.version_info[0] >= 3:
+ b32_hash = b32_hash.decode('utf-8')
+
if self.concrete:
self._hash = b32_hash
return b32_hash[:length]
@@ -1421,7 +1429,7 @@ class Spec(object):
formats so that reindex will work on old specs/databases.
"""
for dep_name, elt in dependency_dict.items():
- if isinstance(elt, basestring):
+ if isinstance(elt, string_types):
# original format, elt is just the dependency hash.
dag_hash, deptypes = elt, ['build', 'link']
elif isinstance(elt, tuple):
@@ -1566,14 +1574,12 @@ class Spec(object):
a problem.
"""
# Make an index of stuff this spec already provides
- # XXX(deptype): 'link' and 'run'?
self_index = ProviderIndex(self.traverse(), restrict=True)
changed = False
done = False
while not done:
done = True
- # XXX(deptype): 'link' and 'run'?
for spec in list(self.traverse()):
replacement = None
if spec.virtual:
@@ -1599,7 +1605,7 @@ class Spec(object):
# Replace spec with the candidate and normalize
copy = self.copy()
- copy[spec.name]._dup(replacement.copy(deps=False))
+ copy[spec.name]._dup(replacement, deps=False)
try:
# If there are duplicate providers or duplicate
@@ -1701,6 +1707,18 @@ class Spec(object):
# Mark everything in the spec as concrete, as well.
self._mark_concrete()
+ # Now that the spec is concrete we should check if
+ # there are declared conflicts
+ matches = []
+ for x in self.traverse():
+ for conflict_spec, when_list in x.package.conflicts.items():
+ if x.satisfies(conflict_spec):
+ for when_spec in when_list:
+ if x.satisfies(when_spec):
+ matches.append((x, conflict_spec, when_spec))
+ if matches:
+ raise ConflictsInSpecError(self, matches)
+
def _mark_concrete(self, value=True):
"""Mark this spec and its dependencies as concrete.
@@ -1798,6 +1816,8 @@ class Spec(object):
dependency already in this spec.
"""
assert(vdep.virtual)
+
+ # note that this defensively copies.
providers = provider_index.providers_for(vdep)
# If there is a provider for the vpkg, then use that instead of
@@ -1827,6 +1847,10 @@ class Spec(object):
provider_index):
"""Merge the dependency into this spec.
+        Caller should assume that this routine owns the dep parameter
+ (i.e. it needs to be a copy of any internal structures like
+ dependencies on Package class objects).
+
This is the core of normalize(). There are some basic steps:
* If dep is virtual, evaluate whether it corresponds to an
@@ -1839,6 +1863,7 @@ class Spec(object):
constraints into this spec.
This method returns True if the spec was changed, False otherwise.
+
"""
changed = False
@@ -1851,7 +1876,8 @@ class Spec(object):
dep = provider
else:
index = ProviderIndex([dep], restrict=True)
- for vspec in (v for v in spec_deps.values() if v.virtual):
+ items = list(spec_deps.items())
+ for name, vspec in items:
if index.providers_for(vspec):
vspec._replace_with(dep)
del spec_deps[vspec.name]
@@ -1862,29 +1888,23 @@ class Spec(object):
raise UnsatisfiableProviderSpecError(required[0], dep)
provider_index.update(dep)
- # If the spec isn't already in the set of dependencies, clone
- # it from the package description.
+ # If the spec isn't already in the set of dependencies, add it.
+ # Note: dep is always owned by this method. If it's from the
+ # caller, it's a copy from _evaluate_dependency_conditions. If it
+ # comes from a vdep, it's a defensive copy from _find_provider.
if dep.name not in spec_deps:
- spec_deps[dep.name] = dep.copy()
+ spec_deps[dep.name] = dep
changed = True
else:
- dspec = spec_deps[dep.name]
- if self.name not in dspec._dependents:
- self._add_dependency(dspec, deptypes)
- else:
- dependent = dspec._dependents[self.name]
- changed = dependent.update_deptypes(deptypes)
-
- # Constrain package information with spec info
- try:
- changed |= spec_deps[dep.name].constrain(dep)
-
- except UnsatisfiableSpecError as e:
- e.message = "Invalid spec: '%s'. "
- e.message += "Package %s requires %s %s, but spec asked for %s"
- e.message %= (spec_deps[dep.name], dep.name,
- e.constraint_type, e.required, e.provided)
- raise e
+ # merge package/vdep information into spec
+ try:
+ changed |= spec_deps[dep.name].constrain(dep)
+ except UnsatisfiableSpecError as e:
+ e.message = "Invalid spec: '%s'. "
+ e.message += "Package %s requires %s %s, but spec asked for %s"
+ e.message %= (spec_deps[dep.name], dep.name,
+ e.constraint_type, e.required, e.provided)
+ raise e
# Add merged spec to my deps and recurse
dependency = spec_deps[dep.name]
@@ -2094,6 +2114,9 @@ class Spec(object):
changed = False
for name in self.common_dependencies(other):
changed |= self[name].constrain(other[name], deps=False)
+ if name in self._dependencies:
+ changed |= self._dependencies[name].update_deptypes(
+ other._dependencies[name].deptypes)
# Update with additional constraints from other spec
for name in other.dep_difference(self):
@@ -2166,7 +2189,13 @@ class Spec(object):
# A concrete provider can satisfy a virtual dependency.
if not self.virtual and other.virtual:
- pkg = spack.repo.get(self.fullname)
+ try:
+ pkg = spack.repo.get(self.fullname)
+ except spack.repository.PackageLoadError:
+ # If we can't get package info on this spec, don't treat
+ # it as a provider of this vdep.
+ return False
+
if pkg.provides(other.name):
for provided, when_specs in pkg.provided.items():
if any(self.satisfies(when_spec, deps=False, strict=strict)
@@ -2219,7 +2248,7 @@ class Spec(object):
# If we need to descend into dependencies, do it, otherwise we're done.
if deps:
deps_strict = strict
- if self.concrete and not other.name:
+ if self._concrete and not other.name:
# We're dealing with existing specs
deps_strict = True
return self.satisfies_dependencies(other, strict=deps_strict)
@@ -2320,9 +2349,6 @@ class Spec(object):
self.external_module = other.external_module
self.namespace = other.namespace
- self.external = other.external
- self.external_module = other.external_module
-
# If we copy dependencies, preserve DAG structure in the new spec
if deps:
deptypes = alldeps # by default copy all deptypes
@@ -2336,6 +2362,7 @@ class Spec(object):
# These fields are all cached results of expensive operations.
# If we preserved the original structure, we can copy them
# safely. If not, they need to be recomputed.
+ # TODO: dependency hashes can be copied more aggressively.
if deps is True or deps == alldeps:
self._hash = other._hash
self._cmp_key_cache = other._cmp_key_cache
@@ -2407,11 +2434,8 @@ class Spec(object):
if query_parameters:
# We have extra query parameters, which are comma separated
# values
- f = cStringIO.StringIO(query_parameters.pop())
- try:
- query_parameters = next(csv.reader(f, skipinitialspace=True))
- except StopIteration:
- query_parameters = ['']
+ csv = query_parameters.pop().strip()
+ query_parameters = re.split(r'\s*,\s*', csv)
try:
value = next(
@@ -2721,41 +2745,6 @@ class Spec(object):
def dep_string(self):
return ''.join("^" + dep.format() for dep in self.sorted_deps())
- def __cmp__(self, other):
- from package_prefs import pkgsort
-
- # Package name sort order is not configurable, always goes alphabetical
- if self.name != other.name:
- return cmp(self.name, other.name)
-
- # Package version is second in compare order
- pkgname = self.name
- if self.versions != other.versions:
- return pkgsort().version_compare(
- pkgname, self.versions, other.versions)
-
- # Compiler is third
- if self.compiler != other.compiler:
- return pkgsort().compiler_compare(
- pkgname, self.compiler, other.compiler)
-
- # Variants
- if self.variants != other.variants:
- return pkgsort().variant_compare(
- pkgname, self.variants, other.variants)
-
- # Target
- if self.architecture != other.architecture:
- return pkgsort().architecture_compare(
- pkgname, self.architecture, other.architecture)
-
- # Dependency is not configurable
- if self._dependencies != other._dependencies:
- return -1 if self._dependencies < other._dependencies else 1
-
- # Equal specs
- return 0
-
def __str__(self):
ret = self.format() + self.dep_string()
return ret.strip()
@@ -2975,8 +2964,7 @@ class SpecParser(spack.parse.Parser):
spec.dag_hash()[:len(self.token.value)] == self.token.value]
if not matches:
- tty.die("%s does not match any installed packages." %
- self.token.value)
+ raise NoSuchHashError(self.token.value)
if len(matches) != 1:
raise AmbiguousHashError(
@@ -3348,9 +3336,27 @@ class InvalidHashError(SpecError):
% (hash, spec))
+class NoSuchHashError(SpecError):
+ def __init__(self, hash):
+ super(NoSuchHashError, self).__init__(
+ "No installed spec matches the hash: '%s'")
+
+
class RedundantSpecError(SpecError):
def __init__(self, spec, addition):
super(RedundantSpecError, self).__init__(
"Attempting to add %s to spec %s which is already concrete."
" This is likely the result of adding to a spec specified by hash."
% (addition, spec))
+
+
+class ConflictsInSpecError(SpecError, RuntimeError):
+ def __init__(self, spec, matches):
+ message = 'Conflicts in concretized spec "{0}"\n'.format(
+ spec.short_spec
+ )
+ long_message = 'List of matching conflicts:\n\n'
+ match_fmt = '{0}. "{1}" conflicts with "{2}" in spec "{3}"\n'
+ for idx, (s, c, w) in enumerate(matches):
+ long_message += match_fmt.format(idx + 1, c, w, s)
+ super(ConflictsInSpecError, self).__init__(message, long_message)
diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py
index 91f77839d8..21db3d75c2 100644
--- a/lib/spack/spack/stage.py
+++ b/lib/spack/spack/stage.py
@@ -29,18 +29,19 @@ import hashlib
import shutil
import tempfile
import getpass
-from urlparse import urljoin
+from six import string_types
+from six import iteritems
+from six.moves.urllib.parse import urljoin
import llnl.util.tty as tty
import llnl.util.lock
from llnl.util.filesystem import *
-import spack.util.pattern as pattern
-
import spack
import spack.config
-import spack.fetch_strategy as fs
import spack.error
+import spack.fetch_strategy as fs
+import spack.util.pattern as pattern
from spack.version import *
from spack.util.path import canonicalize_path
from spack.util.crypto import prefix_bits, bit_length
@@ -84,7 +85,7 @@ def get_tmp_root():
if _tmp_root is None:
config = spack.config.get_config('config')
candidates = config['build_stage']
- if isinstance(candidates, basestring):
+ if isinstance(candidates, string_types):
candidates = [candidates]
path = _first_accessible_path(candidates)
@@ -188,7 +189,7 @@ class Stage(object):
"""
# TODO: fetch/stage coupling needs to be reworked -- the logic
# TODO: here is convoluted and not modular enough.
- if isinstance(url_or_fetch_strategy, basestring):
+ if isinstance(url_or_fetch_strategy, string_types):
self.fetcher = fs.from_url(url_or_fetch_strategy)
elif isinstance(url_or_fetch_strategy, fs.FetchStrategy):
self.fetcher = url_or_fetch_strategy
@@ -225,7 +226,7 @@ class Stage(object):
self._lock = None
if lock:
if self.name not in Stage.stage_locks:
- sha1 = hashlib.sha1(self.name).digest()
+ sha1 = hashlib.sha1(self.name.encode('utf-8')).digest()
lock_id = prefix_bits(sha1, bit_length(sys.maxsize))
stage_lock_path = join_path(spack.stage_path, '.lock')
@@ -548,7 +549,7 @@ class ResourceStage(Stage):
if not isinstance(placement, dict):
placement = {'': placement}
# Make the paths in the dictionary absolute and link
- for key, value in placement.iteritems():
+ for key, value in iteritems(placement):
target_path = join_path(
root_stage.source_path, resource.destination)
destination_path = join_path(target_path, value)
@@ -661,7 +662,7 @@ class DIYStage(object):
def _get_mirrors():
"""Get mirrors from spack configuration."""
config = spack.config.get_config('mirrors')
- return [val for name, val in config.iteritems()]
+ return [val for name, val in iteritems(config)]
def ensure_access(file=spack.stage_path):
@@ -689,5 +690,6 @@ class RestageError(StageError):
class ChdirError(StageError):
"""Raised when Spack can't change directories."""
+
# Keep this in namespace for convenience
FailedDownloadError = fs.FailedDownloadError
diff --git a/lib/spack/spack/test/architecture.py b/lib/spack/spack/test/architecture.py
index fb4113361c..8f257cf0dc 100644
--- a/lib/spack/spack/test/architecture.py
+++ b/lib/spack/spack/test/architecture.py
@@ -138,8 +138,8 @@ def test_user_defaults(config):
def test_user_input_combination(config):
platform = spack.architecture.platform()
- os_list = platform.operating_sys.keys()
- target_list = platform.targets.keys()
+ os_list = list(platform.operating_sys.keys())
+ target_list = list(platform.targets.keys())
additional = ["fe", "be", "frontend", "backend"]
os_list.extend(additional)
diff --git a/lib/spack/spack/test/build_system_guess.py b/lib/spack/spack/test/build_system_guess.py
index 82bf1964b2..e6fb84b37d 100644
--- a/lib/spack/spack/test/build_system_guess.py
+++ b/lib/spack/spack/test/build_system_guess.py
@@ -38,6 +38,8 @@ import spack.stage
('setup.py', 'python'),
('NAMESPACE', 'r'),
('WORKSPACE', 'bazel'),
+ ('Makefile.PL', 'perlmake'),
+ ('Build.PL', 'perlbuild'),
('foobar', 'generic')
]
)
diff --git a/lib/spack/spack/test/cmd/install.py b/lib/spack/spack/test/cmd/install.py
index 304eb04a55..b57d39b441 100644
--- a/lib/spack/spack/test/cmd/install.py
+++ b/lib/spack/spack/test/cmd/install.py
@@ -22,19 +22,19 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import StringIO
import argparse
import codecs
import collections
import contextlib
import unittest
+from six import StringIO
import llnl.util.filesystem
import spack
import spack.cmd
import spack.cmd.install as install
-FILE_REGISTRY = collections.defaultdict(StringIO.StringIO)
+FILE_REGISTRY = collections.defaultdict(StringIO)
# Monkey-patch open to write module files to a StringIO instance
@@ -44,7 +44,7 @@ def mock_open(filename, mode, *args):
message = 'test.test_install : unexpected opening mode for mock_open'
raise RuntimeError(message)
- FILE_REGISTRY[filename] = StringIO.StringIO()
+ FILE_REGISTRY[filename] = StringIO()
try:
yield FILE_REGISTRY[filename]
diff --git a/lib/spack/spack/test/cmd/url.py b/lib/spack/spack/test/cmd/url.py
index 4c60d814ce..3bc0bc7820 100644
--- a/lib/spack/spack/test/cmd/url.py
+++ b/lib/spack/spack/test/cmd/url.py
@@ -48,11 +48,12 @@ def test_name_parsed_correctly():
assert name_parsed_correctly(MyPackage('r-devtools', []), 'devtools')
assert name_parsed_correctly(MyPackage('py-numpy', []), 'numpy')
assert name_parsed_correctly(MyPackage('octave-splines', []), 'splines')
+ assert name_parsed_correctly(MyPackage('imagemagick', []), 'ImageMagick') # noqa
+ assert name_parsed_correctly(MyPackage('th-data', []), 'TH.data')
# Expected False
assert not name_parsed_correctly(MyPackage('', []), 'hdf5')
assert not name_parsed_correctly(MyPackage('hdf5', []), '')
- assert not name_parsed_correctly(MyPackage('imagemagick', []), 'ImageMagick') # noqa
assert not name_parsed_correctly(MyPackage('yaml-cpp', []), 'yamlcpp')
assert not name_parsed_correctly(MyPackage('yamlcpp', []), 'yaml-cpp')
assert not name_parsed_correctly(MyPackage('r-py-parser', []), 'parser')
@@ -64,6 +65,8 @@ def test_version_parsed_correctly():
assert version_parsed_correctly(MyPackage('', ['1.2.3']), '1.2.3')
assert version_parsed_correctly(MyPackage('', ['5.4a', '5.4b']), '5.4a')
assert version_parsed_correctly(MyPackage('', ['5.4a', '5.4b']), '5.4b')
+ assert version_parsed_correctly(MyPackage('', ['1.63.0']), '1_63_0')
+ assert version_parsed_correctly(MyPackage('', ['0.94h']), '094h')
# Expected False
assert not version_parsed_correctly(MyPackage('', []), '1.2.3')
@@ -95,7 +98,7 @@ def test_url_list(parser):
colored_urls = url_list(args)
assert colored_urls == total_urls
- # The following two options should print fewer URLs than the default.
+ # The following options should print fewer URLs than the default.
# If they print the same number of URLs, something is horribly broken.
# If they say we missed 0 URLs, something is probably broken too.
args = parser.parse_args(['list', '--incorrect-name'])
@@ -106,11 +109,19 @@ def test_url_list(parser):
incorrect_version_urls = url_list(args)
assert 0 < incorrect_version_urls < total_urls
+ args = parser.parse_args(['list', '--correct-name'])
+ correct_name_urls = url_list(args)
+ assert 0 < correct_name_urls < total_urls
-def test_url_test(parser):
- args = parser.parse_args(['test'])
+ args = parser.parse_args(['list', '--correct-version'])
+ correct_version_urls = url_list(args)
+ assert 0 < correct_version_urls < total_urls
+
+
+def test_url_summary(parser):
+ args = parser.parse_args(['summary'])
(total_urls, correct_names, correct_versions,
- name_count_dict, version_count_dict) = url_test(args)
+ name_count_dict, version_count_dict) = url_summary(args)
assert 0 < correct_names <= sum(name_count_dict.values()) <= total_urls # noqa
assert 0 < correct_versions <= sum(version_count_dict.values()) <= total_urls # noqa
diff --git a/lib/spack/spack/test/compilers.py b/lib/spack/spack/test/compilers.py
index d0fc506f40..bc21ec886e 100644
--- a/lib/spack/spack/test/compilers.py
+++ b/lib/spack/spack/test/compilers.py
@@ -23,6 +23,7 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import pytest
+from six import iteritems
import spack.spec
import spack.compilers as compilers
@@ -30,7 +31,7 @@ import spack.compilers as compilers
@pytest.mark.usefixtures('config')
class TestCompilers(object):
-
+
def test_get_compiler_duplicates(self):
# In this case there is only one instance of the specified compiler in
# the test configuration (so it is not actually a duplicate), but the
@@ -38,11 +39,11 @@ class TestCompilers(object):
cfg_file_to_duplicates = compilers.get_compiler_duplicates(
'gcc@4.5.0', spack.spec.ArchSpec('cray-CNL-xeon'))
assert len(cfg_file_to_duplicates) == 1
- cfg_file, duplicates = cfg_file_to_duplicates.iteritems().next()
+ cfg_file, duplicates = next(iteritems(cfg_file_to_duplicates))
assert len(duplicates) == 1
def test_all_compilers(self):
all_compilers = compilers.all_compilers()
- filtered = list(x for x in all_compilers if str(x.spec) == 'clang@3.3')
- filtered = list(x for x in filtered if x.operating_system == 'SuSE11')
+ filtered = [x for x in all_compilers if str(x.spec) == 'clang@3.3']
+ filtered = [x for x in filtered if x.operating_system == 'SuSE11']
assert len(filtered) == 1
diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py
index f4021a89ee..3b383584ce 100644
--- a/lib/spack/spack/test/concretize.py
+++ b/lib/spack/spack/test/concretize.py
@@ -26,7 +26,7 @@ import pytest
import spack
import spack.architecture
from spack.concretize import find_spec
-from spack.spec import Spec, CompilerSpec
+from spack.spec import Spec, CompilerSpec, ConflictsInSpecError, SpecError
from spack.version import ver
@@ -82,6 +82,10 @@ def check_concretize(abstract_spec):
'mpileaks ^mpi', 'mpileaks ^mpi@:1.1', 'mpileaks ^mpi@2:',
'mpileaks ^mpi@2.1', 'mpileaks ^mpi@2.2', 'mpileaks ^mpi@2.2',
-        'mpileaks ^mpi@:1', 'mpileaks ^mpi@1.2:2'
+        'mpileaks ^mpi@:1', 'mpileaks ^mpi@1.2:2',
+        # conflict not triggered
+        'conflict',
+ 'conflict%clang~foo',
+ 'conflict-parent%gcc'
]
)
def spec(request):
@@ -89,6 +93,19 @@ def spec(request):
return request.param
+@pytest.fixture(
+ params=[
+ 'conflict%clang',
+ 'conflict%clang+foo',
+ 'conflict-parent%clang',
+ 'conflict-parent@0.9^conflict~foo'
+ ]
+)
+def conflict_spec(request):
+ """Spec to be concretized"""
+ return request.param
+
+
@pytest.mark.usefixtures('config', 'builtin_mock')
class TestConcretize(object):
def test_concretize(self, spec):
@@ -372,3 +389,11 @@ class TestConcretize(object):
s.concretize()
assert s['mpileaks'].satisfies('%clang')
assert s['dyninst'].satisfies('%gcc')
+
+ def test_conflicts_in_spec(self, conflict_spec):
+        # Check that an exception is raised and caught by the appropriate
+ # exception types.
+ for exc_type in (ConflictsInSpecError, RuntimeError, SpecError):
+ s = Spec(conflict_spec)
+ with pytest.raises(exc_type):
+ s.concretize()
diff --git a/lib/spack/spack/test/concretize_preferences.py b/lib/spack/spack/test/concretize_preferences.py
index 54df4e1563..bf915064b2 100644
--- a/lib/spack/spack/test/concretize_preferences.py
+++ b/lib/spack/spack/test/concretize_preferences.py
@@ -27,7 +27,7 @@ import pytest
import spack
import spack.util.spack_yaml as syaml
from spack.spec import Spec
-from spack.package_prefs import PreferredPackages
+import spack.package_prefs
@pytest.fixture()
@@ -41,7 +41,7 @@ def concretize_scope(config, tmpdir):
# This is kind of weird, but that's how config scopes are
# set in ConfigScope.__init__
spack.config.config_scopes.pop('concretize')
- spack.package_prefs._pkgsort = PreferredPackages()
+ spack.package_prefs.PackagePrefs.clear_caches()
# reset provider index each time, too
spack.repo._provider_index = None
@@ -55,7 +55,7 @@ def update_packages(pkgname, section, value):
"""Update config and reread package list"""
conf = {pkgname: {section: value}}
spack.config.update_config('packages', conf, 'concretize')
- spack.package_prefs._pkgsort = PreferredPackages()
+ spack.package_prefs.PackagePrefs.clear_caches()
def assert_variant_values(spec, **variants):
@@ -146,7 +146,7 @@ all:
spack.config.update_config('packages', conf, 'concretize')
# should be no error for 'all':
- spack.package_prefs._pkgsort = PreferredPackages()
+ spack.package_prefs.PackagePrefs.clear_caches()
spack.package_prefs.get_packages_config()
def test_external_mpi(self):
diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py
index f344727674..fc1d6ecec2 100644
--- a/lib/spack/spack/test/conftest.py
+++ b/lib/spack/spack/test/conftest.py
@@ -27,11 +27,12 @@ import copy
import os
import re
import shutil
+from six import StringIO
-import cStringIO
import llnl.util.filesystem
import llnl.util.lang
import ordereddict_backport
+
import py
import pytest
import spack
@@ -56,11 +57,8 @@ def no_stdin_duplication(monkeypatch):
"""Duplicating stdin (or any other stream) returns an empty
cStringIO object.
"""
- monkeypatch.setattr(
- llnl.util.lang,
- 'duplicate_stream',
- lambda x: cStringIO.StringIO()
- )
+ monkeypatch.setattr(llnl.util.lang, 'duplicate_stream',
+ lambda x: StringIO())
@pytest.fixture(autouse=True)
@@ -170,15 +168,19 @@ def configuration_dir(tmpdir_factory, linux_os):
def config(configuration_dir):
"""Hooks the mock configuration files into spack.config"""
# Set up a mock config scope
+ spack.package_prefs.PackagePrefs.clear_caches()
spack.config.clear_config_caches()
real_scope = spack.config.config_scopes
spack.config.config_scopes = ordereddict_backport.OrderedDict()
spack.config.ConfigScope('site', str(configuration_dir.join('site')))
spack.config.ConfigScope('user', str(configuration_dir.join('user')))
Config = collections.namedtuple('Config', ['real', 'mock'])
+
yield Config(real=real_scope, mock=spack.config.config_scopes)
+
spack.config.config_scopes = real_scope
spack.config.clear_config_caches()
+ spack.package_prefs.PackagePrefs.clear_caches()
@pytest.fixture(scope='module')
@@ -312,7 +314,7 @@ def mock_archive():
"\ttouch $prefix/dummy_file\n"
"EOF\n"
)
- os.chmod(configure_path, 0755)
+ os.chmod(configure_path, 0o755)
# Archive it
current = tmpdir.chdir()
archive_name = '{0}.tar.gz'.format(repo_name)
diff --git a/lib/spack/spack/test/data/web/1.html b/lib/spack/spack/test/data/web/1.html
new file mode 100644
index 0000000000..ef49c38cdb
--- /dev/null
+++ b/lib/spack/spack/test/data/web/1.html
@@ -0,0 +1,10 @@
+<html>
+ <head>
+ This is page 1.
+ </head>
+ <body>
+ <a href="2.html">list_depth=2 follows this.</a>
+
+ <a href="foo-1.0.0.tar.gz">foo-1.0.0.tar.gz</a>
+ </body>
+</html>
diff --git a/lib/spack/spack/test/data/web/2.html b/lib/spack/spack/test/data/web/2.html
new file mode 100644
index 0000000000..64c843f25b
--- /dev/null
+++ b/lib/spack/spack/test/data/web/2.html
@@ -0,0 +1,12 @@
+<html>
+ <head>
+ This is page 2.
+ </head>
+ <body>
+ <a href="3.html">list_depth=3 follows this.</a>
+ <a href="4.html">list_depth=3 follows this too.</a>
+
+ <a href="foo-2.0.0.tar.gz">foo-2.0.0.tar.gz</a>
+ <a href="foo-2.0.0b2.tar.gz">foo-2.0.0b2.tar.gz</a>
+ </body>
+</html>
diff --git a/lib/spack/spack/test/data/web/3.html b/lib/spack/spack/test/data/web/3.html
new file mode 100644
index 0000000000..e530206035
--- /dev/null
+++ b/lib/spack/spack/test/data/web/3.html
@@ -0,0 +1,11 @@
+<html>
+ <head>
+ This is page 3.
+ </head>
+ <body>
+ <a href="index.html">This link is already visited.</a>
+
+ <a href="foo-3.0.tar.gz">foo-3.0.tar.gz</a>
+ <a href="foo-3.0a1.tar.gz">foo-3.0a1.tar.gz</a>
+ </body>
+</html>
diff --git a/lib/spack/spack/test/data/web/4.html b/lib/spack/spack/test/data/web/4.html
new file mode 100644
index 0000000000..b5fe850f4d
--- /dev/null
+++ b/lib/spack/spack/test/data/web/4.html
@@ -0,0 +1,11 @@
+<html>
+ <head>
+ This is page 4.
+ </head>
+ <body>
+ This page is terminal and has no links to other pages.
+
+ <a href="foo-4.5.tar.gz">foo-4.5.tar.gz.</a>
+ <a href="foo-4.5-rc5.tar.gz">foo-4.1-rc5.tar.gz.</a>
+ </body>
+</html>
diff --git a/lib/spack/spack/test/data/web/index.html b/lib/spack/spack/test/data/web/index.html
new file mode 100644
index 0000000000..3985deeb35
--- /dev/null
+++ b/lib/spack/spack/test/data/web/index.html
@@ -0,0 +1,10 @@
+<html>
+ <head>
+ This is the root page.
+ </head>
+ <body>
+ <a href="1.html">list_depth=1 follows this.</a>
+
+ <a href="foo-0.0.0.tar.gz">foo-0.0.0.tar.gz</a>
+ </body>
+</html>
diff --git a/lib/spack/spack/test/directory_layout.py b/lib/spack/spack/test/directory_layout.py
index 1987bb3a44..d1365c0e76 100644
--- a/lib/spack/spack/test/directory_layout.py
+++ b/lib/spack/spack/test/directory_layout.py
@@ -133,23 +133,25 @@ def test_read_and_write_spec(
# TODO: increase reuse of build dependencies.
stored_deptypes = ('link', 'run')
expected = spec.copy(deps=stored_deptypes)
+ assert expected.concrete
assert expected == spec_from_file
- assert expected.eq_dag # msg , spec_from_file
+ assert expected.eq_dag(spec_from_file)
assert spec_from_file.concrete
# Ensure that specs that come out "normal" are really normal.
with open(spec_path) as spec_file:
read_separately = Spec.from_yaml(spec_file.read())
- # TODO: revise this when build deps are in dag_hash
- norm = read_separately.normalized().copy(deps=stored_deptypes)
- assert norm == spec_from_file
+ # TODO: revise this when build deps are in dag_hash
+ norm = read_separately.normalized().copy(deps=stored_deptypes)
+ assert norm == spec_from_file
+ assert norm.eq_dag(spec_from_file)
- # TODO: revise this when build deps are in dag_hash
- conc = read_separately.concretized().copy(deps=stored_deptypes)
- assert conc == spec_from_file
+ # TODO: revise this when build deps are in dag_hash
+ conc = read_separately.concretized().copy(deps=stored_deptypes)
+ assert conc == spec_from_file
+ assert conc.eq_dag(spec_from_file)
- # Make sure the hash of the read-in spec is the same
assert expected.dag_hash() == spec_from_file.dag_hash()
# Ensure directories are properly removed
diff --git a/lib/spack/spack/test/graph.py b/lib/spack/spack/test/graph.py
index ce7b07ed86..46dd4f1bc6 100644
--- a/lib/spack/spack/test/graph.py
+++ b/lib/spack/spack/test/graph.py
@@ -22,7 +22,7 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-from StringIO import StringIO
+from six import StringIO
from spack.spec import Spec
from spack.graph import AsciiGraph, topological_sort, graph_dot
diff --git a/lib/spack/spack/test/lock.py b/lib/spack/spack/test/lock.py
index 4f62cd85e9..214797c1f6 100644
--- a/lib/spack/spack/test/lock.py
+++ b/lib/spack/spack/test/lock.py
@@ -283,7 +283,7 @@ class LockTest(unittest.TestCase):
# ensure lock file exists the first time, so we open it read-only
# to begin wtih.
touch(self.lock_path)
- os.chmod(self.lock_path, 0444)
+ os.chmod(self.lock_path, 0o444)
lock = Lock(self.lock_path)
self.assertTrue(lock._reads == 0)
diff --git a/lib/spack/spack/test/make_executable.py b/lib/spack/spack/test/make_executable.py
index 87a43a529a..1b3f384a8b 100644
--- a/lib/spack/spack/test/make_executable.py
+++ b/lib/spack/spack/test/make_executable.py
@@ -46,7 +46,7 @@ class MakeExecutableTest(unittest.TestCase):
with open(make_exe, 'w') as f:
f.write('#!/bin/sh\n')
f.write('echo "$@"')
- os.chmod(make_exe, 0700)
+ os.chmod(make_exe, 0o700)
path_put_first('PATH', [self.tmpdir])
diff --git a/lib/spack/spack/test/modules.py b/lib/spack/spack/test/modules.py
index bb1b0006f8..0eb54cba2c 100644
--- a/lib/spack/spack/test/modules.py
+++ b/lib/spack/spack/test/modules.py
@@ -24,14 +24,14 @@
##############################################################################
import collections
import contextlib
+from six import StringIO
-import cStringIO
import pytest
import spack.modules
import spack.spec
# Our "filesystem" for the tests below
-FILE_REGISTRY = collections.defaultdict(cStringIO.StringIO)
+FILE_REGISTRY = collections.defaultdict(StringIO)
# Spec strings that will be used throughout the tests
mpich_spec_string = 'mpich@3.0.4'
mpileaks_spec_string = 'mpileaks'
@@ -48,7 +48,7 @@ def stringio_open(monkeypatch):
if not mode == 'w':
raise RuntimeError('unexpected opening mode for stringio_open')
- FILE_REGISTRY[filename] = cStringIO.StringIO()
+ FILE_REGISTRY[filename] = StringIO()
try:
yield FILE_REGISTRY[filename]
diff --git a/lib/spack/spack/test/multimethod.py b/lib/spack/spack/test/multimethod.py
index fbcc70afe8..003936a77a 100644
--- a/lib/spack/spack/test/multimethod.py
+++ b/lib/spack/spack/test/multimethod.py
@@ -86,7 +86,7 @@ def test_default_works(builtin_mock):
def test_target_match(builtin_mock):
platform = spack.architecture.platform()
- targets = platform.targets.values()
+ targets = list(platform.targets.values())
for target in targets[:-1]:
pkg = spack.repo.get('multimethod target=' + target.name)
assert pkg.different_by_target() == target.name
diff --git a/lib/spack/spack/test/package_sanity.py b/lib/spack/spack/test/package_sanity.py
index c75d7cdcc7..ac318f94dc 100644
--- a/lib/spack/spack/test/package_sanity.py
+++ b/lib/spack/spack/test/package_sanity.py
@@ -22,49 +22,48 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-"""\
-This test does sanity checks on Spack's builtin package database.
-"""
-import unittest
+"""This test does sanity checks on Spack's builtin package database."""
+
import re
import spack
from spack.repository import RepoPath
-class PackageSanityTest(unittest.TestCase):
+def check_db():
+ """Get all packages in a DB to make sure they work."""
+ for name in spack.repo.all_package_names():
+ spack.repo.get(name)
+
+
+def test_get_all_packages():
+ """Get all packages once and make sure that works."""
+ check_db()
+
- def check_db(self):
- """Get all packages in a DB to make sure they work."""
- for name in spack.repo.all_package_names():
- spack.repo.get(name)
+def test_get_all_mock_packages():
+ """Get the mock packages once each too."""
+ db = RepoPath(spack.mock_packages_path)
+ spack.repo.swap(db)
+ check_db()
+ spack.repo.swap(db)
- def test_get_all_packages(self):
- """Get all packages once and make sure that works."""
- self.check_db()
- def test_get_all_mock_packages(self):
- """Get the mock packages once each too."""
- db = RepoPath(spack.mock_packages_path)
- spack.repo.swap(db)
- self.check_db()
- spack.repo.swap(db)
+def test_url_versions():
+ """Check URLs for regular packages, if they are explicitly defined."""
+ for pkg in spack.repo.all_packages():
+ for v, vattrs in pkg.versions.items():
+ if 'url' in vattrs:
+ # If there is a url for the version check it.
+ v_url = pkg.url_for_version(v)
+ assert vattrs['url'] == v_url
- def test_url_versions(self):
- """Check URLs for regular packages, if they are explicitly defined."""
- for pkg in spack.repo.all_packages():
- for v, vattrs in pkg.versions.items():
- if 'url' in vattrs:
- # If there is a url for the version check it.
- v_url = pkg.url_for_version(v)
- self.assertEqual(vattrs['url'], v_url)
- def test_all_versions_are_lowercase(self):
- """Spack package names must be lowercase, and use `-` instead of `_`.
- """
- errors = []
- for name in spack.repo.all_package_names():
- if re.search(r'[_A-Z]', name):
- errors.append(name)
+def test_all_versions_are_lowercase():
+ """Spack package names must be lowercase, and use `-` instead of `_`."""
+ errors = []
+ for name in spack.repo.all_package_names():
+ if re.search(r'[_A-Z]', name):
+ errors.append(name)
- self.assertEqual([], errors)
+ assert len(errors) == 0
diff --git a/lib/spack/spack/test/pattern.py b/lib/spack/spack/test/pattern.py
index 0c772a0d2d..b76f88e670 100644
--- a/lib/spack/spack/test/pattern.py
+++ b/lib/spack/spack/test/pattern.py
@@ -86,6 +86,7 @@ class CompositeTest(unittest.TestCase):
composite.append(self.Two())
composite.add()
self.assertEqual(self.Base.counter, 3)
+
composite.pop()
composite.subtract()
self.assertEqual(self.Base.counter, 2)
diff --git a/lib/spack/spack/test/provider_index.py b/lib/spack/spack/test/provider_index.py
index a176d0c315..69a5c3cd40 100644
--- a/lib/spack/spack/test/provider_index.py
+++ b/lib/spack/spack/test/provider_index.py
@@ -37,7 +37,8 @@ Tests assume that mock packages provide this::
mpi@:10.0: set([zmpi])},
'stuff': {stuff: set([externalvirtual])}}
"""
-import StringIO
+from six import StringIO
+
import spack
from spack.provider_index import ProviderIndex
from spack.spec import Spec
@@ -46,10 +47,10 @@ from spack.spec import Spec
def test_yaml_round_trip(builtin_mock):
p = ProviderIndex(spack.repo.all_package_names())
- ostream = StringIO.StringIO()
+ ostream = StringIO()
p.to_yaml(ostream)
- istream = StringIO.StringIO(ostream.getvalue())
+ istream = StringIO(ostream.getvalue())
q = ProviderIndex.from_yaml(istream)
assert p == q
diff --git a/lib/spack/spack/test/python_version.py b/lib/spack/spack/test/python_version.py
index 5af55bdc5f..ee0ff9d2c9 100644
--- a/lib/spack/spack/test/python_version.py
+++ b/lib/spack/spack/test/python_version.py
@@ -1,5 +1,5 @@
##############################################################################
-# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
@@ -22,26 +22,54 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-"""
-This test ensures that all Spack files are Python version 2.6 or less.
+"""Check that Spack complies with minimum supported python versions.
+
+We ensure that all Spack files work with Python2 >= 2.6 and Python3 >= 3.0.
-Spack was originally 2.7, but enough systems in 2014 are still using
-2.6 on their frontend nodes that we need 2.6 to get adopted.
+We'd like to drop 2.6 support at some point, but there are still many HPC
+systems that ship with RHEL6/CentOS 6, which have Python 2.6 as the
+default version. Once those go away, we can likely drop 2.6 and increase
+the minimum supported Python 3 version, as well.
"""
+from __future__ import print_function
+
import os
+import sys
import re
import unittest
import llnl.util.tty as tty
-import pyqver2
import spack
-spack_max_version = (2, 6)
+#
+# This test uses pyqver, by Greg Hewgill, which is a dual-source module.
+# That means we need to do different checks depending on whether we're
+# running Python 2 or Python 3.
+#
+if sys.version_info[0] < 3:
+ import pyqver2 as pyqver
+ spack_min_supported = (2, 6)
+
+ # Exclude Python 3 versions of dual-source modules when using Python 2
+ exclude_paths = [
+ os.path.join(spack.lib_path, 'external', 'yaml', 'lib3'),
+ os.path.join(spack.lib_path, 'external', 'pyqver3.py')]
+
+else:
+ import pyqver3 as pyqver
+ spack_min_supported = (3, 0)
+
+ # Exclude Python 2 versions of dual-source modules when using Python 3
+ exclude_paths = [
+ os.path.join(spack.lib_path, 'external', 'yaml', 'lib'),
+ os.path.join(spack.lib_path, 'external', 'pyqver2.py')]
class PythonVersionTest(unittest.TestCase):
- def pyfiles(self, *search_paths):
+ def pyfiles(self, search_paths, exclude=()):
+ """List python search files in a set of search paths, excluding
+ any paths in the exclude list"""
# first file is the spack script.
yield spack.spack_file
@@ -49,53 +77,71 @@ class PythonVersionTest(unittest.TestCase):
for path in search_paths:
for root, dirnames, filenames in os.walk(path):
for filename in filenames:
+ realpath = os.path.realpath(os.path.join(root, filename))
+ if any(realpath.startswith(p) for p in exclude):
+ continue
+
if re.match(r'^[^.#].*\.py$', filename):
yield os.path.join(root, filename)
- def package_py_files(self):
- for name in spack.repo.all_package_names():
- yield spack.repo.filename_for_package_name(name)
-
- def check_python_versions(self, *files):
- # dict version -> filename -> reasons
+ def check_python_versions(self, files):
+ # This is a dict of dicts mapping:
+ # version -> filename -> reasons
+ #
+ # Reasons are tuples of (lineno, string), where the string is the
+ # cause for a version incompatibility.
all_issues = {}
- for fn in files:
- with open(fn) as pyfile:
- versions = pyqver2.get_versions(pyfile.read())
- for ver, reasons in versions.items():
- if ver > spack_max_version:
- if ver not in all_issues:
- all_issues[ver] = {}
- all_issues[ver][fn] = reasons
+ # Parse files and run pyqver on each file.
+ for path in files:
+ with open(path) as pyfile:
+ full_text = pyfile.read()
+ versions = pyqver.get_versions(full_text, path)
+
+ for ver, reasons in versions.items():
+ if ver <= spack_min_supported:
+ continue
+
+ # Record issues. Mark exceptions with '# nopyqver' comment
+ for lineno, cause in reasons:
+ lines = full_text.split('\n')
+ if not re.search(r'#\s*nopyqver\s*$', lines[lineno - 1]):
+ all_issues.setdefault(ver, {})[path] = reasons
+ # Print a message if there are issues
if all_issues:
- tty.error("Spack must run on Python version %d.%d"
- % spack_max_version)
+ tty.msg("Spack must remain compatible with Python version %d.%d"
+ % spack_min_supported)
+ # Print out a table showing which files/linenos require which
+ # python version, and a string describing why.
for v in sorted(all_issues.keys(), reverse=True):
- msgs = []
- for fn in sorted(all_issues[v].keys()):
- short_fn = fn
- if fn.startswith(spack.prefix):
- short_fn = fn[len(spack.prefix):]
-
- reasons = [r for r in set(all_issues[v][fn]) if r]
- for r in reasons:
- msgs.append(("%s:%s" % ('spack' + short_fn, r[0]), r[1]))
-
- tty.error("These files require version %d.%d:" % v)
- maxlen = max(len(f) for f, prob in msgs)
+ messages = []
+ for path in sorted(all_issues[v].keys()):
+ short_path = path
+ if path.startswith(spack.prefix):
+ short_path = path[len(spack.prefix):]
+
+ reasons = [r for r in set(all_issues[v][path]) if r]
+ for lineno, cause in reasons:
+ file_line = "%s:%s" % (short_path.lstrip('/'), lineno)
+ messages.append((file_line, cause))
+
+ print()
+ tty.msg("These files require version %d.%d:" % v)
+ maxlen = max(len(f) for f, prob in messages)
fmt = "%%-%ds%%s" % (maxlen + 3)
- print fmt % ('File', 'Reason')
- print fmt % ('-' * (maxlen), '-' * 20)
- for msg in msgs:
- print fmt % msg
+ print(fmt % ('File', 'Reason'))
+ print(fmt % ('-' * (maxlen), '-' * 20))
+ for msg in messages:
+ print(fmt % msg)
+ # Fail this test if there were issues.
self.assertTrue(len(all_issues) == 0)
def test_core_module_compatibility(self):
- self.check_python_versions(*self.pyfiles(spack.lib_path))
+ self.check_python_versions(
+ self.pyfiles([spack.lib_path], exclude=exclude_paths))
def test_package_module_compatibility(self):
- self.check_python_versions(*self.pyfiles(spack.packages_path))
+ self.check_python_versions(self.pyfiles([spack.packages_path]))
diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py
index 2c414bd0c0..af6a4efd95 100644
--- a/lib/spack/spack/test/spec_dag.py
+++ b/lib/spack/spack/test/spec_dag.py
@@ -63,8 +63,7 @@ def set_dependency(saved_deps):
pkg = spack.repo.get(pkg_name)
if pkg_name not in saved_deps:
saved_deps[pkg_name] = (pkg, pkg.dependencies.copy())
- # Change dep spec
- # XXX(deptype): handle deptypes.
+
pkg.dependencies[spec.name] = {Spec(pkg_name): spec}
pkg.dependency_types[spec.name] = set(deptypes)
return _mock
@@ -90,7 +89,7 @@ class TestSpecDag(object):
names = ['mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf',
'zmpi', 'fake']
- pairs = zip([0, 1, 2, 3, 4, 2, 3], names)
+ pairs = list(zip([0, 1, 2, 3, 4, 2, 3], names))
traversal = dag.traverse()
assert [x.name for x in traversal] == names
@@ -104,7 +103,7 @@ class TestSpecDag(object):
names = ['mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf',
'libelf', 'zmpi', 'fake', 'zmpi']
- pairs = zip([0, 1, 2, 3, 4, 3, 2, 3, 1], names)
+ pairs = list(zip([0, 1, 2, 3, 4, 3, 2, 3, 1], names))
traversal = dag.traverse(cover='edges')
assert [x.name for x in traversal] == names
@@ -118,7 +117,7 @@ class TestSpecDag(object):
names = ['mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf',
'libelf', 'zmpi', 'fake', 'zmpi', 'fake']
- pairs = zip([0, 1, 2, 3, 4, 3, 2, 3, 1, 2], names)
+ pairs = list(zip([0, 1, 2, 3, 4, 3, 2, 3, 1, 2], names))
traversal = dag.traverse(cover='paths')
assert [x.name for x in traversal] == names
@@ -132,7 +131,7 @@ class TestSpecDag(object):
names = ['libelf', 'libdwarf', 'dyninst', 'fake', 'zmpi',
'callpath', 'mpileaks']
- pairs = zip([4, 3, 2, 3, 2, 1, 0], names)
+ pairs = list(zip([4, 3, 2, 3, 2, 1, 0], names))
traversal = dag.traverse(order='post')
assert [x.name for x in traversal] == names
@@ -146,7 +145,7 @@ class TestSpecDag(object):
names = ['libelf', 'libdwarf', 'libelf', 'dyninst', 'fake', 'zmpi',
'callpath', 'zmpi', 'mpileaks']
- pairs = zip([4, 3, 3, 2, 3, 2, 1, 1, 0], names)
+ pairs = list(zip([4, 3, 3, 2, 3, 2, 1, 1, 0], names))
traversal = dag.traverse(cover='edges', order='post')
assert [x.name for x in traversal] == names
@@ -160,7 +159,7 @@ class TestSpecDag(object):
names = ['libelf', 'libdwarf', 'libelf', 'dyninst', 'fake', 'zmpi',
'callpath', 'fake', 'zmpi', 'mpileaks']
- pairs = zip([4, 3, 3, 2, 3, 2, 1, 2, 1, 0], names)
+ pairs = list(zip([4, 3, 3, 2, 3, 2, 1, 2, 1, 0], names))
traversal = dag.traverse(cover='paths', order='post')
assert [x.name for x in traversal] == names
@@ -609,6 +608,8 @@ class TestSpecDag(object):
assert '^mpich2' in s2
def test_construct_spec_with_deptypes(self):
+ """Ensure that it is possible to construct a spec with explicit
+ dependency types."""
s = Spec('a',
Spec('b',
['build'], Spec('c')),
@@ -633,7 +634,12 @@ class TestSpecDag(object):
assert s['f']._dependents['e'].deptypes == ('run',)
def check_diamond_deptypes(self, spec):
- """Validate deptypes in dt-diamond spec."""
+ """Validate deptypes in dt-diamond spec.
+
+ This ensures that concretization works properly when two packages
+ depend on the same dependency in different ways.
+
+ """
assert spec['dt-diamond']._dependencies[
'dt-diamond-left'].deptypes == ('build', 'link')
diff --git a/lib/spack/spack/test/spec_semantics.py b/lib/spack/spack/test/spec_semantics.py
index 2f3b2b1b8d..f071bcc833 100644
--- a/lib/spack/spack/test/spec_semantics.py
+++ b/lib/spack/spack/test/spec_semantics.py
@@ -293,7 +293,7 @@ class TestSpecSematics(object):
copy = spec.copy()
for s in spec.traverse():
assert s.satisfies(copy[s.name])
- assert copy[s.name].satisfies(s)
+ assert copy[s.name].satisfies(s)
def test_unsatisfiable_compiler_flag_mismatch(self):
# No matchi in specs
diff --git a/lib/spack/spack/test/spec_syntax.py b/lib/spack/spack/test/spec_syntax.py
index fcb6cfa907..dfad4a019f 100644
--- a/lib/spack/spack/test/spec_syntax.py
+++ b/lib/spack/spack/test/spec_syntax.py
@@ -122,7 +122,7 @@ class TestSpecSyntax(object):
def _check_raises(self, exc_type, items):
for item in items:
with pytest.raises(exc_type):
- self.check_parse(item)
+ Spec(item)
# ========================================================================
# Parse checks
@@ -225,113 +225,174 @@ class TestSpecSyntax(object):
errors = ['x@@1.2', 'x ^y@@1.2', 'x@1.2::', 'x::']
self._check_raises(SpecParseError, errors)
+ def _check_hash_parse(self, spec):
+ """Check several ways to specify a spec by hash."""
+ # full hash
+ self.check_parse(str(spec), '/' + spec.dag_hash())
+
+ # partial hash
+ self.check_parse(str(spec), '/ ' + spec.dag_hash()[:5])
+
+ # name + hash
+ self.check_parse(str(spec), spec.name + '/' + spec.dag_hash())
+
+ # name + version + space + partial hash
+ self.check_parse(
+ str(spec), spec.name + '@' + str(spec.version) +
+ ' /' + spec.dag_hash()[:6])
+
def test_spec_by_hash(self, database):
specs = database.mock.db.query()
- hashes = [s._hash for s in specs] # Preserves order of elements
-
- # Make sure the database is still the shape we expect
- assert len(specs) > 3
+ assert len(specs) # make sure something's in the DB
- self.check_parse(str(specs[0]), '/' + hashes[0])
- self.check_parse(str(specs[1]), '/ ' + hashes[1][:5])
- self.check_parse(str(specs[2]), specs[2].name + '/' + hashes[2])
- self.check_parse(str(specs[3]),
- specs[3].name + '@' + str(specs[3].version) +
- ' /' + hashes[3][:6])
+ for spec in specs:
+ self._check_hash_parse(spec)
def test_dep_spec_by_hash(self, database):
- specs = database.mock.db.query()
- hashes = [s._hash for s in specs] # Preserves order of elements
-
- # Make sure the database is still the shape we expect
- assert len(specs) > 10
- assert specs[4].name in specs[10]
- assert specs[-1].name in specs[10]
-
- spec1 = sp.Spec(specs[10].name + '^/' + hashes[4])
- assert specs[4].name in spec1 and spec1[specs[4].name] == specs[4]
- spec2 = sp.Spec(specs[10].name + '%' + str(specs[10].compiler) +
- ' ^ / ' + hashes[-1])
- assert (specs[-1].name in spec2 and
- spec2[specs[-1].name] == specs[-1] and
- spec2.compiler == specs[10].compiler)
- spec3 = sp.Spec(specs[10].name + '^/' + hashes[4][:4] +
- '^ / ' + hashes[-1][:5])
- assert (specs[-1].name in spec3 and
- spec3[specs[-1].name] == specs[-1] and
- specs[4].name in spec3 and spec3[specs[4].name] == specs[4])
+ mpileaks_zmpi = database.mock.db.query_one('mpileaks ^zmpi')
+ zmpi = database.mock.db.query_one('zmpi')
+ fake = database.mock.db.query_one('fake')
- def test_multiple_specs_with_hash(self, database):
- specs = database.mock.db.query()
- hashes = [s._hash for s in specs] # Preserves order of elements
-
- assert len(specs) > 3
-
- output = sp.parse(specs[0].name + '/' + hashes[0] + '/' + hashes[1])
- assert len(output) == 2
- output = sp.parse('/' + hashes[0] + '/' + hashes[1])
- assert len(output) == 2
- output = sp.parse('/' + hashes[0] + '/' + hashes[1] +
- ' ' + specs[2].name)
- assert len(output) == 3
- output = sp.parse('/' + hashes[0] +
- ' ' + specs[1].name + ' ' + specs[2].name)
- assert len(output) == 3
- output = sp.parse('/' + hashes[0] + ' ' +
- specs[1].name + ' / ' + hashes[1])
- assert len(output) == 2
+ assert 'fake' in mpileaks_zmpi
+ assert 'zmpi' in mpileaks_zmpi
- def test_ambiguous_hash(self, database):
- specs = database.mock.db.query()
- hashes = [s._hash for s in specs] # Preserves order of elements
+ mpileaks_hash_fake = sp.Spec('mpileaks ^/' + fake.dag_hash())
+ assert 'fake' in mpileaks_hash_fake
+ assert mpileaks_hash_fake['fake'] == fake
+
+ mpileaks_hash_zmpi = sp.Spec(
+ 'mpileaks %' + str(mpileaks_zmpi.compiler) +
+ ' ^ / ' + zmpi.dag_hash())
+ assert 'zmpi' in mpileaks_hash_zmpi
+ assert mpileaks_hash_zmpi['zmpi'] == zmpi
+ assert mpileaks_hash_zmpi.compiler == mpileaks_zmpi.compiler
+
+ mpileaks_hash_fake_and_zmpi = sp.Spec(
+ 'mpileaks ^/' + fake.dag_hash()[:4] + '^ / ' + zmpi.dag_hash()[:5])
+ assert 'zmpi' in mpileaks_hash_fake_and_zmpi
+ assert mpileaks_hash_fake_and_zmpi['zmpi'] == zmpi
- # Make sure the database is as expected
- assert hashes[1][:1] == hashes[2][:1] == 'b'
+ assert 'fake' in mpileaks_hash_fake_and_zmpi
+ assert mpileaks_hash_fake_and_zmpi['fake'] == fake
- ambiguous_hashes = ['/b',
- specs[1].name + '/b',
- specs[0].name + '^/b',
- specs[0].name + '^' + specs[1].name + '/b']
- self._check_raises(AmbiguousHashError, ambiguous_hashes)
+ def test_multiple_specs_with_hash(self, database):
+ mpileaks_zmpi = database.mock.db.query_one('mpileaks ^zmpi')
+ callpath_mpich2 = database.mock.db.query_one('callpath ^mpich2')
+
+ # name + hash + separate hash
+ specs = sp.parse('mpileaks /' + mpileaks_zmpi.dag_hash() +
+ '/' + callpath_mpich2.dag_hash())
+ assert len(specs) == 2
+
+ # 2 separate hashes
+ specs = sp.parse('/' + mpileaks_zmpi.dag_hash() +
+ '/' + callpath_mpich2.dag_hash())
+ assert len(specs) == 2
+
+ # 2 separate hashes + name
+ specs = sp.parse('/' + mpileaks_zmpi.dag_hash() +
+ '/' + callpath_mpich2.dag_hash() +
+ ' callpath')
+ assert len(specs) == 3
+
+ # hash + 2 names
+ specs = sp.parse('/' + mpileaks_zmpi.dag_hash() +
+ ' callpath' +
+ ' callpath')
+ assert len(specs) == 3
+
+ # hash + name + hash
+ specs = sp.parse('/' + mpileaks_zmpi.dag_hash() +
+ ' callpath' +
+ ' / ' + callpath_mpich2.dag_hash())
+ assert len(specs) == 2
+
+ def test_ambiguous_hash(self, database):
+ dbspecs = database.mock.db.query()
+
+ def find_ambiguous(specs, keyfun):
+ """Return the first set of specs that's ambiguous under a
+ particular key function."""
+ key_to_spec = {}
+ for spec in specs:
+ key = keyfun(spec)
+ speclist = key_to_spec.setdefault(key, [])
+ speclist.append(spec)
+ if len(speclist) > 1:
+ return (key, speclist)
+
+ # If we fail here, we may need to guarantee that there are
+ # some ambiguous specs by adding more specs to the test DB
+ # until this succeeds.
+ raise RuntimeError("no ambiguous specs found for keyfun!")
+
+ # ambiguity in first hash character
+ char, specs = find_ambiguous(dbspecs, lambda s: s.dag_hash()[0])
+ self._check_raises(AmbiguousHashError, ['/' + char])
+
+ # ambiguity in first hash character AND spec name
+ t, specs = find_ambiguous(dbspecs,
+ lambda s: (s.name, s.dag_hash()[0]))
+ name, char = t
+ self._check_raises(AmbiguousHashError, [name + '/' + char])
def test_invalid_hash(self, database):
- specs = database.mock.db.query()
- hashes = [s._hash for s in specs] # Preserves order of elements
+ mpileaks_zmpi = database.mock.db.query_one('mpileaks ^zmpi')
+ zmpi = database.mock.db.query_one('zmpi')
- # Make sure the database is as expected
- assert (hashes[0] != hashes[3] and
- hashes[1] != hashes[4] and len(specs) > 4)
+ mpileaks_mpich = database.mock.db.query_one('mpileaks ^mpich')
+ mpich = database.mock.db.query_one('mpich')
- inputs = [specs[0].name + '/' + hashes[3],
- specs[1].name + '^' + specs[4].name + '/' + hashes[0],
- specs[1].name + '^' + specs[4].name + '/' + hashes[1]]
- self._check_raises(InvalidHashError, inputs)
+ # name + incompatible hash
+ self._check_raises(InvalidHashError, [
+ 'zmpi /' + mpich.dag_hash(),
+ 'mpich /' + zmpi.dag_hash()])
+
+ # name + dep + incompatible hash
+ self._check_raises(InvalidHashError, [
+ 'mpileaks ^mpich /' + mpileaks_zmpi.dag_hash(),
+ 'mpileaks ^zmpi /' + mpileaks_mpich.dag_hash()])
def test_nonexistent_hash(self, database):
- # This test uses database to make sure we don't accidentally access
- # real installs, however unlikely
+ """Ensure we get errors for nonexistant hashes."""
specs = database.mock.db.query()
- hashes = [s._hash for s in specs] # Preserves order of elements
- # Make sure the database is as expected
- assert 'abc123' not in [h[:6] for h in hashes]
+ # This hash shouldn't be in the test DB. What are the odds :)
+ no_such_hash = 'aaaaaaaaaaaaaaa'
+ hashes = [s._hash for s in specs]
+ assert no_such_hash not in [h[:len(no_such_hash)] for h in hashes]
- nonexistant_hashes = ['/abc123',
- specs[0].name + '/abc123']
- self._check_raises(SystemExit, nonexistant_hashes)
+ self._check_raises(NoSuchHashError, [
+ '/' + no_such_hash,
+ 'mpileaks /' + no_such_hash])
def test_redundant_spec(self, database):
- specs = database.mock.db.query()
- hashes = [s._hash for s in specs] # Preserves order of elements
+ """Check that redundant spec constraints raise errors.
+
+ TODO (TG): does this need to be an error? Or should concrete
+ specs only raise errors if constraints cause a contradiction?
+
+ """
+ mpileaks_zmpi = database.mock.db.query_one('mpileaks ^zmpi')
+ callpath_zmpi = database.mock.db.query_one('callpath ^zmpi')
+ dyninst = database.mock.db.query_one('dyninst')
+
+ mpileaks_mpich2 = database.mock.db.query_one('mpileaks ^mpich2')
+
+ redundant_specs = [
+ # redundant compiler
+ '/' + mpileaks_zmpi.dag_hash() + '%' + str(mpileaks_zmpi.compiler),
+
+ # redundant version
+ 'mpileaks/' + mpileaks_mpich2.dag_hash() +
+ '@' + str(mpileaks_mpich2.version),
+
+ # redundant dependency
+ 'callpath /' + callpath_zmpi.dag_hash() + '^ libelf',
- # Make sure the database is as expected
- assert len(specs) > 3
+ # redundant flags
+ '/' + dyninst.dag_hash() + ' cflags="-O3 -fPIC"']
- redundant_specs = ['/' + hashes[0] + '%' + str(specs[0].compiler),
- specs[1].name + '/' + hashes[1] +
- '@' + str(specs[1].version),
- specs[2].name + '/' + hashes[2] + '^ libelf',
- '/' + hashes[3] + ' cflags="-O3 -fPIC"']
self._check_raises(RedundantSpecError, redundant_specs)
def test_duplicate_variant(self):
diff --git a/lib/spack/spack/test/spec_yaml.py b/lib/spack/spack/test/spec_yaml.py
index e913dc8412..0bcd2de3cf 100644
--- a/lib/spack/spack/test/spec_yaml.py
+++ b/lib/spack/spack/test/spec_yaml.py
@@ -27,6 +27,8 @@
YAML format preserves DAG informatoin in the spec.
"""
+from collections import Iterable, Mapping
+
import spack.util.spack_json as sjson
import spack.util.spack_yaml as syaml
from spack.spec import Spec
@@ -78,8 +80,6 @@ def test_using_ordered_dict(builtin_mock):
versions and processes.
"""
def descend_and_check(iterable, level=0):
- from spack.util.spack_yaml import syaml_dict
- from collections import Iterable, Mapping
if isinstance(iterable, Mapping):
assert isinstance(iterable, syaml_dict)
return descend_and_check(iterable.values(), level=level + 1)
@@ -95,7 +95,12 @@ def test_using_ordered_dict(builtin_mock):
for spec in specs:
dag = Spec(spec)
dag.normalize()
+ from pprint import pprint
+ pprint(dag.to_node_dict())
+ break
+
level = descend_and_check(dag.to_node_dict())
+
# level just makes sure we are doing something here
assert level >= 5
diff --git a/lib/spack/spack/test/url_extrapolate.py b/lib/spack/spack/test/url_extrapolate.py
deleted file mode 100644
index 5f5cf555ae..0000000000
--- a/lib/spack/spack/test/url_extrapolate.py
+++ /dev/null
@@ -1,101 +0,0 @@
-##############################################################################
-# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
-# Produced at the Lawrence Livermore National Laboratory.
-#
-# This file is part of Spack.
-# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
-# LLNL-CODE-647188
-#
-# For details, see https://github.com/llnl/spack
-# Please also see the LICENSE file for our notice and the LGPL.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License (as
-# published by the Free Software Foundation) version 2.1, February 1999.
-#
-# This program is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
-# conditions of the GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-##############################################################################
-"""Tests ability of spack to extrapolate URL versions from
-existing versions.
-"""
-import unittest
-
-import spack.url as url
-
-
-class UrlExtrapolateTest(unittest.TestCase):
-
- def check_url(self, base, version, new_url):
- self.assertEqual(url.substitute_version(base, version), new_url)
-
- def test_libelf_version(self):
- base = "http://www.mr511.de/software/libelf-0.8.13.tar.gz"
- self.check_url(base, '0.8.13', base)
- self.check_url(
- base, '0.8.12', "http://www.mr511.de/software/libelf-0.8.12.tar.gz")
- self.check_url(
- base, '0.3.1', "http://www.mr511.de/software/libelf-0.3.1.tar.gz")
- self.check_url(
- base, '1.3.1b', "http://www.mr511.de/software/libelf-1.3.1b.tar.gz")
-
- def test_libdwarf_version(self):
- base = "http://www.prevanders.net/libdwarf-20130729.tar.gz"
- self.check_url(base, '20130729', base)
- self.check_url(
- base, '8.12', "http://www.prevanders.net/libdwarf-8.12.tar.gz")
-
- def test_dyninst_version(self):
- # Dyninst has a version twice in the URL.
- base = "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1.2/DyninstAPI-8.1.2.tgz"
- self.check_url(base, '8.1.2', base)
- self.check_url(base, '8.2',
- "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.2/DyninstAPI-8.2.tgz")
- self.check_url(base, '8.3.1',
- "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.3.1/DyninstAPI-8.3.1.tgz")
-
- def test_partial_version_prefix(self):
- # Test now with a partial prefix earlier in the URL -- this is
- # hard to figure out so Spack only substitutes the last
- # instance of the version.
- base = "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1/DyninstAPI-8.1.2.tgz"
- self.check_url(base, '8.1.2', base)
- self.check_url(base, '8.1.4',
- "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1/DyninstAPI-8.1.4.tgz")
- self.check_url(base, '8.2',
- "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1/DyninstAPI-8.2.tgz")
- self.check_url(base, '8.3.1',
- "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1/DyninstAPI-8.3.1.tgz")
-
- def test_scalasca_partial_version(self):
- # Note that this probably doesn't actually work, but sites are
- # inconsistent about their directory structure, so it's not
- # clear what is right. This test is for consistency and to
- # document behavior. If you figure out a good way to handle
- # this case, fix the tests too.
- self.check_url('http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3-TP1.tar.gz', '8.3.1',
- 'http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-8.3.1.tar.gz')
- self.check_url('http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3-TP1.tar.gz', '8.3.1',
- 'http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-8.3.1.tar.gz')
-
- def test_mpileaks_version(self):
- self.check_url('https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz', '2.1.3',
- 'https://github.com/hpc/mpileaks/releases/download/v2.1.3/mpileaks-2.1.3.tar.gz')
-
- def test_gcc(self):
- self.check_url('http://open-source-box.org/gcc/gcc-4.9.2/gcc-4.9.2.tar.bz2', '4.7',
- 'http://open-source-box.org/gcc/gcc-4.7/gcc-4.7.tar.bz2')
- self.check_url('http://open-source-box.org/gcc/gcc-4.4.7/gcc-4.4.7.tar.bz2', '4.4.7',
- 'http://open-source-box.org/gcc/gcc-4.4.7/gcc-4.4.7.tar.bz2')
-
- def test_github_raw(self):
- self.check_url('https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true', '2.0.7',
- 'https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true')
- self.check_url('https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true', '4.7',
- 'https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v4.7.tgz?raw=true')
diff --git a/lib/spack/spack/test/url_parse.py b/lib/spack/spack/test/url_parse.py
index 8913de94d0..2af7c6ae0b 100644
--- a/lib/spack/spack/test/url_parse.py
+++ b/lib/spack/spack/test/url_parse.py
@@ -22,246 +22,667 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-"""\
-This file has a bunch of versions tests taken from the excellent version
-detection in Homebrew.
-"""
+"""Tests Spack's ability to parse the name and version of a package
+based on its URL."""
+
+import os
import unittest
-import spack.url as url
+from spack.url import *
-class UrlParseTest(unittest.TestCase):
+class UrlStripVersionSuffixesTest(unittest.TestCase):
+ """Tests for spack.url.strip_version_suffixes"""
- def assert_not_detected(self, string):
- self.assertRaises(
- url.UndetectableVersionError, url.parse_name_and_version, string)
+ def check(self, before, after):
+ stripped = strip_version_suffixes(before)
+ self.assertEqual(stripped, after)
- def check(self, name, v, string, **kwargs):
- # Make sure correct name and version are extracted.
- parsed_name, parsed_v = url.parse_name_and_version(string)
- self.assertEqual(parsed_name, name)
- self.assertEqual(parsed_v, url.Version(v))
+ def test_no_suffix(self):
+ self.check('rgb-1.0.6',
+ 'rgb-1.0.6')
- # Some URLs (like boost) are special and need to override the
- # built-in functionality.
- if kwargs.get('no_check_url', False):
- return
+ def test_misleading_prefix(self):
+ self.check('jpegsrc.v9b',
+ 'jpegsrc.v9b')
+ self.check('turbolinux702',
+ 'turbolinux702')
+ self.check('converge_install_2.3.16',
+ 'converge_install_2.3.16')
- # Make sure Spack formulates the right URL when we try to
- # build one with a specific version.
- self.assertEqual(string, url.substitute_version(string, v))
+ # Download type
- def test_wwwoffle_version(self):
- self.check(
- 'wwwoffle', '2.9h',
- 'http://www.gedanken.demon.co.uk/download-wwwoffle/wwwoffle-2.9h.tgz')
+ def test_src(self):
+ self.check('apache-ant-1.9.7-src',
+ 'apache-ant-1.9.7')
+ self.check('go1.7.4.src',
+ 'go1.7.4')
+
+ def test_source(self):
+ self.check('bowtie2-2.2.5-source',
+ 'bowtie2-2.2.5')
+ self.check('grib_api-1.17.0-Source',
+ 'grib_api-1.17.0')
+
+ def test_full(self):
+ self.check('julia-0.4.3-full',
+ 'julia-0.4.3')
+
+ def test_bin(self):
+ self.check('apache-maven-3.3.9-bin',
+ 'apache-maven-3.3.9')
+
+ def test_binary(self):
+ self.check('Jmol-14.8.0-binary',
+ 'Jmol-14.8.0')
+
+ def test_gem(self):
+ self.check('rubysl-date-2.0.9.gem',
+ 'rubysl-date-2.0.9')
+
+ def test_tar(self):
+ self.check('gromacs-4.6.1-tar',
+ 'gromacs-4.6.1')
+
+ def test_sh(self):
+ self.check('Miniconda2-4.3.11-Linux-x86_64.sh',
+ 'Miniconda2-4.3.11')
+
+ # Download version
+
+ def test_stable(self):
+ self.check('libevent-2.0.21-stable',
+ 'libevent-2.0.21')
+
+ def test_final(self):
+ self.check('2.6.7-final',
+ '2.6.7')
+
+ def test_rel(self):
+ self.check('v1.9.5.1rel',
+ 'v1.9.5.1')
+
+ def test_orig(self):
+ self.check('dash_0.5.5.1.orig',
+ 'dash_0.5.5.1')
+
+ def test_plus(self):
+ self.check('ncbi-blast-2.6.0+-src',
+ 'ncbi-blast-2.6.0')
+
+ # License
+
+ def test_gpl(self):
+ self.check('cppad-20170114.gpl',
+ 'cppad-20170114')
+
+ # OS
+
+ def test_linux(self):
+ self.check('astyle_2.04_linux',
+ 'astyle_2.04')
+
+ def test_unix(self):
+ self.check('install-tl-unx',
+ 'install-tl')
+
+ def test_macos(self):
+ self.check('astyle_1.23_macosx',
+ 'astyle_1.23')
+ self.check('haxe-2.08-osx',
+ 'haxe-2.08')
+
+ # PyPI
+
+ def test_wheel(self):
+ self.check('entrypoints-0.2.2-py2.py3-none-any.whl',
+ 'entrypoints-0.2.2')
+ self.check('numpy-1.12.0-cp27-cp27m-macosx_10_6_intel.macosx_10_9_intel.macosx_10_9_x86_64.macosx_10_10_intel.macosx_10_10_x86_64.whl', # noqa
+ 'numpy-1.12.0')
+
+ def test_exe(self):
+ self.check('PyYAML-3.12.win-amd64-py3.5.exe',
+ 'PyYAML-3.12')
+
+ # Combinations of multiple patterns
+
+ def test_complex_all(self):
+ self.check('p7zip_9.04_src_all',
+ 'p7zip_9.04')
+
+ def test_complex_run(self):
+ self.check('cuda_8.0.44_linux.run',
+ 'cuda_8.0.44')
+
+ def test_complex_file(self):
+ self.check('ack-2.14-single-file',
+ 'ack-2.14')
+
+ def test_complex_jar(self):
+ self.check('antlr-3.4-complete.jar',
+ 'antlr-3.4')
+
+ def test_complex_oss(self):
+ self.check('tbb44_20160128oss_src_0',
+ 'tbb44_20160128')
+
+ def test_complex_darwin(self):
+ self.check('ghc-7.0.4-x86_64-apple-darwin',
+ 'ghc-7.0.4')
+ self.check('ghc-7.0.4-i386-apple-darwin',
+ 'ghc-7.0.4')
+
+ def test_complex_arch(self):
+ self.check('VizGlow_v2.2alpha17-R21November2016-Linux-x86_64-Install',
+ 'VizGlow_v2.2alpha17-R21November2016')
+ self.check('jdk-8u92-linux-x64',
+ 'jdk-8u92')
+ self.check('cuda_6.5.14_linux_64.run',
+ 'cuda_6.5.14')
+
+ def test_complex_with(self):
+ self.check('mafft-7.221-with-extensions-src',
+ 'mafft-7.221')
+ self.check('spark-2.0.0-bin-without-hadoop',
+ 'spark-2.0.0')
+
+ def test_complex_public(self):
+ self.check('dakota-6.3-public.src',
+ 'dakota-6.3')
+
+ def test_complex_universal(self):
+ self.check('synergy-1.3.6p2-MacOSX-Universal',
+ 'synergy-1.3.6p2')
+
+
+class UrlStripNameSuffixesTest(unittest.TestCase):
+ """Tests for spack.url.strip_name_suffixes"""
+
+ def check(self, before, version, after):
+ stripped = strip_name_suffixes(before, version)
+ self.assertEqual(stripped, after)
+
+ def test_no_suffix(self):
+ self.check('rgb-1.0.6', '1.0.6',
+ 'rgb')
+ self.check('nauty26r7', '26r7',
+ 'nauty')
+
+ # Download type
+
+ def test_install(self):
+ self.check('converge_install_2.3.16', '2.3.16',
+ 'converge')
+
+ def test_src(self):
+ self.check('jpegsrc.v9b', '9b',
+ 'jpeg')
+
+ def test_std(self):
+ self.check('ghostscript-fonts-std-8.11', '8.11',
+ 'ghostscript-fonts')
+
+ # Download version
+
+ def test_snapshot(self):
+ self.check('gts-snapshot-121130', '121130',
+ 'gts')
+
+ def test_distrib(self):
+ self.check('zoltan_distrib_v3.83', '3.83',
+ 'zoltan')
+
+ # VCS
- def test_version_sourceforge_download(self):
+ def test_bazaar(self):
+ self.check('libvterm-0+bzr681', '681',
+ 'libvterm')
+
+ # License
+
+ def test_gpl(self):
+ self.check('PyQt-x11-gpl-4.11.3', '4.11.3',
+ 'PyQt-x11')
+
+
+class UrlParseOffsetTest(unittest.TestCase):
+
+ def check(self, name, noffset, ver, voffset, path):
+ # Make sure parse_name_offset and parse_name_version are working
+ v, vstart, vlen, vi, vre = parse_version_offset(path)
+ n, nstart, nlen, ni, nre = parse_name_offset(path, v)
+
+ self.assertEqual(n, name)
+ self.assertEqual(v, ver)
+ self.assertEqual(nstart, noffset)
+ self.assertEqual(vstart, voffset)
+
+ def test_name_in_path(self):
self.check(
- 'foo-bar', '1.21',
- 'http://sourceforge.net/foo_bar-1.21.tar.gz/download')
+ 'antlr', 25, '2.7.7', 40,
+ 'https://github.com/antlr/antlr/tarball/v2.7.7')
+
+ def test_name_in_stem(self):
self.check(
- 'foo-bar', '1.21',
- 'http://sf.net/foo_bar-1.21.tar.gz/download')
+ 'gmp', 32, '6.0.0a', 36,
+ 'https://gmplib.org/download/gmp/gmp-6.0.0a.tar.bz2')
- def test_no_version(self):
- self.assert_not_detected('http://example.com/blah.tar')
- self.assert_not_detected('foo')
+ def test_name_in_suffix(self):
+ # Don't think I've ever seen one of these before
+ # We don't look for it, so it would probably fail anyway
+ pass
- def test_version_all_dots(self):
+ def test_version_in_path(self):
self.check(
- 'foo-bar-la', '1.14', 'http://example.com/foo.bar.la.1.14.zip')
+ 'nextflow', 31, '0.20.1', 59,
+ 'https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow')
- def test_version_underscore_separator(self):
+ def test_version_in_stem(self):
self.check(
- 'grc', '1.1',
- 'http://example.com/grc_1.1.tar.gz')
-
- def test_boost_version_style(self):
+ 'zlib', 24, '1.2.10', 29,
+ 'http://zlib.net/fossils/zlib-1.2.10.tar.gz')
+ self.check(
+ 'slepc', 51, '3.6.2', 57,
+ 'http://slepc.upv.es/download/download.php?filename=slepc-3.6.2.tar.gz')
self.check(
- 'boost', '1.39.0',
- 'http://example.com/boost_1_39_0.tar.bz2',
- no_check_url=True)
+ 'cloog', 61, '0.18.1', 67,
+ 'http://www.bastoul.net/cloog/pages/download/count.php3?url=./cloog-0.18.1.tar.gz')
+ self.check(
+ 'libxc', 58, '2.2.2', 64,
+ 'http://www.tddft.org/programs/octopus/down.php?file=libxc/libxc-2.2.2.tar.gz')
- def test_erlang_version_style(self):
+ def test_version_in_suffix(self):
+ self.check(
+ 'swiftsim', 36, '0.3.0', 76,
+ 'http://gitlab.cosma.dur.ac.uk/swift/swiftsim/repository/archive.tar.gz?ref=v0.3.0')
self.check(
- 'otp', 'R13B',
- 'http://erlang.org/download/otp_src_R13B.tar.gz')
+ 'sionlib', 30, '1.7.1', 59,
+ 'http://apps.fz-juelich.de/jsc/sionlib/download.php?version=1.7.1')
- def test_another_erlang_version_style(self):
+ def test_regex_in_name(self):
self.check(
- 'otp', 'R15B01',
- 'https://github.com/erlang/otp/tarball/OTP_R15B01')
+ 'voro++', 40, '0.4.6', 47,
+ 'http://math.lbl.gov/voro++/download/dir/voro++-0.4.6.tar.gz')
+
+
+class UrlParseNameAndVersionTest(unittest.TestCase):
+
+ def assert_not_detected(self, string):
+ self.assertRaises(
+ UndetectableVersionError, parse_name_and_version, string)
+
+ def check(self, name, v, string, **kwargs):
+ # Make sure correct name and version are extracted.
+ parsed_name, parsed_v = parse_name_and_version(string)
+ self.assertEqual(parsed_name, name)
+ self.assertEqual(parsed_v, Version(v))
+
+ # Make sure Spack formulates the right URL when we try to
+ # build one with a specific version.
+ self.assertEqual(string, substitute_version(string, v))
+
+ # Common Repositories
- def test_yet_another_erlang_version_style(self):
+ def test_github_downloads(self):
+ # name/archive/ver.ver
self.check(
- 'otp', 'R15B03-1',
- 'https://github.com/erlang/otp/tarball/OTP_R15B03-1')
+ 'nco', '4.6.2',
+ 'https://github.com/nco/nco/archive/4.6.2.tar.gz')
+ # name/archive/vver.ver
+ self.check(
+ 'vim', '8.0.0134',
+ 'https://github.com/vim/vim/archive/v8.0.0134.tar.gz')
+ # name/archive/name-ver.ver
+ self.check(
+ 'oce', '0.18',
+ 'https://github.com/tpaviot/oce/archive/OCE-0.18.tar.gz')
+ # name/releases/download/vver/name-ver.ver
+ self.check(
+ 'libmesh', '1.0.0',
+ 'https://github.com/libMesh/libmesh/releases/download/v1.0.0/libmesh-1.0.0.tar.bz2')
+ # name/tarball/vver.ver
+ self.check(
+ 'git', '2.7.1',
+ 'https://github.com/git/git/tarball/v2.7.1')
+ # name/zipball/vver.ver
+ self.check(
+ 'git', '2.7.1',
+ 'https://github.com/git/git/zipball/v2.7.1')
- def test_p7zip_version_style(self):
+ def test_gitlab_downloads(self):
+ # name/repository/archive.ext?ref=vver.ver
+ self.check(
+ 'swiftsim', '0.3.0',
+ 'http://gitlab.cosma.dur.ac.uk/swift/swiftsim/repository/archive.tar.gz?ref=v0.3.0')
+ # name/repository/archive.ext?ref=name-ver.ver
self.check(
- 'p7zip', '9.04',
- 'http://kent.dl.sourceforge.net/sourceforge/p7zip/p7zip_9.04_src_all.tar.bz2')
+ 'icet', '1.2.3',
+ 'https://gitlab.kitware.com/icet/icet/repository/archive.tar.gz?ref=IceT-1.2.3')
- def test_new_github_style(self):
+ def test_bitbucket_downloads(self):
+ # name/get/ver.ver
self.check(
- 'libnet', '1.1.4',
- 'https://github.com/sam-github/libnet/tarball/libnet-1.1.4')
+ 'eigen', '3.2.7',
+ 'https://bitbucket.org/eigen/eigen/get/3.2.7.tar.bz2')
+ # name/get/vver.ver
+ self.check(
+ 'hoomd-blue', '1.3.3',
+ 'https://bitbucket.org/glotzer/hoomd-blue/get/v1.3.3.tar.bz2')
+ # name/downloads/name-ver.ver
+ self.check(
+ 'dolfin', '2016.1.0',
+ 'https://bitbucket.org/fenics-project/dolfin/downloads/dolfin-2016.1.0.tar.gz')
- def test_gloox_beta_style(self):
+ def test_sourceforge_downloads(self):
+ # name-ver.ver
self.check(
- 'gloox', '1.0-beta7',
- 'http://camaya.net/download/gloox-1.0-beta7.tar.bz2')
+ 'libpng', '1.6.27',
+ 'http://download.sourceforge.net/libpng/libpng-1.6.27.tar.gz')
+ self.check(
+ 'lcms2', '2.6',
+ 'http://downloads.sourceforge.net/project/lcms/lcms/2.6/lcms2-2.6.tar.gz')
+ self.check(
+ 'modules', '3.2.10',
+ 'http://prdownloads.sourceforge.net/modules/modules-3.2.10.tar.gz')
+ # name-ver.ver.ext/download
+ self.check(
+ 'glew', '2.0.0',
+ 'https://sourceforge.net/projects/glew/files/glew/2.0.0/glew-2.0.0.tgz/download')
- def test_sphinx_beta_style(self):
+ def test_cran_downloads(self):
+ # name.name_ver.ver-ver.ver
self.check(
- 'sphinx', '1.10-beta',
- 'http://sphinxsearch.com/downloads/sphinx-1.10-beta.tar.gz')
+ 'TH.data', '1.0-8',
+ 'https://cran.r-project.org/src/contrib/TH.data_1.0-8.tar.gz')
+ self.check(
+ 'knitr', '1.14',
+ 'https://cran.rstudio.com/src/contrib/knitr_1.14.tar.gz')
+ self.check(
+ 'devtools', '1.12.0',
+ 'https://cloud.r-project.org/src/contrib/devtools_1.12.0.tar.gz')
- def test_astyle_verson_style(self):
+ def test_pypi_downloads(self):
+ # name.name_name-ver.ver
+ self.check(
+ '3to2', '1.1.1',
+ 'https://pypi.python.org/packages/source/3/3to2/3to2-1.1.1.zip')
self.check(
- 'astyle', '1.23',
- 'http://kent.dl.sourceforge.net/sourceforge/astyle/astyle_1.23_macosx.tar.gz')
+ 'mpmath', '0.19',
+ 'https://pypi.python.org/packages/source/m/mpmath/mpmath-all-0.19.tar.gz')
+ self.check(
+ 'pandas', '0.16.0',
+ 'https://pypi.python.org/packages/source/p/pandas/pandas-0.16.0.tar.gz#md5=bfe311f05dc0c351f8955fbd1e296e73')
+ self.check(
+ 'sphinx_rtd_theme', '0.1.10a0',
+ 'https://pypi.python.org/packages/da/6b/1b75f13d8aa3333f19c6cdf1f0bc9f52ea739cae464fbee050307c121857/sphinx_rtd_theme-0.1.10a0.tar.gz')
+ self.check(
+ 'backports.ssl_match_hostname', '3.5.0.1',
+ 'https://pypi.io/packages/source/b/backports.ssl_match_hostname/backports.ssl_match_hostname-3.5.0.1.tar.gz')
- def test_version_dos2unix(self):
+ def test_bazaar_downloads(self):
self.check(
- 'dos2unix', '3.1',
- 'http://www.sfr-fresh.com/linux/misc/dos2unix-3.1.tar.gz')
+ 'libvterm', '681',
+ 'http://www.leonerd.org.uk/code/libvterm/libvterm-0+bzr681.tar.gz')
- def test_version_internal_dash(self):
+ # Common Tarball Formats
+
+ def test_version_only(self):
+ # ver.ver
+ self.check(
+ 'eigen', '3.2.7',
+ 'https://bitbucket.org/eigen/eigen/get/3.2.7.tar.bz2')
+ # ver.ver-ver
+ self.check(
+ 'ImageMagick', '7.0.2-7',
+ 'https://github.com/ImageMagick/ImageMagick/archive/7.0.2-7.tar.gz')
+ # vver.ver
self.check(
- 'foo-arse', '1.1-2',
- 'http://example.com/foo-arse-1.1-2.tar.gz')
+ 'CGNS', '3.3.0',
+ 'https://github.com/CGNS/CGNS/archive/v3.3.0.tar.gz')
+ # vver_ver
+ self.check(
+ 'luafilesystem', '1_6_3',
+ 'https://github.com/keplerproject/luafilesystem/archive/v1_6_3.tar.gz')
- def test_version_single_digit(self):
+ def test_no_separators(self):
+ # namever
+ self.check(
+ 'turbolinux', '702',
+ 'file://{0}/turbolinux702.tar.gz'.format(os.getcwd()))
self.check(
- 'foo-bar', '45',
- 'http://example.com/foo_bar.45.tar.gz')
+ 'nauty', '26r7',
+ 'http://pallini.di.uniroma1.it/nauty26r7.tar.gz')
- def test_noseparator_single_digit(self):
+ def test_dashes_only(self):
+ # name-name-ver-ver
+ self.check(
+ 'Trilinos', '12-10-1',
+ 'https://github.com/trilinos/Trilinos/archive/trilinos-release-12-10-1.tar.gz')
+ self.check(
+ 'panda', '2016-03-07',
+ 'http://comopt.ifi.uni-heidelberg.de/software/PANDA/downloads/panda-2016-03-07.tar')
self.check(
- 'foo-bar', '45',
- 'http://example.com/foo_bar45.tar.gz')
+ 'gts', '121130',
+ 'http://gts.sourceforge.net/tarballs/gts-snapshot-121130.tar.gz')
+ self.check(
+ 'cdd', '061a',
+ 'http://www.cs.mcgill.ca/~fukuda/download/cdd/cdd-061a.tar.gz')
- def test_version_developer_that_hates_us_format(self):
+ def test_underscores_only(self):
+ # name_name_ver_ver
+ self.check(
+ 'tinyxml', '2_6_2',
+ 'https://sourceforge.net/projects/tinyxml/files/tinyxml/2.6.2/tinyxml_2_6_2.tar.gz')
+ self.check(
+ 'boost', '1_55_0',
+ 'http://downloads.sourceforge.net/project/boost/boost/1.55.0/boost_1_55_0.tar.bz2')
self.check(
- 'foo-bar-la', '1.2.3',
- 'http://example.com/foo-bar-la.1.2.3.tar.gz')
+ 'yorick', '2_2_04',
+ 'https://github.com/dhmunro/yorick/archive/y_2_2_04.tar.gz')
+ # name_namever_ver
+ self.check(
+ 'tbb', '44_20160413',
+ 'https://www.threadingbuildingblocks.org/sites/default/files/software_releases/source/tbb44_20160413oss_src.tgz')
- def test_version_regular(self):
+ def test_dots_only(self):
+ # name.name.ver.ver
+ self.check(
+ 'prank', '150803',
+ 'http://wasabiapp.org/download/prank/prank.source.150803.tgz')
+ self.check(
+ 'jpeg', '9b',
+ 'http://www.ijg.org/files/jpegsrc.v9b.tar.gz')
+ self.check(
+ 'openjpeg', '2.1',
+ 'https://github.com/uclouvain/openjpeg/archive/version.2.1.tar.gz')
+ # name.namever.ver
+ self.check(
+ 'atlas', '3.11.34',
+ 'http://sourceforge.net/projects/math-atlas/files/Developer%20%28unstable%29/3.11.34/atlas3.11.34.tar.bz2')
self.check(
- 'foo-bar', '1.21',
- 'http://example.com/foo_bar-1.21.tar.gz')
+ 'visit', '2.10.1',
+ 'http://portal.nersc.gov/project/visit/releases/2.10.1/visit2.10.1.tar.gz')
+ self.check(
+ 'geant', '4.10.01.p03',
+ 'http://geant4.cern.ch/support/source/geant4.10.01.p03.tar.gz')
+ self.check(
+ 'tcl', '8.6.5',
+ 'http://prdownloads.sourceforge.net/tcl/tcl8.6.5-src.tar.gz')
- def test_version_gitlab(self):
+ def test_dash_dot(self):
+ # name-name-ver.ver
+ # digit in name
self.check(
- 'vtk', '7.0.0',
- 'https://gitlab.kitware.com/vtk/vtk/repository/'
- 'archive.tar.bz2?ref=v7.0.0')
+ 'm4', '1.4.17',
+ 'https://ftp.gnu.org/gnu/m4/m4-1.4.17.tar.gz')
+ # letter in version
self.check(
- 'icet', '1.2.3',
- 'https://gitlab.kitware.com/icet/icet/repository/'
- 'archive.tar.gz?ref=IceT-1.2.3')
+ 'gmp', '6.0.0a',
+ 'https://gmplib.org/download/gmp/gmp-6.0.0a.tar.bz2')
+ # version starts with 'v'
+ self.check(
+ 'LaunchMON', '1.0.2',
+ 'https://github.com/LLNL/LaunchMON/releases/download/v1.0.2/launchmon-v1.0.2.tar.gz')
+ # name-ver-ver.ver
self.check(
- 'foo', '42.1337',
- 'http://example.com/org/foo/repository/'
- 'archive.zip?ref=42.1337bar')
+ 'libedit', '20150325-3.1',
+ 'http://thrysoee.dk/editline/libedit-20150325-3.1.tar.gz')
- def test_version_github(self):
+ def test_dash_underscore(self):
+ # name-name-ver_ver
self.check(
- 'yajl', '1.0.5',
- 'http://github.com/lloyd/yajl/tarball/1.0.5')
+ 'icu4c', '57_1',
+ 'http://download.icu-project.org/files/icu4c/57.1/icu4c-57_1-src.tgz')
- def test_version_github_with_high_patch_number(self):
+ def test_underscore_dot(self):
+ # name_name_ver.ver
self.check(
- 'yajl', '1.2.34',
- 'http://github.com/lloyd/yajl/tarball/v1.2.34')
+ 'superlu_dist', '4.1',
+ 'http://crd-legacy.lbl.gov/~xiaoye/SuperLU/superlu_dist_4.1.tar.gz')
+ self.check(
+ 'pexsi', '0.9.0',
+ 'https://math.berkeley.edu/~linlin/pexsi/download/pexsi_v0.9.0.tar.gz')
+ # name_name.ver.ver
+ self.check(
+ 'fer', '696',
+ 'ftp://ftp.pmel.noaa.gov/ferret/pub/source/fer_source.v696.tar.gz')
- def test_yet_another_version(self):
+ def test_dash_dot_dash_dot(self):
+ # name-name-ver.ver-ver.ver
+ self.check(
+ 'sowing', '1.1.23-p1',
+ 'http://ftp.mcs.anl.gov/pub/petsc/externalpackages/sowing-1.1.23-p1.tar.gz')
self.check(
- 'mad', '0.15.1b',
- 'http://example.com/mad-0.15.1b.tar.gz')
+ 'bib2xhtml', '3.0-15-gf506',
+ 'http://www.spinellis.gr/sw/textproc/bib2xhtml/bib2xhtml-v3.0-15-gf506.tar.gz')
+ # namever.ver-ver.ver
+ self.check(
+ 'go', '1.4-bootstrap-20161024',
+ 'https://storage.googleapis.com/golang/go1.4-bootstrap-20161024.tar.gz')
- def test_lame_version_style(self):
+ def test_underscore_dash_dot(self):
+ # name_name-ver.ver
+ self.check(
+ 'the_silver_searcher', '0.32.0',
+ 'http://geoff.greer.fm/ag/releases/the_silver_searcher-0.32.0.tar.gz')
self.check(
- 'lame', '398-2',
- 'http://kent.dl.sourceforge.net/sourceforge/lame/lame-398-2.tar.gz')
+ 'sphinx_rtd_theme', '0.1.10a0',
+ 'https://pypi.python.org/packages/source/s/sphinx_rtd_theme/sphinx_rtd_theme-0.1.10a0.tar.gz')
- def test_ruby_version_style(self):
+ def test_dot_underscore_dot_dash_dot(self):
+ # name.name_ver.ver-ver.ver
self.check(
- 'ruby', '1.9.1-p243',
- 'ftp://ftp.ruby-lang.org/pub/ruby/1.9/ruby-1.9.1-p243.tar.gz')
+ 'TH.data', '1.0-8',
+ 'https://cran.r-project.org/src/contrib/TH.data_1.0-8.tar.gz')
+ self.check(
+ 'XML', '3.98-1.4',
+ 'https://cran.r-project.org/src/contrib/XML_3.98-1.4.tar.gz')
- def test_omega_version_style(self):
+ def test_dash_dot_underscore_dot(self):
+ # name-name-ver.ver_ver.ver
+ self.check(
+ 'pypar', '2.1.5_108',
+ 'https://storage.googleapis.com/google-code-archive-downloads/v2/code.google.com/pypar/pypar-2.1.5_108.tgz')
+ # name-namever.ver_ver.ver
self.check(
- 'omega', '0.80.2',
- 'http://www.alcyone.com/binaries/omega/omega-0.80.2-src.tar.gz')
+ 'STAR-CCM+', '11.06.010_02',
+ 'file://{0}/STAR-CCM+11.06.010_02_linux-x86_64.tar.gz'.format(os.getcwd()))
- def test_rc_style(self):
+ # Weird URLs
+
+ def test_version_in_path(self):
+ # github.com/repo/name/releases/download/name-vver/name
self.check(
- 'libvorbis', '1.2.2rc1',
- 'http://downloads.xiph.org/releases/vorbis/libvorbis-1.2.2rc1.tar.bz2')
+ 'nextflow', '0.20.1',
+ 'https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow')
- def test_dash_rc_style(self):
+ def test_suffix_queries(self):
self.check(
- 'js', '1.8.0-rc1',
- 'http://ftp.mozilla.org/pub/mozilla.org/js/js-1.8.0-rc1.tar.gz')
+ 'swiftsim', '0.3.0',
+ 'http://gitlab.cosma.dur.ac.uk/swift/swiftsim/repository/archive.tar.gz?ref=v0.3.0')
+ self.check(
+ 'sionlib', '1.7.1',
+ 'http://apps.fz-juelich.de/jsc/sionlib/download.php?version=1.7.1')
- def test_angband_version_style(self):
+ def test_stem_queries(self):
+ self.check(
+ 'slepc', '3.6.2',
+ 'http://slepc.upv.es/download/download.php?filename=slepc-3.6.2.tar.gz')
self.check(
- 'angband', '3.0.9b',
- 'http://rephial.org/downloads/3.0/angband-3.0.9b-src.tar.gz')
+ 'otf', '1.12.5salmon',
+ 'http://wwwpub.zih.tu-dresden.de/%7Emlieber/dcount/dcount.php?package=otf&get=OTF-1.12.5salmon.tar.gz')
- def test_stable_suffix(self):
+ def test_single_character_name(self):
self.check(
- 'libevent', '1.4.14b',
- 'http://www.monkey.org/~provos/libevent-1.4.14b-stable.tar.gz')
+ 'R', '3.3.2',
+ 'https://cloud.r-project.org/src/base/R-3/R-3.3.2.tar.gz')
+
+ def test_single_digit_version(self):
+ pass
- def test_debian_style_1(self):
+ def test_name_starts_with_digit(self):
self.check(
- 'sl', '3.03',
- 'http://ftp.de.debian.org/debian/pool/main/s/sl/sl_3.03.orig.tar.gz')
+ '3to2', '1.1.1',
+ 'https://pypi.python.org/packages/source/3/3to2/3to2-1.1.1.zip')
- def test_debian_style_2(self):
+ def test_plus_in_name(self):
self.check(
- 'mmv', '1.01b',
- 'http://ftp.de.debian.org/debian/pool/main/m/mmv/mmv_1.01b.orig.tar.gz')
+ 'gtk+', '2.24.31',
+ 'http://ftp.gnome.org/pub/gnome/sources/gtk+/2.24/gtk+-2.24.31.tar.xz')
+ self.check(
+ 'voro++', '0.4.6',
+ 'http://math.lbl.gov/voro++/download/dir/voro++-0.4.6.tar.gz')
+
+ def test_no_version(self):
+ self.assert_not_detected('http://www.netlib.org/blas/blast-forum/cblas.tgz')
+ self.assert_not_detected('http://www.netlib.org/voronoi/triangle.zip')
- def test_imagemagick_style(self):
+ def test_download_php(self):
+ # Name comes before download.php
+ self.check(
+ 'sionlib', '1.7.1',
+ 'http://apps.fz-juelich.de/jsc/sionlib/download.php?version=1.7.1')
+ # Ignore download.php
+ self.check(
+ 'slepc', '3.6.2',
+ 'http://slepc.upv.es/download/download.php?filename=slepc-3.6.2.tar.gz')
self.check(
- 'imagemagick', '6.7.5-7',
+ 'ScientificPython', '2.8.1',
+ 'https://sourcesup.renater.fr/frs/download.php/file/4411/ScientificPython-2.8.1.tar.gz')
- 'http://downloads.sf.net/project/machomebrew/mirror/ImageMagick-6.7.5-7.tar.bz2')
+ def test_gloox_beta_style(self):
+ self.check(
+ 'gloox', '1.0-beta7',
+ 'http://camaya.net/download/gloox-1.0-beta7.tar.bz2')
- def test_dash_version_dash_style(self):
+ def test_sphinx_beta_style(self):
self.check(
- 'antlr', '3.4',
- 'http://www.antlr.org/download/antlr-3.4-complete.jar')
+ 'sphinx', '1.10-beta',
+ 'http://sphinxsearch.com/downloads/sphinx-1.10-beta.tar.gz')
- def test_apache_version_style(self):
+ def test_ruby_version_style(self):
self.check(
- 'apache-cassandra', '1.2.0-rc2',
- 'http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin.tar.gz')
+ 'ruby', '1.9.1-p243',
+ 'ftp://ftp.ruby-lang.org/pub/ruby/1.9/ruby-1.9.1-p243.tar.gz')
- def test_jpeg_style(self):
+ def test_rc_style(self):
self.check(
- 'jpegsrc', '8d',
- 'http://www.ijg.org/files/jpegsrc.v8d.tar.gz')
+ 'libvorbis', '1.2.2rc1',
+ 'http://downloads.xiph.org/releases/vorbis/libvorbis-1.2.2rc1.tar.bz2')
- def test_pypy_version(self):
+ def test_dash_rc_style(self):
self.check(
- 'pypy', '1.4.1',
- 'http://pypy.org/download/pypy-1.4.1-osx.tar.bz2')
+ 'js', '1.8.0-rc1',
+ 'http://ftp.mozilla.org/pub/mozilla.org/js/js-1.8.0-rc1.tar.gz')
- def test_openssl_version(self):
+ def test_apache_version_style(self):
self.check(
- 'openssl', '0.9.8s',
- 'http://www.openssl.org/source/openssl-0.9.8s.tar.gz')
+ 'apache-cassandra', '1.2.0-rc2',
+ 'http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin.tar.gz')
def test_xaw3d_version(self):
self.check(
- 'xaw3d', '1.5E',
+ 'Xaw3d', '1.5E',
'ftp://ftp.visi.com/users/hawkeyd/X/Xaw3d-1.5E.tar.gz')
def test_fann_version(self):
@@ -269,16 +690,6 @@ class UrlParseTest(unittest.TestCase):
'fann', '2.1.0beta',
'http://downloads.sourceforge.net/project/fann/fann/2.1.0beta/fann-2.1.0beta.zip')
- def test_iges_version(self):
- self.check(
- 'grads', '2.0.1',
- 'ftp://iges.org/grads/2.0/grads-2.0.1-bin-darwin9.8-intel.tar.gz')
-
- def test_haxe_version(self):
- self.check(
- 'haxe', '2.08',
- 'http://haxe.org/file/haxe-2.08-osx.tar.gz')
-
def test_imap_version(self):
self.check(
'imap', '2007f',
@@ -289,26 +700,6 @@ class UrlParseTest(unittest.TestCase):
'suite3270', '3.3.12ga7',
'http://sourceforge.net/projects/x3270/files/x3270/3.3.12ga7/suite3270-3.3.12ga7-src.tgz')
- def test_synergy_version(self):
- self.check(
- 'synergy', '1.3.6p2',
- 'http://synergy.googlecode.com/files/synergy-1.3.6p2-MacOSX-Universal.zip')
-
- def test_mvapich2_19_version(self):
- self.check(
- 'mvapich2', '1.9',
- 'http://mvapich.cse.ohio-state.edu/download/mvapich2/mv2/mvapich2-1.9.tgz')
-
- def test_mvapich2_20_version(self):
- self.check(
- 'mvapich2', '2.0',
- 'http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.0.tar.gz')
-
- def test_hdf5_version(self):
- self.check(
- 'hdf5', '1.8.13',
- 'http://www.hdfgroup.org/ftp/HDF5/current/src/hdf5-1.8.13.tar.bz2')
-
def test_scalasca_version(self):
self.check(
'cube', '4.2.3',
@@ -317,55 +708,20 @@ class UrlParseTest(unittest.TestCase):
'cube', '4.3-TP1',
'http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3-TP1.tar.gz')
- def test_mpileaks_version(self):
- self.check(
- 'mpileaks', '1.0',
- 'https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz')
- self.check(
- 'mpileaks', '1.0',
- 'https://github.com/hpc/mpileaks/releases/download/1.0/mpileaks-1.0.tar.gz')
-
- def test_gcc_version(self):
- self.check(
- 'gcc', '4.4.7',
- 'http://open-source-box.org/gcc/gcc-4.4.7/gcc-4.4.7.tar.bz2')
-
- def test_gcc_version_precedence(self):
- # prefer the version in the tarball, not in the url prefix.
- self.check(
- 'gcc', '4.4.7',
- 'http://open-source-box.org/gcc/gcc-4.9.2/gcc-4.4.7.tar.bz2')
-
def test_github_raw_url(self):
self.check(
- 'powerparser', '2.0.7',
+ 'CLAMR', '2.0.7',
'https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true')
- def test_r_xml_version(self):
+ def test_luaposix_version(self):
self.check(
- 'xml', '3.98-1.4',
- 'https://cran.r-project.org/src/contrib/XML_3.98-1.4.tar.gz')
+ 'luaposix', '33.4.0',
+ 'https://github.com/luaposix/luaposix/archive/release-v33.4.0.tar.gz')
def test_nco_version(self):
self.check(
'nco', '4.6.2-beta03',
'https://github.com/nco/nco/archive/4.6.2-beta03.tar.gz')
-
self.check(
'nco', '4.6.3-alpha04',
'https://github.com/nco/nco/archive/4.6.3-alpha04.tar.gz')
-
- def test_yorick_version(self):
- self.check(
- 'yorick', '2_2_04',
- 'https://github.com/dhmunro/yorick/archive/y_2_2_04.tar.gz')
-
- def test_luaposix_version(self):
- self.check(
- 'luaposix', '33.4.0',
- 'https://github.com/luaposix/luaposix/archive/release-v33.4.0.tar.gz')
-
- def test_sionlib_version(self):
- self.check(
- 'sionlib', '1.7.1',
- 'http://apps.fz-juelich.de/jsc/sionlib/download.php?version=1.7.1')
diff --git a/lib/spack/spack/test/url_substitution.py b/lib/spack/spack/test/url_substitution.py
index ea6374e3d2..449a3b29bf 100644
--- a/lib/spack/spack/test/url_substitution.py
+++ b/lib/spack/spack/test/url_substitution.py
@@ -22,44 +22,64 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-"""\
-This test does sanity checks on substituting new versions into URLs
-"""
+"""Tests Spack's ability to substitute a different version into a URL."""
+
+import os
import unittest
-import spack.url as url
+from spack.url import substitute_version
+
+
+class UrlSubstitutionTest(unittest.TestCase):
+ def check(self, base, version, new_url):
+ self.assertEqual(substitute_version(base, version), new_url)
-base = "https://comp.llnl.gov/linear_solvers/download/hypre-2.9.0b.tar.gz"
-stem = "https://comp.llnl.gov/linear_solvers/download/hypre-"
+ def test_same_version(self):
+ # Ensures that substituting the same version results in the same URL
+ self.check(
+ 'http://www.mr511.de/software/libelf-0.8.13.tar.gz', '0.8.13',
+ 'http://www.mr511.de/software/libelf-0.8.13.tar.gz')
+ def test_different_version(self):
+ # Test a completely different version syntax
+ self.check(
+ 'http://www.prevanders.net/libdwarf-20130729.tar.gz', '8.12',
+ 'http://www.prevanders.net/libdwarf-8.12.tar.gz')
-class PackageSanityTest(unittest.TestCase):
+ def test_double_version(self):
+ # Test a URL where the version appears twice
+ # It should get substituted both times
+ self.check(
+ 'https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz', '2.1.3',
+ 'https://github.com/hpc/mpileaks/releases/download/v2.1.3/mpileaks-2.1.3.tar.gz')
- def test_hypre_url_substitution(self):
- self.assertEqual(url.substitute_version(base, '2.9.0b'), base)
- self.assertEqual(
- url.substitute_version(base, '2.8.0b'), stem + "2.8.0b.tar.gz")
- self.assertEqual(
- url.substitute_version(base, '2.7.0b'), stem + "2.7.0b.tar.gz")
- self.assertEqual(
- url.substitute_version(base, '2.6.0b'), stem + "2.6.0b.tar.gz")
- self.assertEqual(
- url.substitute_version(base, '1.14.0b'), stem + "1.14.0b.tar.gz")
- self.assertEqual(
- url.substitute_version(base, '1.13.0b'), stem + "1.13.0b.tar.gz")
- self.assertEqual(
- url.substitute_version(base, '2.0.0'), stem + "2.0.0.tar.gz")
- self.assertEqual(
- url.substitute_version(base, '1.6.0'), stem + "1.6.0.tar.gz")
+ def test_partial_version_prefix(self):
+ # Test now with a partial prefix earlier in the URL
+ # This is hard to figure out so Spack only substitutes
+ # the last instance of the version
+ self.check(
+ 'https://www.open-mpi.org/software/ompi/v2.1/downloads/openmpi-2.1.0.tar.bz2', '2.2.0',
+ 'https://www.open-mpi.org/software/ompi/v2.1/downloads/openmpi-2.2.0.tar.bz2')
+ self.check(
+ 'https://www.open-mpi.org/software/ompi/v2.1/downloads/openmpi-2.1.0.tar.bz2', '2.2',
+ 'https://www.open-mpi.org/software/ompi/v2.1/downloads/openmpi-2.2.tar.bz2')
- def test_otf2_url_substitution(self):
- base = "http://www.vi-hps.org/upload/packages/otf2/otf2-1.4.tar.gz"
+ def test_no_separator(self):
+ # No separator between the name and version of the package
+ self.check(
+ 'file://{0}/turbolinux702.tar.gz'.format(os.getcwd()), '703',
+ 'file://{0}/turbolinux703.tar.gz'.format(os.getcwd()))
- self.assertEqual(url.substitute_version(base, '1.4'), base)
+ def test_github_raw(self):
+ self.check(
+ 'https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true', '2.0.7',
+ 'https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true')
+ self.check(
+ 'https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true', '4.7',
+ 'https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v4.7.tgz?raw=true')
- self.assertEqual(
- url.substitute_version(base, '1.3.1'),
- "http://www.vi-hps.org/upload/packages/otf2/otf2-1.3.1.tar.gz")
- self.assertEqual(
- url.substitute_version(base, '1.2.1'),
- "http://www.vi-hps.org/upload/packages/otf2/otf2-1.2.1.tar.gz")
+ def test_regex(self):
+ # Package name contains regex characters
+ self.check(
+ 'http://math.lbl.gov/voro++/download/dir/voro++-0.4.6.tar.gz', '1.2.3',
+ 'http://math.lbl.gov/voro++/download/dir/voro++-1.2.3.tar.gz')
diff --git a/lib/spack/spack/test/versions.py b/lib/spack/spack/test/versions.py
index c1d427783c..71ea3af9e9 100644
--- a/lib/spack/spack/test/versions.py
+++ b/lib/spack/spack/test/versions.py
@@ -22,413 +22,453 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-"""
-These version tests were taken from the RPM source code.
+"""These version tests were taken from the RPM source code.
We try to maintain compatibility with RPM's version semantics
where it makes sense.
"""
-import unittest
+import pytest
from spack.version import *
-class VersionsTest(unittest.TestCase):
-
- def assert_ver_lt(self, a, b):
- a, b = ver(a), ver(b)
- self.assertTrue(a < b)
- self.assertTrue(a <= b)
- self.assertTrue(a != b)
- self.assertFalse(a == b)
- self.assertFalse(a > b)
- self.assertFalse(a >= b)
-
- def assert_ver_gt(self, a, b):
- a, b = ver(a), ver(b)
- self.assertTrue(a > b)
- self.assertTrue(a >= b)
- self.assertTrue(a != b)
- self.assertFalse(a == b)
- self.assertFalse(a < b)
- self.assertFalse(a <= b)
-
- def assert_ver_eq(self, a, b):
- a, b = ver(a), ver(b)
- self.assertFalse(a > b)
- self.assertTrue(a >= b)
- self.assertFalse(a != b)
- self.assertTrue(a == b)
- self.assertFalse(a < b)
- self.assertTrue(a <= b)
-
- def assert_in(self, needle, haystack):
- self.assertTrue(ver(needle) in ver(haystack))
-
- def assert_not_in(self, needle, haystack):
- self.assertFalse(ver(needle) in ver(haystack))
-
- def assert_canonical(self, canonical_list, version_list):
- self.assertEqual(ver(canonical_list), ver(version_list))
-
- def assert_overlaps(self, v1, v2):
- self.assertTrue(ver(v1).overlaps(ver(v2)))
-
- def assert_no_overlap(self, v1, v2):
- self.assertFalse(ver(v1).overlaps(ver(v2)))
-
- def assert_satisfies(self, v1, v2):
- self.assertTrue(ver(v1).satisfies(ver(v2)))
-
- def assert_does_not_satisfy(self, v1, v2):
- self.assertFalse(ver(v1).satisfies(ver(v2)))
-
- def check_intersection(self, expected, a, b):
- self.assertEqual(ver(expected), ver(a).intersection(ver(b)))
-
- def check_union(self, expected, a, b):
- self.assertEqual(ver(expected), ver(a).union(ver(b)))
-
- def test_two_segments(self):
- self.assert_ver_eq('1.0', '1.0')
- self.assert_ver_lt('1.0', '2.0')
- self.assert_ver_gt('2.0', '1.0')
- self.assert_ver_eq('develop', 'develop')
- self.assert_ver_lt('1.0', 'develop')
- self.assert_ver_gt('develop', '1.0')
-
- def test_three_segments(self):
- self.assert_ver_eq('2.0.1', '2.0.1')
- self.assert_ver_lt('2.0', '2.0.1')
- self.assert_ver_gt('2.0.1', '2.0')
-
- def test_alpha(self):
- # TODO: not sure whether I like this. 2.0.1a is *usually*
- # TODO: less than 2.0.1, but special-casing it makes version
- # TODO: comparison complicated. See version.py
- self.assert_ver_eq('2.0.1a', '2.0.1a')
- self.assert_ver_gt('2.0.1a', '2.0.1')
- self.assert_ver_lt('2.0.1', '2.0.1a')
-
- def test_patch(self):
- self.assert_ver_eq('5.5p1', '5.5p1')
- self.assert_ver_lt('5.5p1', '5.5p2')
- self.assert_ver_gt('5.5p2', '5.5p1')
- self.assert_ver_eq('5.5p10', '5.5p10')
- self.assert_ver_lt('5.5p1', '5.5p10')
- self.assert_ver_gt('5.5p10', '5.5p1')
-
- def test_num_alpha_with_no_separator(self):
- self.assert_ver_lt('10xyz', '10.1xyz')
- self.assert_ver_gt('10.1xyz', '10xyz')
- self.assert_ver_eq('xyz10', 'xyz10')
- self.assert_ver_lt('xyz10', 'xyz10.1')
- self.assert_ver_gt('xyz10.1', 'xyz10')
-
- def test_alpha_with_dots(self):
- self.assert_ver_eq('xyz.4', 'xyz.4')
- self.assert_ver_lt('xyz.4', '8')
- self.assert_ver_gt('8', 'xyz.4')
- self.assert_ver_lt('xyz.4', '2')
- self.assert_ver_gt('2', 'xyz.4')
-
- def test_nums_and_patch(self):
- self.assert_ver_lt('5.5p2', '5.6p1')
- self.assert_ver_gt('5.6p1', '5.5p2')
- self.assert_ver_lt('5.6p1', '6.5p1')
- self.assert_ver_gt('6.5p1', '5.6p1')
-
- def test_rc_versions(self):
- self.assert_ver_gt('6.0.rc1', '6.0')
- self.assert_ver_lt('6.0', '6.0.rc1')
-
- def test_alpha_beta(self):
- self.assert_ver_gt('10b2', '10a1')
- self.assert_ver_lt('10a2', '10b2')
-
- def test_double_alpha(self):
- self.assert_ver_eq('1.0aa', '1.0aa')
- self.assert_ver_lt('1.0a', '1.0aa')
- self.assert_ver_gt('1.0aa', '1.0a')
-
- def test_padded_numbers(self):
- self.assert_ver_eq('10.0001', '10.0001')
- self.assert_ver_eq('10.0001', '10.1')
- self.assert_ver_eq('10.1', '10.0001')
- self.assert_ver_lt('10.0001', '10.0039')
- self.assert_ver_gt('10.0039', '10.0001')
-
- def test_close_numbers(self):
- self.assert_ver_lt('4.999.9', '5.0')
- self.assert_ver_gt('5.0', '4.999.9')
-
- def test_date_stamps(self):
- self.assert_ver_eq('20101121', '20101121')
- self.assert_ver_lt('20101121', '20101122')
- self.assert_ver_gt('20101122', '20101121')
-
- def test_underscores(self):
- self.assert_ver_eq('2_0', '2_0')
- self.assert_ver_eq('2.0', '2_0')
- self.assert_ver_eq('2_0', '2.0')
-
- def test_rpm_oddities(self):
- self.assert_ver_eq('1b.fc17', '1b.fc17')
- self.assert_ver_lt('1b.fc17', '1.fc17')
- self.assert_ver_gt('1.fc17', '1b.fc17')
- self.assert_ver_eq('1g.fc17', '1g.fc17')
- self.assert_ver_gt('1g.fc17', '1.fc17')
- self.assert_ver_lt('1.fc17', '1g.fc17')
-
- # Stuff below here is not taken from RPM's tests and is
- # unique to spack
- def test_version_ranges(self):
- self.assert_ver_lt('1.2:1.4', '1.6')
- self.assert_ver_gt('1.6', '1.2:1.4')
- self.assert_ver_eq('1.2:1.4', '1.2:1.4')
- self.assertNotEqual(ver('1.2:1.4'), ver('1.2:1.6'))
-
- self.assert_ver_lt('1.2:1.4', '1.5:1.6')
- self.assert_ver_gt('1.5:1.6', '1.2:1.4')
-
- def test_contains(self):
- self.assert_in('1.3', '1.2:1.4')
- self.assert_in('1.2.5', '1.2:1.4')
- self.assert_in('1.3.5', '1.2:1.4')
- self.assert_in('1.3.5-7', '1.2:1.4')
- self.assert_not_in('1.1', '1.2:1.4')
- self.assert_not_in('1.5', '1.2:1.4')
-
- self.assert_in('1.4.2', '1.2:1.4')
- self.assert_not_in('1.4.2', '1.2:1.4.0')
-
- self.assert_in('1.2.8', '1.2.7:1.4')
- self.assert_in('1.2.7:1.4', ':')
- self.assert_not_in('1.2.5', '1.2.7:1.4')
-
- self.assert_in('1.4.1', '1.2.7:1.4')
- self.assert_not_in('1.4.1', '1.2.7:1.4.0')
-
- def test_in_list(self):
- self.assert_in('1.2', ['1.5', '1.2', '1.3'])
- self.assert_in('1.2.5', ['1.5', '1.2:1.3'])
- self.assert_in('1.5', ['1.5', '1.2:1.3'])
- self.assert_not_in('1.4', ['1.5', '1.2:1.3'])
-
- self.assert_in('1.2.5:1.2.7', [':'])
- self.assert_in('1.2.5:1.2.7', ['1.5', '1.2:1.3'])
- self.assert_not_in('1.2.5:1.5', ['1.5', '1.2:1.3'])
- self.assert_not_in('1.1:1.2.5', ['1.5', '1.2:1.3'])
-
- def test_ranges_overlap(self):
- self.assert_overlaps('1.2', '1.2')
- self.assert_overlaps('1.2.1', '1.2.1')
- self.assert_overlaps('1.2.1b', '1.2.1b')
-
- self.assert_overlaps('1.2:1.7', '1.6:1.9')
- self.assert_overlaps(':1.7', '1.6:1.9')
- self.assert_overlaps(':1.7', ':1.9')
- self.assert_overlaps(':1.7', '1.6:')
- self.assert_overlaps('1.2:', '1.6:1.9')
- self.assert_overlaps('1.2:', ':1.9')
- self.assert_overlaps('1.2:', '1.6:')
- self.assert_overlaps(':', ':')
- self.assert_overlaps(':', '1.6:1.9')
- self.assert_overlaps('1.6:1.9', ':')
-
- def test_overlap_with_containment(self):
- self.assert_in('1.6.5', '1.6')
- self.assert_in('1.6.5', ':1.6')
-
- self.assert_overlaps('1.6.5', ':1.6')
- self.assert_overlaps(':1.6', '1.6.5')
-
- self.assert_not_in(':1.6', '1.6.5')
- self.assert_in('1.6.5', ':1.6')
-
- def test_lists_overlap(self):
- self.assert_overlaps('1.2b:1.7,5', '1.6:1.9,1')
- self.assert_overlaps('1,2,3,4,5', '3,4,5,6,7')
- self.assert_overlaps('1,2,3,4,5', '5,6,7')
- self.assert_overlaps('1,2,3,4,5', '5:7')
- self.assert_overlaps('1,2,3,4,5', '3, 6:7')
- self.assert_overlaps('1, 2, 4, 6.5', '3, 6:7')
- self.assert_overlaps('1, 2, 4, 6.5', ':, 5, 8')
- self.assert_overlaps('1, 2, 4, 6.5', ':')
- self.assert_no_overlap('1, 2, 4', '3, 6:7')
- self.assert_no_overlap('1,2,3,4,5', '6,7')
- self.assert_no_overlap('1,2,3,4,5', '6:7')
-
- def test_canonicalize_list(self):
- self.assert_canonical(['1.2', '1.3', '1.4'],
- ['1.2', '1.3', '1.3', '1.4'])
-
- self.assert_canonical(['1.2', '1.3:1.4'],
- ['1.2', '1.3', '1.3:1.4'])
-
- self.assert_canonical(['1.2', '1.3:1.4'],
- ['1.2', '1.3:1.4', '1.4'])
-
- self.assert_canonical(['1.3:1.4'],
- ['1.3:1.4', '1.3', '1.3.1', '1.3.9', '1.4'])
-
- self.assert_canonical(['1.3:1.4'],
- ['1.3', '1.3.1', '1.3.9', '1.4', '1.3:1.4'])
-
- self.assert_canonical(['1.3:1.5'],
- ['1.3', '1.3.1', '1.3.9', '1.4:1.5', '1.3:1.4'])
-
- self.assert_canonical(['1.3:1.5'],
- ['1.3, 1.3.1,1.3.9,1.4:1.5,1.3:1.4'])
-
- self.assert_canonical(['1.3:1.5'],
- ['1.3, 1.3.1,1.3.9,1.4 : 1.5 , 1.3 : 1.4'])
-
- self.assert_canonical([':'],
- [':,1.3, 1.3.1,1.3.9,1.4 : 1.5 , 1.3 : 1.4'])
-
- def test_intersection(self):
- self.check_intersection('2.5',
- '1.0:2.5', '2.5:3.0')
- self.check_intersection('2.5:2.7',
- '1.0:2.7', '2.5:3.0')
- self.check_intersection('0:1', ':', '0:1')
-
- self.check_intersection(['1.0', '2.5:2.7'],
- ['1.0:2.7'], ['2.5:3.0', '1.0'])
- self.check_intersection(['2.5:2.7'],
- ['1.1:2.7'], ['2.5:3.0', '1.0'])
- self.check_intersection(['0:1'], [':'], ['0:1'])
-
- def test_intersect_with_containment(self):
- self.check_intersection('1.6.5', '1.6.5', ':1.6')
- self.check_intersection('1.6.5', ':1.6', '1.6.5')
-
- self.check_intersection('1.6:1.6.5', ':1.6.5', '1.6')
- self.check_intersection('1.6:1.6.5', '1.6', ':1.6.5')
-
- def test_union_with_containment(self):
- self.check_union(':1.6', '1.6.5', ':1.6')
- self.check_union(':1.6', ':1.6', '1.6.5')
-
- self.check_union(':1.6', ':1.6.5', '1.6')
- self.check_union(':1.6', '1.6', ':1.6.5')
-
- self.check_union(':', '1.0:', ':2.0')
-
- self.check_union('1:4', '1:3', '2:4')
- self.check_union('1:4', '2:4', '1:3')
-
- # Tests successor/predecessor case.
- self.check_union('1:4', '1:2', '3:4')
-
- def test_basic_version_satisfaction(self):
- self.assert_satisfies('4.7.3', '4.7.3')
-
- self.assert_satisfies('4.7.3', '4.7')
- self.assert_satisfies('4.7.3b2', '4.7')
- self.assert_satisfies('4.7b6', '4.7')
-
- self.assert_satisfies('4.7.3', '4')
- self.assert_satisfies('4.7.3b2', '4')
- self.assert_satisfies('4.7b6', '4')
-
- self.assert_does_not_satisfy('4.8.0', '4.9')
- self.assert_does_not_satisfy('4.8', '4.9')
- self.assert_does_not_satisfy('4', '4.9')
-
- def test_basic_version_satisfaction_in_lists(self):
- self.assert_satisfies(['4.7.3'], ['4.7.3'])
-
- self.assert_satisfies(['4.7.3'], ['4.7'])
- self.assert_satisfies(['4.7.3b2'], ['4.7'])
- self.assert_satisfies(['4.7b6'], ['4.7'])
-
- self.assert_satisfies(['4.7.3'], ['4'])
- self.assert_satisfies(['4.7.3b2'], ['4'])
- self.assert_satisfies(['4.7b6'], ['4'])
-
- self.assert_does_not_satisfy(['4.8.0'], ['4.9'])
- self.assert_does_not_satisfy(['4.8'], ['4.9'])
- self.assert_does_not_satisfy(['4'], ['4.9'])
-
- def test_version_range_satisfaction(self):
- self.assert_satisfies('4.7b6', '4.3:4.7')
- self.assert_satisfies('4.3.0', '4.3:4.7')
- self.assert_satisfies('4.3.2', '4.3:4.7')
-
- self.assert_does_not_satisfy('4.8.0', '4.3:4.7')
- self.assert_does_not_satisfy('4.3', '4.4:4.7')
-
- self.assert_satisfies('4.7b6', '4.3:4.7')
- self.assert_does_not_satisfy('4.8.0', '4.3:4.7')
-
- def test_version_range_satisfaction_in_lists(self):
- self.assert_satisfies(['4.7b6'], ['4.3:4.7'])
- self.assert_satisfies(['4.3.0'], ['4.3:4.7'])
- self.assert_satisfies(['4.3.2'], ['4.3:4.7'])
-
- self.assert_does_not_satisfy(['4.8.0'], ['4.3:4.7'])
- self.assert_does_not_satisfy(['4.3'], ['4.4:4.7'])
-
- self.assert_satisfies(['4.7b6'], ['4.3:4.7'])
- self.assert_does_not_satisfy(['4.8.0'], ['4.3:4.7'])
-
- def test_satisfaction_with_lists(self):
- self.assert_satisfies('4.7', '4.3, 4.6, 4.7')
- self.assert_satisfies('4.7.3', '4.3, 4.6, 4.7')
- self.assert_satisfies('4.6.5', '4.3, 4.6, 4.7')
- self.assert_satisfies('4.6.5.2', '4.3, 4.6, 4.7')
-
- self.assert_does_not_satisfy('4', '4.3, 4.6, 4.7')
- self.assert_does_not_satisfy('4.8.0', '4.2, 4.3:4.7')
-
- self.assert_satisfies('4.8.0', '4.2, 4.3:4.8')
- self.assert_satisfies('4.8.2', '4.2, 4.3:4.8')
-
- def test_formatted_strings(self):
- versions = '1.2.3', '1_2_3', '1-2-3'
- for item in versions:
- v = Version(item)
- self.assertEqual(v.dotted, '1.2.3')
- self.assertEqual(v.dashed, '1-2-3')
- self.assertEqual(v.underscored, '1_2_3')
- self.assertEqual(v.joined, '123')
-
- def test_repr_and_str(self):
-
- def check_repr_and_str(vrs):
- a = Version(vrs)
- self.assertEqual(repr(a), 'Version(\'' + vrs + '\')')
- b = eval(repr(a))
- self.assertEqual(a, b)
- self.assertEqual(str(a), vrs)
- self.assertEqual(str(a), str(b))
-
- check_repr_and_str('1.2.3')
- check_repr_and_str('R2016a')
- check_repr_and_str('R2016a.2-3_4')
-
- def test_get_item(self):
- a = Version('0.1_2-3')
- self.assertTrue(isinstance(a[1], int))
- # Test slicing
- b = a[0:2]
- self.assertTrue(isinstance(b, Version))
- self.assertEqual(b, Version('0.1'))
- self.assertEqual(repr(b), 'Version(\'0.1\')')
- self.assertEqual(str(b), '0.1')
- b = a[0:3]
- self.assertTrue(isinstance(b, Version))
- self.assertEqual(b, Version('0.1_2'))
- self.assertEqual(repr(b), 'Version(\'0.1_2\')')
- self.assertEqual(str(b), '0.1_2')
- b = a[1:]
- self.assertTrue(isinstance(b, Version))
- self.assertEqual(b, Version('1_2-3'))
- self.assertEqual(repr(b), 'Version(\'1_2-3\')')
- self.assertEqual(str(b), '1_2-3')
- # Raise TypeError on tuples
- self.assertRaises(TypeError, b.__getitem__, 1, 2)
-
-if __name__ == '__main__':
- unittest.main()
+def assert_ver_lt(a, b):
+ """Asserts the results of comparisons when 'a' is less than 'b'."""
+ a, b = ver(a), ver(b)
+ assert a < b
+ assert a <= b
+ assert a != b
+ assert not a == b
+ assert not a > b
+ assert not a >= b
+
+
+def assert_ver_gt(a, b):
+ """Asserts the results of comparisons when 'a' is greater than 'b'."""
+ a, b = ver(a), ver(b)
+ assert a > b
+ assert a >= b
+ assert a != b
+ assert not a == b
+ assert not a < b
+ assert not a <= b
+
+
+def assert_ver_eq(a, b):
+ """Asserts the results of comparisons when 'a' is equal to 'b'."""
+ a, b = ver(a), ver(b)
+ assert not a > b
+ assert a >= b
+ assert not a != b
+ assert a == b
+ assert not a < b
+ assert a <= b
+
+
+def assert_in(needle, haystack):
+ """Asserts that 'needle' is in 'haystack'."""
+ assert ver(needle) in ver(haystack)
+
+
+def assert_not_in(needle, haystack):
+ """Asserts that 'needle' is not in 'haystack'."""
+ assert ver(needle) not in ver(haystack)
+
+
+def assert_canonical(canonical_list, version_list):
+ """Asserts that a redundant list is reduced to canonical form."""
+ assert ver(canonical_list) == ver(version_list)
+
+
+def assert_overlaps(v1, v2):
+    """Asserts that two version ranges overlap."""
+ assert ver(v1).overlaps(ver(v2))
+
+
+def assert_no_overlap(v1, v2):
+ """Asserts that two version ranges do not overlap."""
+ assert not ver(v1).overlaps(ver(v2))
+
+
+def assert_satisfies(v1, v2):
+ """Asserts that 'v1' satisfies 'v2'."""
+ assert ver(v1).satisfies(ver(v2))
+
+
+def assert_does_not_satisfy(v1, v2):
+ """Asserts that 'v1' does not satisfy 'v2'."""
+ assert not ver(v1).satisfies(ver(v2))
+
+
+def check_intersection(expected, a, b):
+ """Asserts that 'a' intersect 'b' == 'expected'."""
+ assert ver(expected) == ver(a).intersection(ver(b))
+
+
+def check_union(expected, a, b):
+ """Asserts that 'a' union 'b' == 'expected'."""
+ assert ver(expected) == ver(a).union(ver(b))
+
+
+def test_two_segments():
+ assert_ver_eq('1.0', '1.0')
+ assert_ver_lt('1.0', '2.0')
+ assert_ver_gt('2.0', '1.0')
+ assert_ver_eq('develop', 'develop')
+ assert_ver_lt('1.0', 'develop')
+ assert_ver_gt('develop', '1.0')
+
+
+def test_three_segments():
+ assert_ver_eq('2.0.1', '2.0.1')
+ assert_ver_lt('2.0', '2.0.1')
+ assert_ver_gt('2.0.1', '2.0')
+
+
+def test_alpha():
+ # TODO: not sure whether I like this. 2.0.1a is *usually*
+ # TODO: less than 2.0.1, but special-casing it makes version
+ # TODO: comparison complicated. See version.py
+ assert_ver_eq('2.0.1a', '2.0.1a')
+ assert_ver_gt('2.0.1a', '2.0.1')
+ assert_ver_lt('2.0.1', '2.0.1a')
+
+
+def test_patch():
+ assert_ver_eq('5.5p1', '5.5p1')
+ assert_ver_lt('5.5p1', '5.5p2')
+ assert_ver_gt('5.5p2', '5.5p1')
+ assert_ver_eq('5.5p10', '5.5p10')
+ assert_ver_lt('5.5p1', '5.5p10')
+ assert_ver_gt('5.5p10', '5.5p1')
+
+
+def test_num_alpha_with_no_separator():
+ assert_ver_lt('10xyz', '10.1xyz')
+ assert_ver_gt('10.1xyz', '10xyz')
+ assert_ver_eq('xyz10', 'xyz10')
+ assert_ver_lt('xyz10', 'xyz10.1')
+ assert_ver_gt('xyz10.1', 'xyz10')
+
+
+def test_alpha_with_dots():
+ assert_ver_eq('xyz.4', 'xyz.4')
+ assert_ver_lt('xyz.4', '8')
+ assert_ver_gt('8', 'xyz.4')
+ assert_ver_lt('xyz.4', '2')
+ assert_ver_gt('2', 'xyz.4')
+
+
+def test_nums_and_patch():
+ assert_ver_lt('5.5p2', '5.6p1')
+ assert_ver_gt('5.6p1', '5.5p2')
+ assert_ver_lt('5.6p1', '6.5p1')
+ assert_ver_gt('6.5p1', '5.6p1')
+
+
+def test_rc_versions():
+ assert_ver_gt('6.0.rc1', '6.0')
+ assert_ver_lt('6.0', '6.0.rc1')
+
+
+def test_alpha_beta():
+ assert_ver_gt('10b2', '10a1')
+ assert_ver_lt('10a2', '10b2')
+
+
+def test_double_alpha():
+ assert_ver_eq('1.0aa', '1.0aa')
+ assert_ver_lt('1.0a', '1.0aa')
+ assert_ver_gt('1.0aa', '1.0a')
+
+
+def test_padded_numbers():
+ assert_ver_eq('10.0001', '10.0001')
+ assert_ver_eq('10.0001', '10.1')
+ assert_ver_eq('10.1', '10.0001')
+ assert_ver_lt('10.0001', '10.0039')
+ assert_ver_gt('10.0039', '10.0001')
+
+
+def test_close_numbers():
+ assert_ver_lt('4.999.9', '5.0')
+ assert_ver_gt('5.0', '4.999.9')
+
+
+def test_date_stamps():
+ assert_ver_eq('20101121', '20101121')
+ assert_ver_lt('20101121', '20101122')
+ assert_ver_gt('20101122', '20101121')
+
+
+def test_underscores():
+ assert_ver_eq('2_0', '2_0')
+ assert_ver_eq('2.0', '2_0')
+ assert_ver_eq('2_0', '2.0')
+
+
+def test_rpm_oddities():
+ assert_ver_eq('1b.fc17', '1b.fc17')
+ assert_ver_lt('1b.fc17', '1.fc17')
+ assert_ver_gt('1.fc17', '1b.fc17')
+ assert_ver_eq('1g.fc17', '1g.fc17')
+ assert_ver_gt('1g.fc17', '1.fc17')
+ assert_ver_lt('1.fc17', '1g.fc17')
+
+
+# Stuff below here is not taken from RPM's tests and is
+# unique to spack
+def test_version_ranges():
+ assert_ver_lt('1.2:1.4', '1.6')
+ assert_ver_gt('1.6', '1.2:1.4')
+ assert_ver_eq('1.2:1.4', '1.2:1.4')
+ assert ver('1.2:1.4') != ver('1.2:1.6')
+
+ assert_ver_lt('1.2:1.4', '1.5:1.6')
+ assert_ver_gt('1.5:1.6', '1.2:1.4')
+
+
+def test_contains():
+ assert_in('1.3', '1.2:1.4')
+ assert_in('1.2.5', '1.2:1.4')
+ assert_in('1.3.5', '1.2:1.4')
+ assert_in('1.3.5-7', '1.2:1.4')
+ assert_not_in('1.1', '1.2:1.4')
+ assert_not_in('1.5', '1.2:1.4')
+
+ assert_in('1.4.2', '1.2:1.4')
+ assert_not_in('1.4.2', '1.2:1.4.0')
+
+ assert_in('1.2.8', '1.2.7:1.4')
+ assert_in('1.2.7:1.4', ':')
+ assert_not_in('1.2.5', '1.2.7:1.4')
+
+ assert_in('1.4.1', '1.2.7:1.4')
+ assert_not_in('1.4.1', '1.2.7:1.4.0')
+
+
+def test_in_list():
+ assert_in('1.2', ['1.5', '1.2', '1.3'])
+ assert_in('1.2.5', ['1.5', '1.2:1.3'])
+ assert_in('1.5', ['1.5', '1.2:1.3'])
+ assert_not_in('1.4', ['1.5', '1.2:1.3'])
+
+ assert_in('1.2.5:1.2.7', [':'])
+ assert_in('1.2.5:1.2.7', ['1.5', '1.2:1.3'])
+ assert_not_in('1.2.5:1.5', ['1.5', '1.2:1.3'])
+ assert_not_in('1.1:1.2.5', ['1.5', '1.2:1.3'])
+
+
+def test_ranges_overlap():
+ assert_overlaps('1.2', '1.2')
+ assert_overlaps('1.2.1', '1.2.1')
+ assert_overlaps('1.2.1b', '1.2.1b')
+
+ assert_overlaps('1.2:1.7', '1.6:1.9')
+ assert_overlaps(':1.7', '1.6:1.9')
+ assert_overlaps(':1.7', ':1.9')
+ assert_overlaps(':1.7', '1.6:')
+ assert_overlaps('1.2:', '1.6:1.9')
+ assert_overlaps('1.2:', ':1.9')
+ assert_overlaps('1.2:', '1.6:')
+ assert_overlaps(':', ':')
+ assert_overlaps(':', '1.6:1.9')
+ assert_overlaps('1.6:1.9', ':')
+
+
+def test_overlap_with_containment():
+ assert_in('1.6.5', '1.6')
+ assert_in('1.6.5', ':1.6')
+
+ assert_overlaps('1.6.5', ':1.6')
+ assert_overlaps(':1.6', '1.6.5')
+
+ assert_not_in(':1.6', '1.6.5')
+ assert_in('1.6.5', ':1.6')
+
+
+def test_lists_overlap():
+ assert_overlaps('1.2b:1.7,5', '1.6:1.9,1')
+ assert_overlaps('1,2,3,4,5', '3,4,5,6,7')
+ assert_overlaps('1,2,3,4,5', '5,6,7')
+ assert_overlaps('1,2,3,4,5', '5:7')
+ assert_overlaps('1,2,3,4,5', '3, 6:7')
+ assert_overlaps('1, 2, 4, 6.5', '3, 6:7')
+ assert_overlaps('1, 2, 4, 6.5', ':, 5, 8')
+ assert_overlaps('1, 2, 4, 6.5', ':')
+ assert_no_overlap('1, 2, 4', '3, 6:7')
+ assert_no_overlap('1,2,3,4,5', '6,7')
+ assert_no_overlap('1,2,3,4,5', '6:7')
+
+
+def test_canonicalize_list():
+ assert_canonical(['1.2', '1.3', '1.4'], ['1.2', '1.3', '1.3', '1.4'])
+
+ assert_canonical(['1.2', '1.3:1.4'], ['1.2', '1.3', '1.3:1.4'])
+
+ assert_canonical(['1.2', '1.3:1.4'], ['1.2', '1.3:1.4', '1.4'])
+
+ assert_canonical(['1.3:1.4'], ['1.3:1.4', '1.3', '1.3.1', '1.3.9', '1.4'])
+
+ assert_canonical(['1.3:1.4'], ['1.3', '1.3.1', '1.3.9', '1.4', '1.3:1.4'])
+
+ assert_canonical(
+ ['1.3:1.5'], ['1.3', '1.3.1', '1.3.9', '1.4:1.5', '1.3:1.4']
+ )
+
+ assert_canonical(['1.3:1.5'], ['1.3, 1.3.1,1.3.9,1.4:1.5,1.3:1.4'])
+
+ assert_canonical(['1.3:1.5'], ['1.3, 1.3.1,1.3.9,1.4 : 1.5 , 1.3 : 1.4'])
+
+ assert_canonical([':'], [':,1.3, 1.3.1,1.3.9,1.4 : 1.5 , 1.3 : 1.4'])
+
+
+def test_intersection():
+ check_intersection('2.5', '1.0:2.5', '2.5:3.0')
+ check_intersection('2.5:2.7', '1.0:2.7', '2.5:3.0')
+ check_intersection('0:1', ':', '0:1')
+
+ check_intersection(['1.0', '2.5:2.7'], ['1.0:2.7'], ['2.5:3.0', '1.0'])
+ check_intersection(['2.5:2.7'], ['1.1:2.7'], ['2.5:3.0', '1.0'])
+ check_intersection(['0:1'], [':'], ['0:1'])
+
+
+def test_intersect_with_containment():
+ check_intersection('1.6.5', '1.6.5', ':1.6')
+ check_intersection('1.6.5', ':1.6', '1.6.5')
+
+ check_intersection('1.6:1.6.5', ':1.6.5', '1.6')
+ check_intersection('1.6:1.6.5', '1.6', ':1.6.5')
+
+
+def test_union_with_containment():
+ check_union(':1.6', '1.6.5', ':1.6')
+ check_union(':1.6', ':1.6', '1.6.5')
+
+ check_union(':1.6', ':1.6.5', '1.6')
+ check_union(':1.6', '1.6', ':1.6.5')
+
+ check_union(':', '1.0:', ':2.0')
+
+ check_union('1:4', '1:3', '2:4')
+ check_union('1:4', '2:4', '1:3')
+
+ # Tests successor/predecessor case.
+ check_union('1:4', '1:2', '3:4')
+
+
+def test_basic_version_satisfaction():
+ assert_satisfies('4.7.3', '4.7.3')
+
+ assert_satisfies('4.7.3', '4.7')
+ assert_satisfies('4.7.3b2', '4.7')
+ assert_satisfies('4.7b6', '4.7')
+
+ assert_satisfies('4.7.3', '4')
+ assert_satisfies('4.7.3b2', '4')
+ assert_satisfies('4.7b6', '4')
+
+ assert_does_not_satisfy('4.8.0', '4.9')
+ assert_does_not_satisfy('4.8', '4.9')
+ assert_does_not_satisfy('4', '4.9')
+
+
+def test_basic_version_satisfaction_in_lists():
+ assert_satisfies(['4.7.3'], ['4.7.3'])
+
+ assert_satisfies(['4.7.3'], ['4.7'])
+ assert_satisfies(['4.7.3b2'], ['4.7'])
+ assert_satisfies(['4.7b6'], ['4.7'])
+
+ assert_satisfies(['4.7.3'], ['4'])
+ assert_satisfies(['4.7.3b2'], ['4'])
+ assert_satisfies(['4.7b6'], ['4'])
+
+ assert_does_not_satisfy(['4.8.0'], ['4.9'])
+ assert_does_not_satisfy(['4.8'], ['4.9'])
+ assert_does_not_satisfy(['4'], ['4.9'])
+
+
+def test_version_range_satisfaction():
+ assert_satisfies('4.7b6', '4.3:4.7')
+ assert_satisfies('4.3.0', '4.3:4.7')
+ assert_satisfies('4.3.2', '4.3:4.7')
+
+ assert_does_not_satisfy('4.8.0', '4.3:4.7')
+ assert_does_not_satisfy('4.3', '4.4:4.7')
+
+ assert_satisfies('4.7b6', '4.3:4.7')
+ assert_does_not_satisfy('4.8.0', '4.3:4.7')
+
+
+def test_version_range_satisfaction_in_lists():
+ assert_satisfies(['4.7b6'], ['4.3:4.7'])
+ assert_satisfies(['4.3.0'], ['4.3:4.7'])
+ assert_satisfies(['4.3.2'], ['4.3:4.7'])
+
+ assert_does_not_satisfy(['4.8.0'], ['4.3:4.7'])
+ assert_does_not_satisfy(['4.3'], ['4.4:4.7'])
+
+ assert_satisfies(['4.7b6'], ['4.3:4.7'])
+ assert_does_not_satisfy(['4.8.0'], ['4.3:4.7'])
+
+
+def test_satisfaction_with_lists():
+ assert_satisfies('4.7', '4.3, 4.6, 4.7')
+ assert_satisfies('4.7.3', '4.3, 4.6, 4.7')
+ assert_satisfies('4.6.5', '4.3, 4.6, 4.7')
+ assert_satisfies('4.6.5.2', '4.3, 4.6, 4.7')
+
+ assert_does_not_satisfy('4', '4.3, 4.6, 4.7')
+ assert_does_not_satisfy('4.8.0', '4.2, 4.3:4.7')
+
+ assert_satisfies('4.8.0', '4.2, 4.3:4.8')
+ assert_satisfies('4.8.2', '4.2, 4.3:4.8')
+
+
+def test_formatted_strings():
+ versions = '1.2.3', '1_2_3', '1-2-3'
+ for item in versions:
+ v = Version(item)
+ assert v.dotted == '1.2.3'
+ assert v.dashed == '1-2-3'
+ assert v.underscored == '1_2_3'
+ assert v.joined == '123'
+
+
+def test_repr_and_str():
+
+ def check_repr_and_str(vrs):
+ a = Version(vrs)
+ assert repr(a) == 'Version(\'' + vrs + '\')'
+ b = eval(repr(a))
+ assert a == b
+ assert str(a) == vrs
+ assert str(a) == str(b)
+
+ check_repr_and_str('1.2.3')
+ check_repr_and_str('R2016a')
+ check_repr_and_str('R2016a.2-3_4')
+
+
+def test_get_item():
+ a = Version('0.1_2-3')
+ assert isinstance(a[1], int)
+ # Test slicing
+ b = a[0:2]
+ assert isinstance(b, Version)
+ assert b == Version('0.1')
+ assert repr(b) == 'Version(\'0.1\')'
+ assert str(b) == '0.1'
+ b = a[0:3]
+ assert isinstance(b, Version)
+ assert b == Version('0.1_2')
+ assert repr(b) == 'Version(\'0.1_2\')'
+ assert str(b) == '0.1_2'
+ b = a[1:]
+ assert isinstance(b, Version)
+ assert b == Version('1_2-3')
+ assert repr(b) == 'Version(\'1_2-3\')'
+ assert str(b) == '1_2-3'
+ # Raise TypeError on tuples
+ with pytest.raises(TypeError):
+ b.__getitem__(1, 2)
diff --git a/lib/spack/spack/test/web.py b/lib/spack/spack/test/web.py
new file mode 100644
index 0000000000..9fa95a8d18
--- /dev/null
+++ b/lib/spack/spack/test/web.py
@@ -0,0 +1,162 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""Tests for web.py."""
+import os
+
+import spack
+from spack.util.web import spider, find_versions_of_archive
+from spack.version import *
+
+
+web_data_path = os.path.join(spack.test_path, 'data', 'web')
+
+root = 'file://' + web_data_path + '/index.html'
+root_tarball = 'file://' + web_data_path + '/foo-0.0.0.tar.gz'
+
+page_1 = 'file://' + os.path.join(web_data_path, '1.html')
+page_2 = 'file://' + os.path.join(web_data_path, '2.html')
+page_3 = 'file://' + os.path.join(web_data_path, '3.html')
+page_4 = 'file://' + os.path.join(web_data_path, '4.html')
+
+
+def test_spider_0():
+ pages, links = spider(root, depth=0)
+
+ assert root in pages
+ assert page_1 not in pages
+ assert page_2 not in pages
+ assert page_3 not in pages
+ assert page_4 not in pages
+
+ assert "This is the root page." in pages[root]
+
+ assert root not in links
+ assert page_1 in links
+ assert page_2 not in links
+ assert page_3 not in links
+ assert page_4 not in links
+
+
+def test_spider_1():
+ pages, links = spider(root, depth=1)
+
+ assert root in pages
+ assert page_1 in pages
+ assert page_2 not in pages
+ assert page_3 not in pages
+ assert page_4 not in pages
+
+ assert "This is the root page." in pages[root]
+ assert "This is page 1." in pages[page_1]
+
+ assert root not in links
+ assert page_1 in links
+ assert page_2 in links
+ assert page_3 not in links
+ assert page_4 not in links
+
+
+def test_spider_2():
+ pages, links = spider(root, depth=2)
+
+ assert root in pages
+ assert page_1 in pages
+ assert page_2 in pages
+ assert page_3 not in pages
+ assert page_4 not in pages
+
+ assert "This is the root page." in pages[root]
+ assert "This is page 1." in pages[page_1]
+ assert "This is page 2." in pages[page_2]
+
+ assert root not in links
+ assert page_1 in links
+ assert page_1 in links
+ assert page_2 in links
+ assert page_3 in links
+ assert page_4 in links
+
+
+def test_spider_3():
+ pages, links = spider(root, depth=3)
+
+ assert root in pages
+ assert page_1 in pages
+ assert page_2 in pages
+ assert page_3 in pages
+ assert page_4 in pages
+
+ assert "This is the root page." in pages[root]
+ assert "This is page 1." in pages[page_1]
+ assert "This is page 2." in pages[page_2]
+ assert "This is page 3." in pages[page_3]
+ assert "This is page 4." in pages[page_4]
+
+ assert root in links # circular link on page 3
+ assert page_1 in links
+ assert page_1 in links
+ assert page_2 in links
+ assert page_3 in links
+ assert page_4 in links
+
+
+def test_find_versions_of_archive_0():
+ versions = find_versions_of_archive(root_tarball, root, list_depth=0)
+ assert ver('0.0.0') in versions
+
+
+def test_find_versions_of_archive_1():
+ versions = find_versions_of_archive(root_tarball, root, list_depth=1)
+ assert ver('0.0.0') in versions
+ assert ver('1.0.0') in versions
+
+
+def test_find_versions_of_archive_2():
+ versions = find_versions_of_archive(root_tarball, root, list_depth=2)
+ assert ver('0.0.0') in versions
+ assert ver('1.0.0') in versions
+ assert ver('2.0.0') in versions
+
+
+def test_find_exotic_versions_of_archive_2():
+ versions = find_versions_of_archive(root_tarball, root, list_depth=2)
+ # up for grabs to make this better.
+ assert ver('2.0.0b2') in versions
+
+
+def test_find_versions_of_archive_3():
+ versions = find_versions_of_archive(root_tarball, root, list_depth=3)
+ assert ver('0.0.0') in versions
+ assert ver('1.0.0') in versions
+ assert ver('2.0.0') in versions
+ assert ver('3.0') in versions
+ assert ver('4.5') in versions
+
+
+def test_find_exotic_versions_of_archive_3():
+ versions = find_versions_of_archive(root_tarball, root, list_depth=3)
+ assert ver('2.0.0b2') in versions
+ assert ver('3.0a1') in versions
+ assert ver('4.5-rc5') in versions
diff --git a/lib/spack/spack/url.py b/lib/spack/spack/url.py
index 65f8e12e58..174f7d0b3c 100644
--- a/lib/spack/spack/url.py
+++ b/lib/spack/spack/url.py
@@ -46,8 +46,8 @@ it's never been told about that version before.
"""
import os
import re
-from StringIO import StringIO
-from urlparse import urlsplit, urlunsplit
+from six import StringIO
+from six.moves.urllib.parse import urlsplit, urlunsplit
import llnl.util.tty as tty
from llnl.util.tty.color import *
@@ -71,7 +71,7 @@ def find_list_url(url):
url_types = [
# e.g. https://github.com/llnl/callpath/archive/v1.0.1.tar.gz
- (r'^(https://github.com/[^/]+/[^/]+)/archive/',
+ (r'(.*github\.com/[^/]+/[^/]+)/archive/',
lambda m: m.group(1) + '/releases')]
for pattern, fun in url_types:
@@ -101,6 +101,177 @@ def strip_query_and_fragment(path):
return (path, '') # Ignore URL parse errors here
+def strip_version_suffixes(path):
+ """Some tarballs contain extraneous information after the version:
+
+ * ``bowtie2-2.2.5-source``
+ * ``libevent-2.0.21-stable``
+ * ``cuda_8.0.44_linux.run``
+
+ These strings are not part of the version number and should be ignored.
+ This function strips those suffixes off and returns the remaining string.
+ The goal is that the version is always the last thing in ``path``:
+
+ * ``bowtie2-2.2.5``
+ * ``libevent-2.0.21``
+ * ``cuda_8.0.44``
+
+ :param str path: The filename or URL for the package
+ :return: The ``path`` with any extraneous suffixes removed
+ :rtype: str
+ """
+ # NOTE: This could be done with complicated regexes in parse_version_offset
+ # NOTE: The problem is that we would have to add these regexes to the end
+ # NOTE: of every single version regex. Easier to just strip them off
+ # NOTE: permanently
+
+ suffix_regexes = [
+ # Download type
+ '[Ii]nstall',
+ 'all',
+ 'src(_0)?',
+ '[Ss]ources?',
+ 'file',
+ 'full',
+ 'single',
+ 'public',
+ 'with[a-zA-Z_-]+',
+ 'bin',
+ 'binary',
+ 'run',
+ '[Uu]niversal',
+ 'jar',
+ 'complete',
+ 'oss',
+ 'gem',
+ 'tar',
+ 'sh',
+
+ # Download version
+ 'stable',
+ '[Ff]inal',
+ 'rel',
+ 'orig',
+ 'dist',
+ '\+',
+
+ # License
+ 'gpl',
+
+ # Arch
+ # Needs to come before and after OS, appears in both orders
+ 'ia32',
+ 'intel',
+ 'amd64',
+ 'x64',
+ 'x86_64',
+ 'x86',
+ 'i[36]86',
+ 'ppc64(le)?',
+ 'armv?(7l|6l|64)',
+
+ # OS
+ '[Ll]inux(_64)?',
+ '[Uu]ni?x',
+ '[Ss]un[Oo][Ss]',
+ '[Mm]ac[Oo][Ss][Xx]?',
+ '[Oo][Ss][Xx]',
+ '[Dd]arwin(64)?',
+ '[Aa]pple',
+ '[Ww]indows',
+ '[Ww]in(64|32)?',
+ '[Cc]ygwin(64|32)?',
+ '[Mm]ingw',
+
+ # Arch
+ # Needs to come before and after OS, appears in both orders
+ 'ia32',
+ 'intel',
+ 'amd64',
+ 'x64',
+ 'x86_64',
+ 'x86',
+ 'i[36]86',
+ 'ppc64(le)?',
+ 'armv?(7l|6l|64)?',
+
+ # PyPI
+ '[._-]py[23].*\.whl',
+ '[._-]cp[23].*\.whl',
+ '[._-]win.*\.exe',
+ ]
+
+ for regex in suffix_regexes:
+ # Remove the suffix from the end of the path
+ # This may be done multiple times
+ path = re.sub(r'[._-]?' + regex + '$', '', path)
+
+ return path
+
+
+def strip_name_suffixes(path, version):
+ """Most tarballs contain a package name followed by a version number.
+ However, some also contain extraneous information in-between the name
+ and version:
+
+ * ``rgb-1.0.6``
+ * ``converge_install_2.3.16``
+ * ``jpegsrc.v9b``
+
+ These strings are not part of the package name and should be ignored.
+ This function strips the version number and any extraneous suffixes
+ off and returns the remaining string. The goal is that the name is
+ always the last thing in ``path``:
+
+ * ``rgb``
+ * ``converge``
+ * ``jpeg``
+
+ :param str path: The filename or URL for the package
+ :param str version: The version detected for this URL
+ :return: The ``path`` with any extraneous suffixes removed
+ :rtype: str
+ """
+ # NOTE: This could be done with complicated regexes in parse_name_offset
+ # NOTE: The problem is that we would have to add these regexes to every
+ # NOTE: single name regex. Easier to just strip them off permanently
+
+ suffix_regexes = [
+ # Strip off the version and anything after it
+
+ # name-ver
+ # name_ver
+ # name.ver
+ r'[._-]v?' + str(version) + '.*',
+
+ # namever
+ str(version) + '.*',
+
+ # Download type
+ 'install',
+ 'src',
+ '(open)?[Ss]ources?',
+ '[._-]std',
+
+ # Download version
+ 'snapshot',
+ 'distrib',
+
+ # VCS
+ '0\+bzr',
+
+ # License
+ 'gpl',
+ ]
+
+ for regex in suffix_regexes:
+ # Remove the suffix from the end of the path
+ # This may be done multiple times
+ path = re.sub('[._-]?' + regex + '$', '', path)
+
+ return path
+
+
def split_url_extension(path):
"""Some URLs have a query string, e.g.:
@@ -125,7 +296,7 @@ def split_url_extension(path):
prefix, ext, suffix = path, '', ''
# Strip off sourceforge download suffix.
- match = re.search(r'((?:sourceforge.net|sf.net)/.*)(/download)$', path)
+ match = re.search(r'((?:sourceforge\.net|sf\.net)/.*)(/download)$', path)
if match:
prefix, suffix = match.groups()
@@ -189,8 +360,20 @@ def parse_version_offset(path):
path, ext, suffix = split_url_extension(path)
# stem: Everything from path after the final '/'
- stem = os.path.basename(path)
- offset = len(path) - len(stem)
+ original_stem = os.path.basename(path)
+
+ # Try to strip off anything after the version number
+ stem = strip_version_suffixes(original_stem)
+
+ # Assumptions:
+ #
+ # 1. version always comes after the name
+ # 2. separators include '-', '_', and '.'
+ # 3. names can contain A-Z, a-z, 0-9, '+', separators
+ # 4. versions can contain A-Z, a-z, 0-9, separators
+ # 5. versions always start with a digit
+ # 6. versions are often prefixed by a 'v' character
+ # 7. separators are most reliable to determine name/version boundaries
# List of the following format:
#
@@ -202,87 +385,118 @@ def parse_version_offset(path):
# The first regex that matches string will be used to determine
# the version of the package. Therefore, hyperspecific regexes should
# come first while generic, catch-all regexes should come last.
+ # With that said, regular expressions are slow, so if possible, put
+ # ones that only catch one or two URLs at the bottom.
version_regexes = [
- # GitHub tarballs, e.g. v1.2.3
- (r'github.com/.+/(?:zip|tar)ball/v?((\d+\.)+\d+)$', path),
+ # 1st Pass: Simplest case
+ # Assume name contains no digits and version contains no letters
+ # e.g. libpng-1.6.27
+ (r'^[a-zA-Z+._-]+[._-]v?(\d[\d._-]*)$', stem),
- # e.g. https://github.com/sam-github/libnet/tarball/libnet-1.1.4
- (r'github.com/.+/(?:zip|tar)ball/.*-((\d+\.)+\d+)$', path),
+ # 2nd Pass: Version only
+ # Assume version contains no letters
- # e.g. https://github.com/isaacs/npm/tarball/v0.2.5-1
- (r'github.com/.+/(?:zip|tar)ball/v?((\d+\.)+\d+-(\d+))$', path),
+ # ver
+ # e.g. 3.2.7, 7.0.2-7, v3.3.0, v1_6_3
+ (r'^v?(\d[\d._-]*)$', stem),
- # e.g. https://github.com/petdance/ack/tarball/1.93_02
- (r'github.com/.+/(?:zip|tar)ball/v?((\d+\.)+\d+_(\d+))$', path),
+ # 3rd Pass: No separator characters are used
+ # Assume name contains no digits
- # Yorick is very special.
- # e.g. https://github.com/dhmunro/yorick/archive/y_2_2_04.tar.gz
- (r'github.com/[^/]+/yorick/archive/y_(\d+(?:_\d+)*)$', path),
+ # namever
+ # e.g. turbolinux702, nauty26r7
+ (r'^[a-zA-Z+]*(\d[\da-zA-Z]*)$', stem),
- # e.g. https://github.com/hpc/lwgrp/archive/v1.0.1.tar.gz
- (r'github.com/[^/]+/[^/]+/archive/(?:release-)?v?(\w+(?:[.-]\w+)*)$', path), # noqa
+ # 4th Pass: A single separator character is used
+ # Assume name contains no digits
- # e.g. https://github.com/erlang/otp/tarball/OTP_R15B01 (erlang style)
- (r'[-_](R\d+[AB]\d*(-\d+)?)', path),
+ # name-name-ver-ver
+ # e.g. panda-2016-03-07, gts-snapshot-121130, cdd-061a
+ (r'^[a-zA-Z+-]*(\d[\da-zA-Z-]*)$', stem),
- # e.g., https://github.com/hpc/libcircle/releases/download/0.2.1-rc.1/libcircle-0.2.1-rc.1.tar.gz
- # e.g.,
- # https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz
- (r'github.com/[^/]+/[^/]+/releases/download/v?([^/]+)/.*$', path),
+ # name_name_ver_ver
+ # e.g. tinyxml_2_6_2, boost_1_55_0, tbb2017_20161128, v1_6_3
+ (r'^[a-zA-Z+_]*(\d[\da-zA-Z_]*)$', stem),
- # GitLab syntax:
- # {baseUrl}{/organization}{/projectName}/repository/archive.{fileEnding}?ref={gitTag}
- # as with github releases, we hope a version can be found in the
- # git tag
- # Search dotted versions:
- # e.g., https://gitlab.kitware.com/vtk/vtk/repository/archive.tar.bz2?ref=v7.0.0
- # e.g., https://example.com/org/repo/repository/archive.tar.bz2?ref=SomePrefix-2.1.1
- # e.g., http://apps.fz-juelich.de/jsc/sionlib/download.php?version=1.7.1
- (r'\?ref=(?:.*-|v)*((\d+\.)+\d+).*$', suffix),
- (r'\?version=((\d+\.)+\d+)', suffix),
+ # name.name.ver.ver
+ # e.g. prank.source.150803, jpegsrc.v9b, atlas3.11.34, geant4.10.01.p03
+ (r'^[a-zA-Z+.]*(\d[\da-zA-Z.]*)$', stem),
- # e.g. boost_1_39_0
- (r'((\d+_)+\d+)$', stem),
+ # 5th Pass: Two separator characters are used
+ # Name may contain digits, version may contain letters
- # e.g. foobar-4.5.1-1
- # e.g. ruby-1.9.1-p243
- (r'-((\d+\.)*\d\.\d+-(p|rc|RC)?\d+)(?:[-._](?:bin|dist|stable|src|sources))?$', stem), # noqa
+ # name-name-ver.ver
+ # e.g. m4-1.4.17, gmp-6.0.0a, launchmon-v1.0.2
+ (r'^[a-zA-Z\d+-]+-v?(\d[\da-zA-Z.]*)$', stem),
- # e.g. lame-398-1
- (r'-((\d)+-\d)', stem),
+ # name-name-ver_ver
+ # e.g. icu4c-57_1
+ (r'^[a-zA-Z\d+-]+-v?(\d[\da-zA-Z_]*)$', stem),
- # e.g. foobar_1.2-3 or 3.98-1.4
- (r'_((\d+\.)+\d+(-(\d+(\.\d+)?))?[a-z]?)', stem),
+ # name_name_ver.ver
+ # e.g. superlu_dist_4.1, pexsi_v0.9.0
+ (r'^[a-zA-Z\d+_]+_v?(\d[\da-zA-Z.]*)$', stem),
- # e.g. foobar-4.5.1
- (r'-((\d+\.)*\d+)$', stem),
+ # name_name.ver.ver
+ # e.g. fer_source.v696
+ (r'^[a-zA-Z\d+_]+\.v?(\d[\da-zA-Z.]*)$', stem),
- # e.g. foobar-4.5.1b, foobar4.5RC, foobar.v4.5.1b
- (r'[-._]?v?((\d+\.)*\d+[-._]?([a-z]|rc|RC|tp|TP?)\d*)$', stem),
+ # name-name-ver.ver-ver.ver
+ # e.g. sowing-1.1.23-p1, bib2xhtml-v3.0-15-gf506, 4.6.3-alpha04
+ (r'^(?:[a-zA-Z\d+-]+-)?v?(\d[\da-zA-Z.-]*)$', stem),
- # e.g. foobar-4.5.0-beta1, or foobar-4.50-beta
- (r'-((\d+\.)*\d+-beta(\d+)?)$', stem),
+ # namever.ver-ver.ver
+ # e.g. go1.4-bootstrap-20161024
+ (r'^[a-zA-Z+]+v?(\d[\da-zA-Z.-]*)$', stem),
- # e.g. foobar4.5.1
- (r'((\d+\.)*\d+)$', stem),
+ # 6th Pass: All three separator characters are used
+ # Name may contain digits, version may contain letters
- # e.g. foobar-4.5.0-bin
- (r'-((\d+\.)+\d+[a-z]?)[-._](bin|dist|stable|src|sources?)$', stem),
+ # name_name-ver.ver
+ # e.g. the_silver_searcher-0.32.0, sphinx_rtd_theme-0.1.10a0
+ (r'^[a-zA-Z\d+_]+-v?(\d[\da-zA-Z.]*)$', stem),
- # e.g. dash_0.5.5.1.orig.tar.gz (Debian style)
- (r'_((\d+\.)+\d+[a-z]?)[.]orig$', stem),
+ # name.name_ver.ver-ver.ver
+ # e.g. TH.data_1.0-8, XML_3.98-1.4
+ (r'^[a-zA-Z\d+.]+_v?(\d[\da-zA-Z.-]*)$', stem),
- # e.g. http://www.openssl.org/source/openssl-0.9.8s.tar.gz
- (r'-v?([^-]+(-alpha|-beta)?)', stem),
+ # name-name-ver.ver_ver.ver
+ # e.g. pypar-2.1.5_108
+ (r'^[a-zA-Z\d+-]+-v?(\d[\da-zA-Z._]*)$', stem),
- # e.g. astyle_1.23_macosx.tar.gz
- (r'_([^_]+(_alpha|_beta)?)', stem),
+ # name.name_name-ver.ver
+ # e.g. tap.py-1.6, backports.ssl_match_hostname-3.5.0.1
+ (r'^[a-zA-Z\d+._]+-v?(\d[\da-zA-Z.]*)$', stem),
- # e.g. http://mirrors.jenkins-ci.org/war/1.486/jenkins.war
- (r'\/(\d\.\d+)\/', path),
+ # name-namever.ver_ver.ver
+ # e.g. STAR-CCM+11.06.010_02
+ (r'^[a-zA-Z+-]+(\d[\da-zA-Z._]*)$', stem),
- # e.g. http://www.ijg.org/files/jpegsrc.v8d.tar.gz
- (r'\.v(\d+[a-z]?)', stem)
+ # 7th Pass: Specific VCS
+
+ # bazaar
+ # e.g. libvterm-0+bzr681
+ (r'bzr(\d[\da-zA-Z._-]*)$', stem),
+
+ # 8th Pass: Version in path
+
+ # github.com/repo/name/releases/download/vver/name
+ # e.g. https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow
+ (r'github\.com/[^/]+/[^/]+/releases/download/[a-zA-Z+._-]*v?(\d[\da-zA-Z._-]*)/', path), # noqa
+
+ # 9th Pass: Query strings
+
+ # e.g. http://gitlab.cosma.dur.ac.uk/swift/swiftsim/repository/archive.tar.gz?ref=v0.3.0
+ (r'\?ref=[a-zA-Z+._-]*v?(\d[\da-zA-Z._-]*)$', suffix),
+
+ # e.g. http://apps.fz-juelich.de/jsc/sionlib/download.php?version=1.7.1
+ (r'\?version=v?(\d[\da-zA-Z._-]*)$', suffix),
+
+ # e.g. http://slepc.upv.es/download/download.php?filename=slepc-3.6.2.tar.gz
+ (r'\?filename=[a-zA-Z\d+-]+-v?(\d[\da-zA-Z.]*)$', stem),
+
+ # e.g. http://wwwpub.zih.tu-dresden.de/%7Emlieber/dcount/dcount.php?package=otf&get=OTF-1.12.5salmon.tar.gz
+ (r'\?package=[a-zA-Z\d+-]+&get=[a-zA-Z\d+-]+-v?(\d[\da-zA-Z.]*)$', stem), # noqa
]
for i, version_regex in enumerate(version_regexes):
@@ -292,9 +506,15 @@ def parse_version_offset(path):
version = match.group(1)
start = match.start(1)
- # if we matched from the basename, then add offset in.
+ # If we matched from the stem or suffix, we need to add offset
+ offset = 0
if match_string is stem:
- start += offset
+ offset = len(path) - len(original_stem)
+ elif match_string is suffix:
+ offset = len(path)
+ if ext:
+ offset += len(ext) + 1 # .tar.gz is converted to tar.gz
+ start += offset
return version, start, len(version), i, regex
@@ -342,7 +562,7 @@ def parse_name_offset(path, v=None):
except UndetectableVersionError:
# Not all URLs contain a version. We still want to be able
# to determine a name if possible.
- v = ''
+ v = 'unknown'
# path: The prefix of the URL, everything before the ext and suffix
# ext: The file extension
@@ -350,8 +570,10 @@ def parse_name_offset(path, v=None):
path, ext, suffix = split_url_extension(path)
# stem: Everything from path after the final '/'
- stem = os.path.basename(path)
- offset = len(path) - len(stem)
+ original_stem = os.path.basename(path)
+
+ # Try to strip off anything after the package name
+ stem = strip_name_suffixes(original_stem, v)
# List of the following format:
#
@@ -363,25 +585,45 @@ def parse_name_offset(path, v=None):
# The first regex that matches string will be used to determine
# the name of the package. Therefore, hyperspecific regexes should
# come first while generic, catch-all regexes should come last.
+ # With that said, regular expressions are slow, so if possible, put
+ # ones that only catch one or two URLs at the bottom.
name_regexes = [
- (r'/sourceforge/([^/]+)/', path),
- (r'github.com/[^/]+/[^/]+/releases/download/%s/(.*)-%s$' %
- (v, v), path),
- (r'/([^/]+)/(tarball|zipball)/', path),
- (r'/([^/]+)[_.-](bin|dist|stable|src|sources)[_.-]%s' % v, path),
- (r'github.com/[^/]+/([^/]+)/archive', path),
- (r'[^/]+/([^/]+)/repository/archive', path), # gitlab
- (r'([^/]+)/download.php', path),
-
- (r'([^/]+)[_.-]v?%s' % v, stem), # prefer the stem
- (r'([^/]+)%s' % v, stem),
-
- # accept the path if name is not in stem.
- (r'/([^/]+)[_.-]v?%s' % v, path),
- (r'/([^/]+)%s' % v, path),
-
- (r'^([^/]+)[_.-]v?%s' % v, path),
- (r'^([^/]+)%s' % v, path)
+ # 1st Pass: Common repositories
+
+ # GitHub: github.com/repo/name/
+ # e.g. https://github.com/nco/nco/archive/4.6.2.tar.gz
+ (r'github\.com/[^/]+/([^/]+)', path),
+
+ # GitLab: gitlab.*/repo/name/
+ # e.g. http://gitlab.cosma.dur.ac.uk/swift/swiftsim/repository/archive.tar.gz?ref=v0.3.0
+ (r'gitlab[^/]+/[^/]+/([^/]+)', path),
+
+ # Bitbucket: bitbucket.org/repo/name/
+ # e.g. https://bitbucket.org/glotzer/hoomd-blue/get/v1.3.3.tar.bz2
+ (r'bitbucket\.org/[^/]+/([^/]+)', path),
+
+ # PyPI: pypi.(python.org|io)/packages/source/first-letter/name/
+ # e.g. https://pypi.python.org/packages/source/m/mpmath/mpmath-all-0.19.tar.gz
+ # e.g. https://pypi.io/packages/source/b/backports.ssl_match_hostname/backports.ssl_match_hostname-3.5.0.1.tar.gz
+ (r'pypi\.(?:python\.org|io)/packages/source/[A-Za-z\d]/([^/]+)', path),
+
+ # 2nd Pass: Query strings
+
+ # ?filename=name-ver.ver
+ # e.g. http://slepc.upv.es/download/download.php?filename=slepc-3.6.2.tar.gz
+ (r'\?filename=([A-Za-z\d+-]+)$', stem),
+
+ # ?package=name
+ # e.g. http://wwwpub.zih.tu-dresden.de/%7Emlieber/dcount/dcount.php?package=otf&get=OTF-1.12.5salmon.tar.gz
+ (r'\?package=([A-Za-z\d+-]+)', stem),
+
+ # download.php
+ # e.g. http://apps.fz-juelich.de/jsc/sionlib/download.php?version=1.7.1
+ (r'([^/]+)/download.php$', path),
+
+ # 3rd Pass: Name followed by version in archive
+
+ (r'^([A-Za-z\d+\._-]+)$', stem),
]
for i, name_regex in enumerate(name_regexes):
@@ -391,13 +633,15 @@ def parse_name_offset(path, v=None):
name = match.group(1)
start = match.start(1)
- # if we matched from the basename, then add offset in.
+ # If we matched from the stem or suffix, we need to add offset
+ offset = 0
if match_string is stem:
- start += offset
-
- # package names should be lowercase and separated by dashes.
- name = name.lower()
- name = re.sub('[_.]', '-', name)
+ offset = len(path) - len(original_stem)
+ elif match_string is suffix:
+ offset = len(path)
+ if ext:
+ offset += len(ext) + 1 # .tar.gz is converted to tar.gz
+ start += offset
return name, start, len(name), i, regex
@@ -430,6 +674,9 @@ def parse_name_and_version(path):
The version of the package
:rtype: tuple
+
+ :raises UndetectableVersionError: If the URL does not match any regexes
+ :raises UndetectableNameError: If the URL does not match any regexes
"""
ver = parse_version(path)
name = parse_name(path, ver)
@@ -456,6 +703,22 @@ def cumsum(elts, init=0, fn=lambda x: x):
return sums
+def find_all(substring, string):
+ """Returns a list containing the indices of
+ every occurrence of substring in string."""
+
+ occurrences = []
+ index = 0
+ while index < len(string):
+ index = string.find(substring, index)
+ if index == -1:
+ break
+ occurrences.append(index)
+ index += len(substring)
+
+ return occurrences
+
+
def substitution_offsets(path):
"""This returns offsets for substituting versions and names in the
provided path. It is a helper for :func:`substitute_version`.
@@ -467,65 +730,34 @@ def substitution_offsets(path):
except UndetectableNameError:
return (None, -1, -1, (), ver, vs, vl, (vs,))
except UndetectableVersionError:
- return (None, -1, -1, (), None, -1, -1, ())
-
- # protect extensions like bz2 from getting inadvertently
- # considered versions.
- path = comp.strip_extension(path)
-
- # Construct a case-insensitive regular expression for the package name.
- name_re = '(%s)' % insensitize(name)
-
- # Split the string apart by things that match the name so that if the
- # name contains numbers or things that look like versions, we don't
- # accidentally substitute them with a version.
- name_parts = re.split(name_re, path)
-
- offsets = cumsum(name_parts, 0, len)
- name_offsets = offsets[1::2]
+ try:
+ name, ns, nl, ni, nregex = parse_name_offset(path)
+ return (name, ns, nl, (ns,), None, -1, -1, ())
+ except UndetectableNameError:
+ return (None, -1, -1, (), None, -1, -1, ())
- ver_offsets = []
- for i in xrange(0, len(name_parts), 2):
- vparts = re.split(ver, name_parts[i])
- voffsets = cumsum(vparts, offsets[i], len)
- ver_offsets.extend(voffsets[1::2])
+ # Find the index of every occurrence of name and ver in path
+ name_offsets = find_all(name, path)
+ ver_offsets = find_all(ver, path)
- return (name, ns, nl, tuple(name_offsets),
- ver, vs, vl, tuple(ver_offsets))
+ return (name, ns, nl, name_offsets,
+ ver, vs, vl, ver_offsets)
def wildcard_version(path):
"""Find the version in the supplied path, and return a regular expression
that will match this path with any version in its place.
"""
- # Get name and version, so we can treat them specially
- name, v = parse_name_and_version(path)
+ # Get version so we can replace it with a wildcard
+ version = parse_version(path)
- path, ext, suffix = split_url_extension(path)
+ # Split path by versions
+ vparts = path.split(str(version))
+
+ # Replace each version with a generic capture group to find versions
+ # and escape everything else so it's not interpreted as a regex
+ result = '(\d.*)'.join(re.escape(vp) for vp in vparts)
- # Construct a case-insensitive regular expression for the package name.
- name_re = '(%s)' % insensitize(name)
-
- # Split the string apart by things that match the name so that if the
- # name contains numbers or things that look like versions, we don't
- # catch them with the version wildcard.
- name_parts = re.split(name_re, path)
-
- # Even elements in the array did *not* match the name
- for i in xrange(0, len(name_parts), 2):
- # Split each part by things that look like versions.
- vparts = re.split(v.wildcard(), name_parts[i])
-
- # Replace each version with a generic capture group to find versions.
- # And escape everything else so it's not interpreted as a regex
- vgroup = '(%s)' % v.wildcard()
- name_parts[i] = vgroup.join(re.escape(vp) for vp in vparts)
-
- # Put it all back together with original name matches intact.
- result = ''.join(name_parts)
- if ext:
- result += '.' + ext
- result += suffix
return result
diff --git a/lib/spack/spack/util/crypto.py b/lib/spack/spack/util/crypto.py
index d074716022..2965168056 100644
--- a/lib/spack/spack/util/crypto.py
+++ b/lib/spack/spack/util/crypto.py
@@ -22,6 +22,7 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+import sys
import hashlib
"""Set of acceptable hashes that Spack will use."""
@@ -104,11 +105,16 @@ class Checker(object):
def prefix_bits(byte_array, bits):
"""Return the first <bits> bits of a byte array as an integer."""
+ if sys.version_info < (3,):
+ b2i = ord # In Python 2, indexing byte_array gives str
+ else:
+ b2i = lambda b: b # In Python 3, indexing byte_array gives int
+
result = 0
n = 0
for i, b in enumerate(byte_array):
n += 8
- result = (result << 8) | ord(b)
+ result = (result << 8) | b2i(b)
if n >= bits:
break
diff --git a/lib/spack/spack/util/executable.py b/lib/spack/spack/util/executable.py
index 63bbbb7c92..1d7f019fdf 100644
--- a/lib/spack/spack/util/executable.py
+++ b/lib/spack/spack/util/executable.py
@@ -22,10 +22,10 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-
import os
import re
import subprocess
+from six import string_types
import llnl.util.tty as tty
import spack
@@ -68,7 +68,7 @@ class Executable(object):
Raise an exception if the subprocess returns an
error. Default is True. When not set, the return code is
- avaiale as `exe.returncode`.
+ available as `exe.returncode`.
ignore_errors
@@ -129,7 +129,7 @@ class Executable(object):
raise ValueError("Cannot use `str` as input stream.")
def streamify(arg, mode):
- if isinstance(arg, basestring):
+ if isinstance(arg, string_types):
return open(arg, mode), True
elif arg is str:
return subprocess.PIPE, False
@@ -178,9 +178,9 @@ class Executable(object):
if output is str or error is str:
result = ''
if output is str:
- result += out
+ result += out.decode('utf-8')
if error is str:
- result += err
+ result += err.decode('utf-8')
return result
except OSError as e:
diff --git a/lib/spack/spack/util/multiproc.py b/lib/spack/spack/util/multiproc.py
index 6a25c45713..91bac57c26 100644
--- a/lib/spack/spack/util/multiproc.py
+++ b/lib/spack/spack/util/multiproc.py
@@ -28,7 +28,6 @@ than multiprocessing.Pool.apply() can. For example, apply() will fail
to pickle functions if they're passed indirectly as parameters.
"""
from multiprocessing import Process, Pipe, Semaphore, Value
-from itertools import izip
__all__ = ['spawn', 'parmap', 'Barrier']
@@ -43,7 +42,7 @@ def spawn(f):
def parmap(f, X):
pipe = [Pipe() for x in X]
proc = [Process(target=spawn(f), args=(c, x))
- for x, (p, c) in izip(X, pipe)]
+ for x, (p, c) in zip(X, pipe)]
[p.start() for p in proc]
[p.join() for p in proc]
return [p.recv() for (p, c) in pipe]
diff --git a/lib/spack/spack/util/naming.py b/lib/spack/spack/util/naming.py
index 9a5cdee411..cd35008aed 100644
--- a/lib/spack/spack/util/naming.py
+++ b/lib/spack/spack/util/naming.py
@@ -27,7 +27,7 @@ from __future__ import absolute_import
import string
import itertools
import re
-from StringIO import StringIO
+from six import StringIO
import spack
@@ -39,6 +39,7 @@ __all__ = [
'validate_fully_qualified_module_name',
'validate_module_name',
'possible_spack_module_names',
+ 'simplify_name',
'NamespaceTrie']
# Valid module names can contain '-' but can't start with it.
@@ -108,6 +109,50 @@ def possible_spack_module_names(python_mod_name):
return results
+def simplify_name(name):
+ """Simplifies a name which may include uppercase letters, periods,
+ underscores, and pluses. In general, we want our package names to
+ only contain lowercase letters, digits, and dashes.
+
+ :param str name: The original name of the package
+ :return: The new name of the package
+ :rtype: str
+ """
+ # Convert CamelCase to Dashed-Names
+ # e.g. ImageMagick -> Image-Magick
+ # e.g. SuiteSparse -> Suite-Sparse
+ # name = re.sub('([a-z])([A-Z])', r'\1-\2', name)
+
+ # Rename Intel downloads
+ # e.g. l_daal, l_ipp, l_mkl -> daal, ipp, mkl
+ if name.startswith('l_'):
+ name = name[2:]
+
+ # Convert UPPERCASE to lowercase
+ # e.g. SAMRAI -> samrai
+ name = name.lower()
+
+ # Replace '_' and '.' with '-'
+ # e.g. backports.ssl_match_hostname -> backports-ssl-match-hostname
+ name = name.replace('_', '-')
+ name = name.replace('.', '-')
+
+ # Replace "++" with "pp" and "+" with "-plus"
+ # e.g. gtk+ -> gtk-plus
+ # e.g. voro++ -> voropp
+ name = name.replace('++', 'pp')
+ name = name.replace('+', '-plus')
+
+ # Simplify Lua package names
+ # We don't want "lua" to occur multiple times in the name
+ name = re.sub('^(lua)([^-])', r'\1-\2', name)
+
+ # Simplify Bio++ package names
+ name = re.sub('^(bpp)([^-])', r'\1-\2', name)
+
+ return name
+
+
def valid_module_name(mod_name):
"""Return whether mod_name is valid for use in Spack."""
return bool(re.match(_valid_module_re, mod_name))
diff --git a/lib/spack/spack/util/pattern.py b/lib/spack/spack/util/pattern.py
index b5731ccf08..7a1109f2d2 100644
--- a/lib/spack/spack/util/pattern.py
+++ b/lib/spack/spack/util/pattern.py
@@ -61,7 +61,7 @@ def composite(interface=None, method_list=None, container=list):
# Retrieve the base class of the composite. Inspect its methods and
# decide which ones will be overridden
def no_special_no_private(x):
- return inspect.ismethod(x) and not x.__name__.startswith('_')
+ return callable(x) and not x.__name__.startswith('_')
# Patch the behavior of each of the methods in the previous list.
# This is done associating an instance of the descriptor below to
@@ -90,42 +90,25 @@ def composite(interface=None, method_list=None, container=list):
return getter
dictionary_for_type_call = {}
+
# Construct a dictionary with the methods explicitly passed as name
if method_list is not None:
- # python@2.7: method_list_dict = {name: IterateOver(name) for name
- # in method_list}
- method_list_dict = {}
- for name in method_list:
- method_list_dict[name] = IterateOver(name)
- dictionary_for_type_call.update(method_list_dict)
+ dictionary_for_type_call.update(
+ (name, IterateOver(name)) for name in method_list)
+
# Construct a dictionary with the methods inspected from the interface
if interface is not None:
- ##########
- # python@2.7: interface_methods = {name: method for name, method in
- # inspect.getmembers(interface, predicate=no_special_no_private)}
- interface_methods = {}
- for name, method in inspect.getmembers(
- interface, predicate=no_special_no_private):
- interface_methods[name] = method
- ##########
- # python@2.7: interface_methods_dict = {name: IterateOver(name,
- # method) for name, method in interface_methods.iteritems()}
- interface_methods_dict = {}
- for name, method in interface_methods.iteritems():
- interface_methods_dict[name] = IterateOver(name, method)
- ##########
- dictionary_for_type_call.update(interface_methods_dict)
+ dictionary_for_type_call.update(
+ (name, IterateOver(name, method))
+ for name, method in inspect.getmembers(
+ interface, predicate=no_special_no_private))
+
# Get the methods that are defined in the scope of the composite
# class and override any previous definition
- ##########
- # python@2.7: cls_method = {name: method for name, method in
- # inspect.getmembers(cls, predicate=inspect.ismethod)}
- cls_method = {}
- for name, method in inspect.getmembers(
- cls, predicate=inspect.ismethod):
- cls_method[name] = method
- ##########
- dictionary_for_type_call.update(cls_method)
+ dictionary_for_type_call.update(
+ (name, method) for name, method in inspect.getmembers(
+ cls, predicate=inspect.ismethod))
+
# Generate the new class on the fly and return it
# FIXME : inherit from interface if we start to use ABC classes?
wrapper_class = type(cls.__name__, (cls, container),
diff --git a/lib/spack/spack/util/prefix.py b/lib/spack/spack/util/prefix.py
index 985d862269..bc6808f350 100644
--- a/lib/spack/spack/util/prefix.py
+++ b/lib/spack/spack/util/prefix.py
@@ -35,11 +35,11 @@ class Prefix(str):
For example, you can do something like this::
prefix = Prefix('/usr')
- print prefix.lib
- print prefix.lib64
- print prefix.bin
- print prefix.share
- print prefix.man4
+ print(prefix.lib)
+ print(prefix.lib64)
+ print(prefix.bin)
+ print(prefix.share)
+ print(prefix.man4)
This program would print:
@@ -52,7 +52,7 @@ class Prefix(str):
Prefix objects behave identically to strings. In fact, they
subclass str. So operators like + are legal:
- print "foobar " + prefix
+ print("foobar " + prefix)
This prints 'foobar /usr". All of this is meant to make custom
installs easy.
diff --git a/lib/spack/spack/util/spack_json.py b/lib/spack/spack/util/spack_json.py
index 236eef8983..82fa700821 100644
--- a/lib/spack/spack/util/spack_json.py
+++ b/lib/spack/spack/util/spack_json.py
@@ -23,7 +23,11 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
"""Simple wrapper around JSON to guarantee consistent use of load/dump. """
+import sys
import json
+from six import string_types
+from six import iteritems
+
import spack.error
__all__ = ['load', 'dump', 'SpackJSONError']
@@ -36,12 +40,12 @@ _json_dump_args = {
def load(stream):
"""Spack JSON needs to be ordered to support specs."""
- if isinstance(stream, basestring):
- return _byteify(json.loads(stream, object_hook=_byteify),
- ignore_dicts=True)
+ if isinstance(stream, string_types):
+ load = json.loads
else:
- return _byteify(json.load(stream, object_hook=_byteify),
- ignore_dicts=True)
+ load = json.load
+
+ return _strify(load(stream, object_hook=_strify), ignore_dicts=True)
def dump(data, stream=None):
@@ -52,19 +56,23 @@ def dump(data, stream=None):
return json.dump(data, stream, **_json_dump_args)
-def _byteify(data, ignore_dicts=False):
- # if this is a unicode string, return its string representation
- if isinstance(data, unicode):
- return data.encode('utf-8')
+def _strify(data, ignore_dicts=False):
+ # if this is a unicode string in python 2, return its string representation
+ if sys.version_info[0] < 3:
+ if isinstance(data, unicode):
+ return data.encode('utf-8')
+
# if this is a list of values, return list of byteified values
if isinstance(data, list):
- return [_byteify(item, ignore_dicts=True) for item in data]
+ return [_strify(item, ignore_dicts=True) for item in data]
+
# if this is a dictionary, return dictionary of byteified keys and values
# but only if we haven't already byteified it
if isinstance(data, dict) and not ignore_dicts:
- return dict((_byteify(key, ignore_dicts=True),
- _byteify(value, ignore_dicts=True)) for key, value in
- data.iteritems())
+ return dict((_strify(key, ignore_dicts=True),
+ _strify(value, ignore_dicts=True)) for key, value in
+ iteritems(data))
+
# if it's anything else, return it in its original form
return data
@@ -72,5 +80,5 @@ def _byteify(data, ignore_dicts=False):
class SpackJSONError(spack.error.SpackError):
"""Raised when there are issues with JSON parsing."""
- def __init__(self, msg, yaml_error):
- super(SpackJSONError, self).__init__(msg, str(yaml_error))
+ def __init__(self, msg, json_error):
+ super(SpackJSONError, self).__init__(msg, str(json_error))
diff --git a/lib/spack/spack/util/spack_yaml.py b/lib/spack/spack/util/spack_yaml.py
index 9d4c607908..6533004392 100644
--- a/lib/spack/spack/util/spack_yaml.py
+++ b/lib/spack/spack/util/spack_yaml.py
@@ -85,13 +85,7 @@ class OrderedLineLoader(Loader):
def construct_yaml_str(self, node):
value = self.construct_scalar(node)
- try:
- value = value.encode('ascii')
- except UnicodeEncodeError:
- pass
-
value = syaml_str(value)
-
mark(value, node)
return value
@@ -137,7 +131,7 @@ class OrderedLineLoader(Loader):
key = self.construct_object(key_node, deep=deep)
try:
hash(key)
- except TypeError, exc:
+ except TypeError as exc:
raise ConstructorError(
"while constructing a mapping", node.start_mark,
"found unacceptable key (%s)" % exc, key_node.start_mark)
@@ -154,11 +148,11 @@ class OrderedLineLoader(Loader):
# register above new constructors
OrderedLineLoader.add_constructor(
- u'tag:yaml.org,2002:map', OrderedLineLoader.construct_yaml_map)
+ 'tag:yaml.org,2002:map', OrderedLineLoader.construct_yaml_map)
OrderedLineLoader.add_constructor(
- u'tag:yaml.org,2002:seq', OrderedLineLoader.construct_yaml_seq)
+ 'tag:yaml.org,2002:seq', OrderedLineLoader.construct_yaml_seq)
OrderedLineLoader.add_constructor(
- u'tag:yaml.org,2002:str', OrderedLineLoader.construct_yaml_str)
+ 'tag:yaml.org,2002:str', OrderedLineLoader.construct_yaml_str)
class OrderedLineDumper(Dumper):
@@ -181,7 +175,7 @@ class OrderedLineDumper(Dumper):
# if it's a syaml_dict, preserve OrderedDict order.
# Otherwise do the default thing.
sort = not isinstance(mapping, syaml_dict)
- mapping = mapping.items()
+ mapping = list(mapping.items())
if sort:
mapping.sort()
diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py
index 935532266f..f803c6cea3 100644
--- a/lib/spack/spack/util/web.py
+++ b/lib/spack/spack/util/web.py
@@ -25,10 +25,23 @@
import re
import os
import sys
-import urllib2
-import urlparse
-from multiprocessing import Pool
-from HTMLParser import HTMLParser, HTMLParseError
+import traceback
+
+from six.moves.urllib.request import urlopen, Request
+from six.moves.urllib.error import URLError
+from six.moves.urllib.parse import urljoin
+import multiprocessing.pool
+
+try:
+ # Python 2 had these in the HTMLParser package.
+ from HTMLParser import HTMLParser, HTMLParseError
+except ImportError:
+ # In Python 3, things moved to html.parser
+ from html.parser import HTMLParser
+
+ # Also, HTMLParseError is deprecated and never raised.
+ class HTMLParseError(Exception):
+ pass
import llnl.util.tty as tty
@@ -55,34 +68,51 @@ class LinkParser(HTMLParser):
self.links.append(val)
-def _spider(args):
- """_spider(url, depth, max_depth)
+class NonDaemonProcess(multiprocessing.Process):
+    """Process that allows sub-processes, so pools can have sub-pools."""
+ def _get_daemon(self):
+ return False
- Fetches URL and any pages it links to up to max_depth. depth should
- initially be 1, and max_depth includes the root. This function will
- print out a warning only if the root can't be fetched; it ignores
- errors with pages that the root links to.
+ def _set_daemon(self, value):
+ pass
- This will return a list of the pages fetched, in no particular order.
+ daemon = property(_get_daemon, _set_daemon)
- Takes args as a tuple b/c it's intended to be used by a multiprocessing
- pool. Firing off all the child links at once makes the fetch MUCH
- faster for pages with lots of children.
- """
- url, visited, root, opener, depth, max_depth, raise_on_error = args
+class NonDaemonPool(multiprocessing.pool.Pool):
+ """Pool that uses non-daemon processes"""
+ Process = NonDaemonProcess
+
+
+def _spider(url, visited, root, depth, max_depth, raise_on_error):
+ """Fetches URL and any pages it links to up to max_depth.
+
+ depth should initially be zero, and max_depth is the max depth of
+ links to follow from the root.
+
+ Prints out a warning only if the root can't be fetched; it ignores
+ errors with pages that the root links to.
+
+ Returns a tuple of:
+ - pages: dict of pages visited (URL) mapped to their full text.
+ - links: set of links encountered while visiting the pages.
+ """
pages = {} # dict from page URL -> text content.
links = set() # set of all links seen on visited pages.
+ # root may end with index.html -- chop that off.
+ if root.endswith('/index.html'):
+ root = re.sub('/index.html$', '', root)
+
try:
# Make a HEAD request first to check the content type. This lets
# us ignore tarballs and gigantic files.
# It would be nice to do this with the HTTP Accept header to avoid
# one round-trip. However, most servers seem to ignore the header
# if you ask for a tarball with Accept: text/html.
- req = urllib2.Request(url)
+ req = Request(url)
req.get_method = lambda: "HEAD"
- resp = urllib2.urlopen(req, timeout=TIMEOUT)
+ resp = urlopen(req, timeout=TIMEOUT)
if "Content-type" not in resp.headers:
tty.debug("ignoring page " + url)
@@ -95,11 +125,11 @@ def _spider(args):
# Do the real GET request when we know it's just HTML.
req.get_method = lambda: "GET"
- response = urllib2.urlopen(req, timeout=TIMEOUT)
+ response = urlopen(req, timeout=TIMEOUT)
response_url = response.geturl()
# Read the page and and stick it in the map we'll return
- page = response.read()
+ page = response.read().decode('utf-8')
pages[response_url] = page
# Parse out the links in the page
@@ -109,7 +139,7 @@ def _spider(args):
while link_parser.links:
raw_link = link_parser.links.pop()
- abs_link = urlparse.urljoin(response_url, raw_link.strip())
+ abs_link = urljoin(response_url, raw_link.strip())
links.add(abs_link)
@@ -127,22 +157,24 @@ def _spider(args):
# If we're not at max depth, follow links.
if depth < max_depth:
- subcalls.append((abs_link, visited, root, None,
+ subcalls.append((abs_link, visited, root,
depth + 1, max_depth, raise_on_error))
visited.add(abs_link)
if subcalls:
+ pool = NonDaemonPool(processes=len(subcalls))
try:
- pool = Pool(processes=len(subcalls))
- results = pool.map(_spider, subcalls)
+ results = pool.map(_spider_wrapper, subcalls)
+
for sub_pages, sub_links in results:
pages.update(sub_pages)
links.update(sub_links)
+
finally:
pool.terminate()
pool.join()
- except urllib2.URLError as e:
+ except URLError as e:
tty.debug(e)
if raise_on_error:
raise spack.error.NoNetworkConnectionError(str(e), url)
@@ -159,46 +191,53 @@ def _spider(args):
except Exception as e:
# Other types of errors are completely ignored, except in debug mode.
- tty.debug("Error in _spider: %s" % e)
+ tty.debug("Error in _spider: %s:%s" % (type(e), e),
+ traceback.format_exc())
return pages, links
-def spider(root_url, **kwargs):
+def _spider_wrapper(args):
+ """Wrapper for using spider with multiprocessing."""
+ return _spider(*args)
+
+
+def spider(root_url, depth=0):
+
"""Gets web pages from a root URL.
- If depth is specified (e.g., depth=2), then this will also fetches pages
- linked from the root and its children up to depth.
+
+ If depth is specified (e.g., depth=2), then this will also follow
+ up to <depth> levels of links from the root.
This will spawn processes to fetch the children, for much improved
performance over a sequential fetch.
+
"""
- max_depth = kwargs.setdefault('depth', 1)
- pages, links = _spider((root_url, set(), root_url, None,
- 1, max_depth, False))
+ pages, links = _spider(root_url, set(), root_url, 0, depth, False)
return pages, links
-def find_versions_of_archive(*archive_urls, **kwargs):
+def find_versions_of_archive(archive_urls, list_url=None, list_depth=0):
"""Scrape web pages for new versions of a tarball.
Arguments:
archive_urls:
- URLs for different versions of a package. Typically these
- are just the tarballs from the package file itself. By
- default, this searches the parent directories of archives.
+ URL or sequence of URLs for different versions of a
+ package. Typically these are just the tarballs from the package
+ file itself. By default, this searches the parent directories
+ of archives.
Keyword Arguments:
list_url:
-
URL for a listing of archives. Spack wills scrape these
pages for download links that look like the archive URL.
list_depth:
- Max depth to follow links on list_url pages.
+ Max depth to follow links on list_url pages. Default 0.
"""
- list_url = kwargs.get('list_url', None)
- list_depth = kwargs.get('list_depth', 1)
+ if not isinstance(archive_urls, (list, tuple)):
+ archive_urls = [archive_urls]
# Generate a list of list_urls based on archive urls and any
# explicitly listed list_url in the package
@@ -229,6 +268,14 @@ def find_versions_of_archive(*archive_urls, **kwargs):
# part, not the full path.
url_regex = os.path.basename(url_regex)
+ # We need to add a / to the beginning of the regex to prevent
+ # Spack from picking up similarly named packages like:
+ # https://cran.r-project.org/src/contrib/pls_2.6-0.tar.gz
+ # https://cran.r-project.org/src/contrib/enpls_5.7.tar.gz
+ # https://cran.r-project.org/src/contrib/autopls_1.3.tar.gz
+ # https://cran.r-project.org/src/contrib/matrixpls_1.0.4.tar.gz
+ url_regex = '/' + url_regex
+
# We need to add a $ anchor to the end of the regex to prevent
# Spack from picking up signature files like:
# .asc
@@ -236,7 +283,9 @@ def find_versions_of_archive(*archive_urls, **kwargs):
# .sha256
# .sig
# However, SourceForge downloads still need to end in '/download'.
- regexes.append(url_regex + '(\/download)?$')
+ url_regex += '(\/download)?$'
+
+ regexes.append(url_regex)
# Build a dict version -> URL from any links that match the wildcards.
versions = {}
diff --git a/lib/spack/spack/version.py b/lib/spack/spack/version.py
index 0d68a709e8..89fcc9aaa7 100644
--- a/lib/spack/spack/version.py
+++ b/lib/spack/spack/version.py
@@ -47,8 +47,8 @@ import re
import numbers
from bisect import bisect_left
from functools import wraps
+from six import string_types
-from functools_backport import total_ordering
from spack.util.spack_yaml import syaml_dict
__all__ = ['Version', 'VersionRange', 'VersionList', 'ver']
@@ -111,7 +111,6 @@ def _numeric_lt(self0, other):
"""Compares two versions, knowing they're both numeric"""
-@total_ordering
class Version(object):
"""Class to represent versions"""
@@ -195,52 +194,29 @@ class Version(object):
nother = len(other.version)
return nother <= nself and self.version[:nother] == other.version
- def wildcard(self):
- """Create a regex that will match variants of this version string."""
- def a_or_n(seg):
- if type(seg) == int:
- return r'[0-9]+'
- else:
- return r'[a-zA-Z]+'
-
- version = self.version
-
- # Use a wildcard for separators, in case a version is written
- # two different ways (e.g., boost writes 1_55_0 and 1.55.0)
- sep_re = '[_.-]'
- separators = ('',) + (sep_re,) * len(self.separators)
-
- version += (version[-1],) * 2
- separators += (sep_re,) * 2
-
- segments = [a_or_n(seg) for seg in version]
-
- wc = segments[0]
- for i in xrange(1, len(separators)):
- wc += '(?:' + separators[i] + segments[i]
-
- # Add possible alpha or beta indicator at the end of each segemnt
- # We treat these specially b/c they're so common.
- wc += '(?:[a-z]|alpha|beta)?)?' * (len(segments) - 1)
- return wc
-
def __iter__(self):
return iter(self.version)
def __getitem__(self, idx):
cls = type(self)
+
if isinstance(idx, numbers.Integral):
return self.version[idx]
+
elif isinstance(idx, slice):
# Currently len(self.separators) == len(self.version) - 1
extendend_separators = self.separators + ('',)
string_arg = []
- for token, sep in zip(self.version, extendend_separators)[idx]:
+
+ pairs = zip(self.version[idx], extendend_separators[idx])
+ for token, sep in pairs:
string_arg.append(str(token))
string_arg.append(str(sep))
+
string_arg.pop() # We don't need the last separator
string_arg = ''.join(string_arg)
return cls(string_arg)
+
message = '{cls.__name__} indices must be integers'
raise TypeError(message.format(cls=cls))
@@ -323,9 +299,22 @@ class Version(object):
return (other is not None and
type(other) == Version and self.version == other.version)
+ @coerced
def __ne__(self, other):
return not (self == other)
+ @coerced
+ def __le__(self, other):
+ return self == other or self < other
+
+ @coerced
+ def __ge__(self, other):
+ return not (self < other)
+
+ @coerced
+ def __gt__(self, other):
+ return not (self == other) and not (self < other)
+
def __hash__(self):
return hash(self.version)
@@ -371,13 +360,12 @@ class Version(object):
return VersionList()
-@total_ordering
class VersionRange(object):
def __init__(self, start, end):
- if isinstance(start, basestring):
+ if isinstance(start, string_types):
start = Version(start)
- if isinstance(end, basestring):
+ if isinstance(end, string_types):
end = Version(end)
self.start = start
@@ -414,9 +402,22 @@ class VersionRange(object):
type(other) == VersionRange and
self.start == other.start and self.end == other.end)
+ @coerced
def __ne__(self, other):
return not (self == other)
+ @coerced
+ def __le__(self, other):
+ return self == other or self < other
+
+ @coerced
+ def __ge__(self, other):
+ return not (self < other)
+
+ @coerced
+ def __gt__(self, other):
+ return not (self == other) and not (self < other)
+
@property
def concrete(self):
return self.start if self.start == self.end else None
@@ -561,14 +562,13 @@ class VersionRange(object):
return out
-@total_ordering
class VersionList(object):
"""Sorted, non-redundant list of Versions and VersionRanges."""
def __init__(self, vlist=None):
self.versions = []
if vlist is not None:
- if isinstance(vlist, basestring):
+ if isinstance(vlist, string_types):
vlist = _string_to_version(vlist)
if type(vlist) == VersionList:
self.versions = vlist.versions
@@ -754,6 +754,7 @@ class VersionList(object):
def __eq__(self, other):
return other is not None and self.versions == other.versions
+ @coerced
def __ne__(self, other):
return not (self == other)
@@ -761,6 +762,18 @@ class VersionList(object):
def __lt__(self, other):
return other is not None and self.versions < other.versions
+ @coerced
+ def __le__(self, other):
+ return self == other or self < other
+
+ @coerced
+ def __ge__(self, other):
+ return not (self < other)
+
+ @coerced
+ def __gt__(self, other):
+ return not (self == other) and not (self < other)
+
def __hash__(self):
return hash(tuple(self.versions))
@@ -796,7 +809,7 @@ def ver(obj):
"""
if isinstance(obj, (list, tuple)):
return VersionList(obj)
- elif isinstance(obj, basestring):
+ elif isinstance(obj, string_types):
return _string_to_version(obj)
elif isinstance(obj, (int, float)):
return _string_to_version(str(obj))
diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash
index 61cbb27243..819dcc06ab 100755
--- a/share/spack/spack-completion.bash
+++ b/share/spack/spack-completion.bash
@@ -589,8 +589,8 @@ function _spack_providers {
then
compgen -W "-h --help" -- "$cur"
else
- compgen -W "blas daal elf golang ipp lapack mkl
- mpe mpi pil scalapack" -- "$cur"
+ compgen -W "awk blas daal elf golang ipp lapack mkl
+ mpe mpi opencl openfoam pil scalapack" -- "$cur"
fi
}
@@ -732,20 +732,21 @@ function _spack_url {
then
compgen -W "-h --help" -- "$cur"
else
- compgen -W "list parse test" -- "$cur"
+ compgen -W "list parse summary" -- "$cur"
fi
}
function _spack_url_list {
- compgen -W "-h --help -c --color -e --extrapolation -n --incorrect-name
- -v --incorrect-version" -- "$cur"
+ compgen -W "-h --help -c --color -e --extrapolation
+ -n --incorrect-name -N --correct-name
+ -v --incorrect-version -V --correct-version" -- "$cur"
}
function _spack_url_parse {
compgen -W "-h --help -s --spider" -- "$cur"
}
-function _spack_url_test {
+function _spack_url_summary {
compgen -W "-h --help" -- "$cur"
}
diff --git a/var/spack/repos/builtin.mock/packages/conflict-parent/package.py b/var/spack/repos/builtin.mock/packages/conflict-parent/package.py
new file mode 100644
index 0000000000..37805537a2
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/conflict-parent/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class ConflictParent(Package):
+ homepage = 'https://github.com/tgamblin/callpath'
+ url = 'http://github.com/tgamblin/callpath-1.0.tar.gz'
+
+ version(0.8, 'foobarbaz')
+ version(0.9, 'foobarbaz')
+ version(1.0, 'foobarbaz')
+
+ depends_on('conflict')
+
+ conflicts('^conflict~foo', when='@0.9')
+
+ def install(self, spec, prefix):
+ configure("--prefix=%s" % prefix)
+ make()
+ make("install")
+
+ def setup_environment(self, senv, renv):
+ renv.set('FOOBAR', self.name)
diff --git a/var/spack/repos/builtin.mock/packages/conflict/package.py b/var/spack/repos/builtin.mock/packages/conflict/package.py
new file mode 100644
index 0000000000..a6ba4b5c58
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/conflict/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Conflict(Package):
+ homepage = 'https://github.com/tgamblin/callpath'
+ url = 'http://github.com/tgamblin/callpath-1.0.tar.gz'
+
+ version(0.8, 'foobarbaz')
+ version(0.9, 'foobarbaz')
+ version(1.0, 'foobarbaz')
+
+ variant('foo', default=True, description='')
+
+ conflicts('%clang', when='+foo')
+
+ def install(self, spec, prefix):
+ configure("--prefix=%s" % prefix)
+ make()
+ make("install")
+
+ def setup_environment(self, senv, renv):
+ renv.set('FOOBAR', self.name)
diff --git a/var/spack/repos/builtin.mock/packages/multimethod/package.py b/var/spack/repos/builtin.mock/packages/multimethod/package.py
index 9e18d65cbb..c0e347bc93 100644
--- a/var/spack/repos/builtin.mock/packages/multimethod/package.py
+++ b/var/spack/repos/builtin.mock/packages/multimethod/package.py
@@ -22,6 +22,8 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+from six import string_types
+
from spack import *
import spack.architecture
@@ -102,14 +104,14 @@ class Multimethod(MultimethodBase):
# Make sure we can switch methods on different target
#
platform = spack.architecture.platform()
- targets = platform.targets.values()
+ targets = list(platform.targets.values())
if len(targets) > 1:
targets = targets[:-1]
for target in targets:
@when('target=' + target.name)
def different_by_target(self):
- if isinstance(self.spec.architecture.target, basestring):
+ if isinstance(self.spec.architecture.target, string_types):
return self.spec.architecture.target
else:
return self.spec.architecture.target.name
diff --git a/var/spack/repos/builtin/packages/ack/package.py b/var/spack/repos/builtin/packages/ack/package.py
index 70249aebac..dd6685a829 100644
--- a/var/spack/repos/builtin/packages/ack/package.py
+++ b/var/spack/repos/builtin/packages/ack/package.py
@@ -35,6 +35,7 @@ class Ack(Package):
homepage = "http://beyondgrep.com/"
url = "http://beyondgrep.com/ack-2.14-single-file"
+ version('2.16', '7085b5a5c76fda43ff049410870c8535', expand=False)
version('2.14', 'e74150a1609d28a70b450ef9cc2ed56b', expand=False)
depends_on('perl')
diff --git a/var/spack/repos/builtin/packages/ant/package.py b/var/spack/repos/builtin/packages/ant/package.py
index 19f03e1e53..5267b4ee0d 100644
--- a/var/spack/repos/builtin/packages/ant/package.py
+++ b/var/spack/repos/builtin/packages/ant/package.py
@@ -36,6 +36,7 @@ class Ant(Package):
# 1.10.0 requires newer Java, not yet tested....
# version('1.10.0', '2260301bb7734e34d8b96f1a5fd7979c')
+ version('1.9.9', '22c9d40dabafbec348aaada226581239')
version('1.9.8', '16253d516d5c33c4af9ef8fafcf1004b')
version('1.9.7', 'a2fd9458c76700b7be51ef12f07d4bb1')
diff --git a/var/spack/repos/builtin/packages/archer/package.py b/var/spack/repos/builtin/packages/archer/package.py
index 31a1d498da..247743ed0d 100644
--- a/var/spack/repos/builtin/packages/archer/package.py
+++ b/var/spack/repos/builtin/packages/archer/package.py
@@ -30,12 +30,12 @@ class Archer(Package):
"""ARCHER, a data race detection tool for large OpenMP applications."""
homepage = "https://github.com/PRUNERS/ARCHER"
+ url = "https://github.com/PRUNERS/archer/archive/v1.0.0.tar.gz"
- version('1.0.0b', git='https://github.com/PRUNERS/ARCHER.git',
- commit='2cf7ead36358842871d5bd9c33d499f62bf8dd38')
+ version('1.0.0', '790bfaf00b9f57490eb609ecabfe954a')
depends_on('cmake', type='build')
- depends_on('llvm+clang~gold')
+ depends_on('llvm')
depends_on('ninja', type='build')
depends_on('llvm-openmp-ompt')
diff --git a/var/spack/repos/builtin/packages/autogen/package.py b/var/spack/repos/builtin/packages/autogen/package.py
index 0bfe6159c9..e79af636b5 100644
--- a/var/spack/repos/builtin/packages/autogen/package.py
+++ b/var/spack/repos/builtin/packages/autogen/package.py
@@ -34,7 +34,7 @@ class Autogen(AutotoolsPackage):
homepage = "https://www.gnu.org/software/autogen/index.html"
url = "https://ftp.gnu.org/gnu/autogen/rel5.18.12/autogen-5.18.12.tar.gz"
list_url = "https://ftp.gnu.org/gnu/autogen"
- list_depth = 2
+ list_depth = 1
version('5.18.12', '551d15ccbf5b5fc5658da375d5003389')
diff --git a/var/spack/repos/builtin/packages/automake/package.py b/var/spack/repos/builtin/packages/automake/package.py
index 8643f5d836..4f022e5cad 100644
--- a/var/spack/repos/builtin/packages/automake/package.py
+++ b/var/spack/repos/builtin/packages/automake/package.py
@@ -29,7 +29,7 @@ class Automake(AutotoolsPackage):
"""Automake -- make file builder part of autotools"""
homepage = 'http://www.gnu.org/software/automake/'
- url = 'http://ftp.gnu.org/gnu/automake/automake-1.14.tar.gz'
+ url = 'http://ftp.gnu.org/gnu/automake/automake-1.15.tar.gz'
version('1.15', '716946a105ca228ab545fc37a70df3a3')
version('1.14.1', 'd052a3e884631b9c7892f2efce542d75')
diff --git a/var/spack/repos/builtin/packages/bats/package.py b/var/spack/repos/builtin/packages/bats/package.py
new file mode 100644
index 0000000000..e68dd7a48d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/bats/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Bats(Package):
+ """Bats is a TAP-compliant testing framework for Bash."""
+
+ homepage = "https://github.com/sstephenson/bats"
+ url = "https://github.com/sstephenson/bats/archive/v0.4.0.tar.gz"
+
+ version('0.4.0', 'aeeddc0b36b8321930bf96fce6ec41ee')
+
+ def install(self, spec, prefix):
+ bash = which("bash")
+ bash('install.sh', prefix)
diff --git a/var/spack/repos/builtin/packages/bcftools/package.py b/var/spack/repos/builtin/packages/bcftools/package.py
index a1b4a06dbb..b9954c328a 100644
--- a/var/spack/repos/builtin/packages/bcftools/package.py
+++ b/var/spack/repos/builtin/packages/bcftools/package.py
@@ -34,9 +34,11 @@ class Bcftools(Package):
homepage = "http://samtools.github.io/bcftools/"
url = "https://github.com/samtools/bcftools/releases/download/1.3.1/bcftools-1.3.1.tar.bz2"
+ version('1.4', '50ccf0a073bd70e99cdb3c8be830416e')
version('1.3.1', '575001e9fca37cab0c7a7287ad4b1cdb')
depends_on('zlib')
+ depends_on('bzip2', when="@1.4:")
def install(self, spec, prefix):
make("prefix=%s" % prefix, "all")
diff --git a/var/spack/repos/builtin/packages/bib2xhtml/package.py b/var/spack/repos/builtin/packages/bib2xhtml/package.py
index b356038180..56038eea18 100644
--- a/var/spack/repos/builtin/packages/bib2xhtml/package.py
+++ b/var/spack/repos/builtin/packages/bib2xhtml/package.py
@@ -33,9 +33,6 @@ class Bib2xhtml(Package):
version('3.0-15-gf506', 'a26ba02fe0053bbbf2277bdf0acf8645')
- def url_for_version(self, v):
- return ('http://www.spinellis.gr/sw/textproc/bib2xhtml/bib2xhtml-v%s.tar.gz' % v)
-
def install(self, spec, prefix):
# Add the bst include files to the install directory
bst_include = join_path(prefix.share, 'bib2xhtml')
diff --git a/var/spack/repos/builtin/packages/bison/package.py b/var/spack/repos/builtin/packages/bison/package.py
index e87c7da7ce..e9bfa32b39 100644
--- a/var/spack/repos/builtin/packages/bison/package.py
+++ b/var/spack/repos/builtin/packages/bison/package.py
@@ -31,9 +31,10 @@ class Bison(AutotoolsPackage):
generalized LR (GLR) parser employing LALR(1) parser tables."""
homepage = "http://www.gnu.org/software/bison/"
- url = "http://ftp.gnu.org/gnu/bison/bison-3.0.tar.gz"
+ url = "http://ftp.gnu.org/gnu/bison/bison-3.0.4.tar.gz"
version('3.0.4', 'a586e11cd4aff49c3ff6d3b6a4c9ccf8')
+ version('2.7', 'ded660799e76fb1667d594de1f7a0da9')
depends_on('m4', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/blast-plus/blast-make-fix2.5.0.diff b/var/spack/repos/builtin/packages/blast-plus/blast-make-fix2.5.0.diff
new file mode 100644
index 0000000000..5611a0e83b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/blast-plus/blast-make-fix2.5.0.diff
@@ -0,0 +1,22 @@
+--- ncbi-blast-2.5.0+-src/c++/src/build-system/Makefile.in.top 2014-11-12 17:41:55.000000000 +0100
++++ MakeFile 2016-12-19 18:00:58.000000000 +0100
+@@ -1,4 +1,4 @@
+-# $Id: Makefile.in.top 451817 2014-11-12 16:41:55Z ucko $
++# $Id$
+ # Top-level meta-makefile that simplifies building even further.
+
+ # include @builddir@/Makefile.mk
+@@ -49,9 +49,10 @@
+ for x in *.a; do \
+ $(LN_S) "$$x" "`$(BASENAME) \"$$x\" .a`-static.a"; \
+ done
+- cd $(includedir0) && find * -name CVS -prune -o -print |\
+- cpio -pd $(pincludedir)
+- $(INSTALL) -m 644 $(incdir)/* $(pincludedir)
++ #for d in $(includedir0) $(incdir); do \
++ # cd $$d && find * -name .svn prune -o -print | \
++ # cpio -pd $(pincludedir) ; \
++ #done
+ ## set up appropriate build and status directories somewhere under $(libdir)?
+
+ install-gbench:
diff --git a/var/spack/repos/builtin/packages/blast-plus/package.py b/var/spack/repos/builtin/packages/blast-plus/package.py
new file mode 100644
index 0000000000..53f09c03a3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/blast-plus/package.py
@@ -0,0 +1,202 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+#
+# This is based largely on the Homebrew science formula:
+# https://github.com/Homebrew/homebrew-science/blob/master/blast.rb
+#
+# There's one tricky bit to be resolved:
+#
+# - HDF5 builds explode, blast's configure script tries to run a program that
+# uses a variable called 'HOST' but some other bit defines a macro called
+# HOST that's defined to a string. Hilarity ensues.
+#
+#
+from spack import *
+
+
+class BlastPlus(AutotoolsPackage):
+ """Basic Local Alignment Search Tool."""
+
+ homepage = "http://blast.ncbi.nlm.nih.gov/"
+ url = "https://ftp.ncbi.nlm.nih.gov/blast/executables/blast+/2.6.0/ncbi-blast-2.6.0+-src.tar.gz"
+
+ version('2.6.0', 'c8ce8055b10c4d774d995f88c7cc6225')
+ version('2.2.30', 'f8e9a5eb368173142fe6867208b73715')
+
+ # homebrew sez: Fixed upstream in future version > 2.6
+ # But this bug sez that it will be fixed in 2.6
+ # https://github.com/Homebrew/homebrew-science/pull/4740
+ # The 2.6.0 src still matches the "before" bit of the patch
+ # so it's probably still "needed".
+ # On the other hand, the `find` command is broken and there
+ # aren't any .svn dirs in the tree, so I've updated their patch
+ # to just comment out the block.
+ patch('blast-make-fix2.5.0.diff', when="@2.5.0:2.6.0")
+
+ # See https://github.com/Homebrew/homebrew-science/issues/2337#issuecomment-170011511
+ @when('@:2.2.31')
+ def patch(self):
+ filter_file("2.95* | 2.96* | 3.* | 4.* )",
+ "2.95* | 2.96* | 3.* | 4.* | 5.* )",
+ "c++/src/build-system/configure",
+ string=True)
+
+ # No...
+ # depends_on :mysql => :optional
+
+ variant('static', default=False,
+ description='Build with static linkage')
+ variant('jpeg', default=True,
+ description='Build with jpeg support')
+ variant('png', default=True,
+ description='Build with png support')
+ variant('freetype', default=True,
+ description='Build with freetype support')
+ # variant('hdf5', default=True,
+ # description='Build with hdf5 support')
+ variant('gnutls', default=True,
+ description='Build with gnutls support')
+ variant('openssl', default=True,
+ description='Build with openssl support')
+ variant('zlib', default=True,
+ description='Build with zlib support')
+ variant('bzip2', default=True,
+ description='Build with bzip2 support')
+ variant('lzo', default=True,
+ description='Build with lzo support')
+ variant('pcre', default=True,
+ description='Build with pcre support')
+
+ depends_on('jpeg', when='+jpeg')
+ depends_on('libpng', when='+png')
+ depends_on('freetype', when='+freetype')
+ # depends_on('hdf5', when='+hdf5')
+ depends_on('gnutls', when='+gnutls')
+ depends_on('openssl', when='+openssl')
+ depends_on('zlib', when='+zlib')
+ depends_on('bzip2', when='+bzip2')
+ depends_on('lzo', when='+lzo')
+ depends_on('pcre', when='+pcre')
+
+ depends_on('python')
+
+ configure_directory = 'c++'
+
+ def configure_args(self):
+ spec = self.spec
+
+ config_args = [
+ '--with-bin-release',
+ '--without-debug',
+ '--with-mt',
+ '--with-64',
+ '--without-boost',
+ ]
+
+ if '+static' in spec:
+ config_args.append('--with-static')
+ # FIXME
+ # args << "--with-static-exe" unless OS.linux?
+ # args << "--with-dll" if build.with? "dll"
+ else:
+ config_args.extend([
+ '--with-dll',
+ '--without-static',
+ '--without-static-exe'
+ ])
+
+ if '+jpeg' in spec:
+ config_args.append(
+ '--with-jpeg={0}'.format(self.spec['jpeg'].prefix)
+ )
+ else:
+ config_args.append('--without-jpeg')
+
+ if '+png' in spec:
+ config_args.append(
+ '--with-png={0}'.format(self.spec['libpng'].prefix)
+ )
+ else:
+ config_args.append('--without-png')
+
+ if '+freetype' in spec:
+ config_args.append(
+ '--with-freetype={0}'.format(self.spec['freetype'].prefix)
+ )
+ else:
+ config_args.append('--without-freetype')
+
+ config_args.append('--without-hdf5')
+ # if '+hdf5' in spec:
+ # # FIXME
+ # config_args.append(
+ # '--with-hdf5={0}'.format(self.spec['hdf5'].prefix)
+ # )
+ # else:
+ # config_args.append('--without-hdf5')
+
+ if '+zlib' in spec:
+ config_args.append(
+ '--with-z={0}'.format(self.spec['zlib'].prefix)
+ )
+ else:
+ config_args.append('--without-z')
+
+ if '+bzip2' in spec:
+ config_args.append(
+ '--with-bz2={0}'.format(self.spec['bzip2'].prefix)
+ )
+ else:
+ config_args.append('--without-bz2')
+
+ if '+lzo' in spec:
+ config_args.append(
+ '--with-lzo={0}'.format(self.spec['lzo'].prefix)
+ )
+ else:
+ config_args.append('--without-lzo')
+
+ if '+gnutls' in spec:
+ config_args.append(
+ '--with-gnutls={0}'.format(self.spec['gnutls'].prefix)
+ )
+ else:
+ config_args.append('--without-gnutls')
+
+ if '+openssl' in spec:
+ config_args.append(
+ '--with-openssl={0}'.format(self.spec['openssl'].prefix)
+ )
+ else:
+ config_args.append('--without-openssl')
+
+ if '+pcre' in spec:
+ config_args.append(
+ '--with-pcre={0}'.format(self.spec['pcre'].prefix)
+ )
+ else:
+ config_args.append('--without-pcre')
+
+ return config_args
diff --git a/var/spack/repos/builtin/packages/blat/package.py b/var/spack/repos/builtin/packages/blat/package.py
new file mode 100644
index 0000000000..8a9cce50c1
--- /dev/null
+++ b/var/spack/repos/builtin/packages/blat/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Blat(Package):
+ """BLAT (BLAST-like alignment tool) is a pairwise sequence
+ alignment algorithm."""
+
+ homepage = "https://genome.ucsc.edu/FAQ/FAQblat.html"
+ url = "https://users.soe.ucsc.edu/~kent/src/blatSrc35.zip"
+
+ version('35', '16e546b8843b85e0b0f2fa603cd78724')
+
+ depends_on('libpng')
+
+ def setup_environment(self, spack_env, run_env):
+ spack_env.set('MACHTYPE', 'x86_64')
+
+ def install(self, spec, prefix):
+ mkdirp(prefix.bin)
+ make("BINDIR=%s" % prefix.bin)
diff --git a/var/spack/repos/builtin/packages/boost/package.py b/var/spack/repos/builtin/packages/boost/package.py
index 6f891de664..06df688ea1 100644
--- a/var/spack/repos/builtin/packages/boost/package.py
+++ b/var/spack/repos/builtin/packages/boost/package.py
@@ -40,7 +40,7 @@ class Boost(Package):
homepage = "http://www.boost.org"
url = "http://downloads.sourceforge.net/project/boost/boost/1.55.0/boost_1_55_0.tar.bz2"
list_url = "http://sourceforge.net/projects/boost/files/boost/"
- list_depth = 2
+ list_depth = 1
version('1.63.0', '1c837ecd990bb022d07e7aab32b09847')
version('1.62.0', '5fb94629535c19e48703bdb2b2e9490f')
@@ -141,14 +141,8 @@ class Boost(Package):
patch('xl_1_62_0_le.patch', when='@1.62.0%xl')
def url_for_version(self, version):
- """
- Handle Boost's weird URLs,
- which write the version two different ways.
- """
- parts = [str(p) for p in Version(version)]
- dots = ".".join(parts)
- underscores = "_".join(parts)
- return "http://downloads.sourceforge.net/project/boost/boost/%s/boost_%s.tar.bz2" % (dots, underscores)
+ url = "http://downloads.sourceforge.net/project/boost/boost/{0}/boost_{1}.tar.bz2"
+ return url.format(version.dotted, version.underscored)
def determine_toolset(self, spec):
if spec.satisfies("platform=darwin"):
diff --git a/var/spack/repos/builtin/packages/bowtie2/bowtie2-2.5.patch b/var/spack/repos/builtin/packages/bowtie2/bowtie2-2.2.5.patch
index 290be39c73..290be39c73 100644
--- a/var/spack/repos/builtin/packages/bowtie2/bowtie2-2.5.patch
+++ b/var/spack/repos/builtin/packages/bowtie2/bowtie2-2.2.5.patch
diff --git a/var/spack/repos/builtin/packages/bowtie2/bowtie2-2.3.1.patch b/var/spack/repos/builtin/packages/bowtie2/bowtie2-2.3.1.patch
new file mode 100644
index 0000000000..0f4a15b1a1
--- /dev/null
+++ b/var/spack/repos/builtin/packages/bowtie2/bowtie2-2.3.1.patch
@@ -0,0 +1,16 @@
+--- Makefile.orig 2017-03-24 20:43:00.304532976 -0700
++++ Makefile 2017-03-24 20:48:59.644532037 -0700
+@@ -26,10 +26,10 @@
+
+ INC =
+ LIBS = -lreadline -ltermcap -lz
+-GCC_PREFIX = $(shell dirname `which gcc`)
++GCC_PREFIX =
+ GCC_SUFFIX =
+-CC ?= $(GCC_PREFIX)/gcc$(GCC_SUFFIX)
+-CPP ?= $(GCC_PREFIX)/g++$(GCC_SUFFIX)
++CC = cc
++CPP = c++
+ CXX ?= $(CPP)
+ HEADERS = $(wildcard *.h)
+ BOWTIE_MM = 1
diff --git a/var/spack/repos/builtin/packages/bowtie2/package.py b/var/spack/repos/builtin/packages/bowtie2/package.py
index a8a1a34ed4..dc850d817f 100644
--- a/var/spack/repos/builtin/packages/bowtie2/package.py
+++ b/var/spack/repos/builtin/packages/bowtie2/package.py
@@ -27,12 +27,19 @@ from glob import glob
class Bowtie2(Package):
- """Description"""
+ """Bowtie 2 is an ultrafast and memory-efficient tool for aligning
+ sequencing reads to long reference sequences"""
+
homepage = "bowtie-bio.sourceforge.net/bowtie2/index.shtml"
- version('2.2.5', '51fa97a862d248d7ee660efc1147c75f',
- url="http://downloads.sourceforge.net/project/bowtie-bio/bowtie2/2.2.5/bowtie2-2.2.5-source.zip")
+ url = "http://downloads.sourceforge.net/project/bowtie-bio/bowtie2/2.3.1/bowtie2-2.3.1-source.zip"
+
+ version('2.3.1', 'b4efa22612e98e0c23de3d2c9f2f2478')
+ version('2.2.5', '51fa97a862d248d7ee660efc1147c75f')
+
+ depends_on('tbb', when='@2.3.1')
- patch('bowtie2-2.5.patch', when='@2.2.5', level=0)
+ patch('bowtie2-2.2.5.patch', when='@2.2.5', level=0)
+ patch('bowtie2-2.3.1.patch', when='@2.3.1', level=0)
def install(self, spec, prefix):
make()
diff --git a/var/spack/repos/builtin/packages/cddlib/package.py b/var/spack/repos/builtin/packages/cddlib/package.py
index 50dc5ad472..002c302599 100644
--- a/var/spack/repos/builtin/packages/cddlib/package.py
+++ b/var/spack/repos/builtin/packages/cddlib/package.py
@@ -31,19 +31,9 @@ class Cddlib(AutotoolsPackage):
Method of Motzkin et al. for generating all vertices (i.e. extreme points)
and extreme rays of a general convex polyhedron in R^d given by a system
of linear inequalities"""
- homepage = "https://www.inf.ethz.ch/personal/fukudak/cdd_home/"
- # This is the original download url. It is currently down [2016-08-23],
- # but should be reinstated or updated once the issue is resolved.
- # url = "ftp://ftp.ifor.math.ethz.ch/pub/fukuda/cdd/cddlib-094h.tar.gz"
- url = "http://pkgs.fedoraproject.org/lookaside/pkgs/cddlib/cddlib-094h.tar.gz/1467d270860bbcb26d3ebae424690e7c/cddlib-094h.tar.gz"
- def url_for_version(self, version):
- # Since the commit id is part of the version, we can't
- # auto-generate the string, and we need to explicitly list all
- # known versions here. Currently, there is only one version.
- if str(version) == '0.94h':
- return "http://pkgs.fedoraproject.org/lookaside/pkgs/cddlib/cddlib-094h.tar.gz/1467d270860bbcb26d3ebae424690e7c/cddlib-094h.tar.gz"
- raise InstallError("Unsupported version %s" % str(version))
+ homepage = "https://www.inf.ethz.ch/personal/fukudak/cdd_home/"
+ url = "ftp://ftp.math.ethz.ch/users/fukudak/cdd/cddlib-094h.tar.gz"
version('0.94h', '1467d270860bbcb26d3ebae424690e7c')
@@ -51,3 +41,7 @@ class Cddlib(AutotoolsPackage):
depends_on("gmp")
depends_on("libtool", type="build")
+
+ def url_for_version(self, version):
+ url = "ftp://ftp.math.ethz.ch/users/fukudak/cdd/cddlib-{0}.tar.gz"
+ return url.format(version.joined)
diff --git a/var/spack/repos/builtin/packages/cdo/package.py b/var/spack/repos/builtin/packages/cdo/package.py
index 775dc31cf3..90039d4479 100644
--- a/var/spack/repos/builtin/packages/cdo/package.py
+++ b/var/spack/repos/builtin/packages/cdo/package.py
@@ -30,9 +30,13 @@ class Cdo(Package):
Climate and NWP model Data. """
homepage = "https://code.zmaw.de/projects/cdo"
+ url = "https://code.zmaw.de/attachments/download/12760/cdo-1.7.2.tar.gz"
+ list_url = "https://code.zmaw.de/projects/cdo/files"
- version('1.7.2', 'f08e4ce8739a4f2b63fc81a24db3ee31', url='https://code.zmaw.de/attachments/download/12760/cdo-1.7.2.tar.gz')
- version('1.6.9', 'bf0997bf20e812f35e10188a930e24e2', url='https://code.zmaw.de/attachments/download/10198/cdo-1.6.9.tar.gz')
+ version('1.7.2', 'f08e4ce8739a4f2b63fc81a24db3ee31',
+ url='https://code.zmaw.de/attachments/download/12760/cdo-1.7.2.tar.gz')
+ version('1.6.9', 'bf0997bf20e812f35e10188a930e24e2',
+ url='https://code.zmaw.de/attachments/download/10198/cdo-1.6.9.tar.gz')
variant('szip', default=True, description='Enable szip compression for GRIB1')
variant('hdf5', default=False, description='Enable HDF5 support')
@@ -54,7 +58,7 @@ class Cdo(Package):
depends_on('proj', when='+proj')
depends_on('curl', when='+curl')
depends_on('fftw', when='+fftw')
- depends_on('magics', when='+magics')
+ depends_on('magics', when='+magics')
def install(self, spec, prefix):
config_args = ["--prefix=" + prefix,
diff --git a/var/spack/repos/builtin/packages/cfitsio/package.py b/var/spack/repos/builtin/packages/cfitsio/package.py
index 811b3ca9bc..b382f87f5b 100644
--- a/var/spack/repos/builtin/packages/cfitsio/package.py
+++ b/var/spack/repos/builtin/packages/cfitsio/package.py
@@ -31,6 +31,7 @@ class Cfitsio(AutotoolsPackage):
"""
homepage = 'http://heasarc.gsfc.nasa.gov/fitsio/'
+ url = 'http://heasarc.gsfc.nasa.gov/FTP/software/fitsio/c/cfitsio3410.tar.gz'
version('3.410', '8a4a66fcdd816aae41768baa0b025552')
version('3.370', 'abebd2d02ba5b0503c633581e3bfa116')
diff --git a/var/spack/repos/builtin/packages/cmake/package.py b/var/spack/repos/builtin/packages/cmake/package.py
index 4ff9615016..1dd480d675 100644
--- a/var/spack/repos/builtin/packages/cmake/package.py
+++ b/var/spack/repos/builtin/packages/cmake/package.py
@@ -31,7 +31,7 @@ class Cmake(Package):
homepage = 'https://www.cmake.org'
url = 'https://cmake.org/files/v3.4/cmake-3.4.3.tar.gz'
list_url = 'https://cmake.org/files/'
- list_depth = 2
+ list_depth = 1
version('3.7.2', '79bd7e65cd81ea3aa2619484ad6ff25a')
version('3.7.1', 'd031d5a06e9f1c5367cdfc56fbd2a1c8')
diff --git a/var/spack/repos/builtin/packages/cp2k/package.py b/var/spack/repos/builtin/packages/cp2k/package.py
index 0909d8cb81..9bc1026ba9 100644
--- a/var/spack/repos/builtin/packages/cp2k/package.py
+++ b/var/spack/repos/builtin/packages/cp2k/package.py
@@ -37,6 +37,7 @@ class Cp2k(Package):
homepage = 'https://www.cp2k.org'
url = 'https://sourceforge.net/projects/cp2k/files/cp2k-3.0.tar.bz2'
+ version('4.1', 'b0534b530592de15ac89828b1541185e')
version('3.0', 'c05bc47335f68597a310b1ed75601d35')
variant('mpi', default=True, description='Enable MPI support')
@@ -47,14 +48,18 @@ class Cp2k(Package):
depends_on('lapack')
depends_on('blas')
depends_on('fftw')
- depends_on('libint@:1.2', when='@3.0')
+ depends_on('libint@:1.2', when='@3.0,4.1')
depends_on('mpi', when='+mpi')
depends_on('scalapack', when='+mpi')
depends_on('plumed+shared+mpi', when='+plumed+mpi')
depends_on('plumed+shared~mpi', when='+plumed~mpi')
- depends_on('pexsi', when='+mpi')
- depends_on('wannier90', when='+mpi')
+ depends_on('pexsi+fortran', when='+mpi')
+
+ # Apparently cp2k@4.1 needs an "experimental" version of libwannier.a
+ # which is only available contacting the developer directly. See INSTALL
+ # in the stage of cp2k@4.1
+ depends_on('wannier90', when='@3.0+mpi')
depends_on('elpa', when='+mpi')
# TODO : add dependency on libsmm, libxsmm
@@ -94,6 +99,8 @@ class Cp2k(Package):
fcflags.append(spec['fftw'].cppflags)
fftw = find_libraries('libfftw3', root=spec['fftw'].prefix.lib)
ldflags = [fftw.search_flags]
+ if 'superlu-dist@4.3' in spec:
+ ldflags = ['-Wl,--allow-multiple-definition'] + ldflags
libs = [
join_path(spec['libint'].prefix.lib, 'libint.so'),
join_path(spec['libint'].prefix.lib, 'libderiv.so'),
@@ -147,10 +154,12 @@ class Cp2k(Package):
cppflags.extend([
'-D__parallel',
'-D__LIBPEXSI',
- '-D__WANNIER90',
'-D__ELPA3',
'-D__SCALAPACK'
])
+ if 'wannier90' in spec:
+ cppflags.append('-D__WANNIER90')
+
fcflags.extend([
# spec['elpa:fortran'].cppflags
'-I' + join_path(
@@ -167,7 +176,6 @@ class Cp2k(Package):
libs.extend([
join_path(spec['elpa'].prefix.lib,
'libelpa.{0}'.format(dso_suffix)),
- join_path(spec['wannier90'].prefix.lib, 'libwannier.a'),
join_path(spec['pexsi'].prefix.lib, 'libpexsi.a'),
join_path(spec['superlu-dist'].prefix.lib,
'libsuperlu_dist.a'),
@@ -180,6 +188,13 @@ class Cp2k(Package):
'libmetis.{0}'.format(dso_suffix)
),
])
+
+ if 'wannier90' in spec:
+ wannier = join_path(
+ spec['wannier90'].prefix.lib, 'libwannier.a'
+ )
+ libs.append(wannier)
+
libs.extend(scalapack)
libs.extend(self.spec['mpi'].mpicxx_shared_libs)
libs.extend(self.compiler.stdcxx_libs)
diff --git a/var/spack/repos/builtin/packages/cppad/package.py b/var/spack/repos/builtin/packages/cppad/package.py
index 1ec31bbeef..e17a070294 100644
--- a/var/spack/repos/builtin/packages/cppad/package.py
+++ b/var/spack/repos/builtin/packages/cppad/package.py
@@ -29,16 +29,13 @@ class Cppad(CMakePackage):
"""A Package for Differentiation of C++ Algorithms."""
homepage = "https://www.coin-or.org/CppAD/"
+ url = "http://www.coin-or.org/download/source/CppAD/cppad-20170114.gpl.tgz"
version('20170114', '565a534dc813fa1289764222cd8c11ea')
version('develop', git='https://github.com/coin-or/CppAD.git')
depends_on('cmake', type='build')
- def url_for_version(self, version):
- """Handle version-based custom URLs."""
- return "http://www.coin-or.org/download/source/CppAD/cppad-%s.gpl.tgz" % (version)
-
def cmake_args(self):
# This package does not obey CMAKE_INSTALL_PREFIX
args = [
diff --git a/var/spack/repos/builtin/packages/cppcheck/package.py b/var/spack/repos/builtin/packages/cppcheck/package.py
index fd48fcb7e3..254674c7bf 100644
--- a/var/spack/repos/builtin/packages/cppcheck/package.py
+++ b/var/spack/repos/builtin/packages/cppcheck/package.py
@@ -23,6 +23,8 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
+import os
+import shutil
class Cppcheck(Package):
@@ -30,11 +32,13 @@ class Cppcheck(Package):
homepage = "http://cppcheck.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/cppcheck/cppcheck/1.68/cppcheck-1.68.tar.bz2"
+ version('1.72', '2bd36f91ae0191ef5273bb7f6dc0d72e')
version('1.68', 'c015195f5d61a542f350269030150708')
def install(self, spec, prefix):
# cppcheck does not have a configure script
- make()
+ make("CFGDIR=%s" % os.path.join(prefix, 'cfg'))
# manually install the final cppcheck binary
mkdirp(prefix.bin)
install('cppcheck', prefix.bin)
+ shutil.copytree('cfg', os.path.join(prefix, 'cfg'))
diff --git a/var/spack/repos/builtin/packages/cryptopp/package.py b/var/spack/repos/builtin/packages/cryptopp/package.py
index c92f262a9a..142bd4f253 100644
--- a/var/spack/repos/builtin/packages/cryptopp/package.py
+++ b/var/spack/repos/builtin/packages/cryptopp/package.py
@@ -36,6 +36,7 @@ class Cryptopp(Package):
"""
homepage = "http://www.cryptopp.com"
+ url = "http://www.cryptopp.com/cryptopp563.zip"
version('5.6.3', '3c5b70e2ec98b7a24988734446242d07')
version('5.6.2', '7ed022585698df48e65ce9218f6c6a67')
diff --git a/var/spack/repos/builtin/packages/cub/package.py b/var/spack/repos/builtin/packages/cub/package.py
new file mode 100644
index 0000000000..9c0c7776bd
--- /dev/null
+++ b/var/spack/repos/builtin/packages/cub/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+from distutils.dir_util import copy_tree
+
+
+class Cub(Package):
+ """CUB is a C++ header library of cooperative threadblock primitives
+ and other utilities for CUDA kernel programming."""
+
+ homepage = "https://nvlabs.github.com/cub"
+ url = "https://github.com/NVlabs/cub/archive/1.6.4.zip"
+
+ version('1.6.4', '924fc12c0efb17264c3ad2d611ed1c51')
+
+ def install(self, spec, prefix):
+ copy_tree('cub', prefix.include)
diff --git a/var/spack/repos/builtin/packages/cvs/package.py b/var/spack/repos/builtin/packages/cvs/package.py
new file mode 100644
index 0000000000..e84c1ed92f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/cvs/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Cvs(AutotoolsPackage):
+    """CVS, a very traditional source control system"""
+ homepage = "http://www.nongnu.org/cvs/"
+ url = "https://ftp.gnu.org/non-gnu/cvs/source/feature/1.12.13/cvs-1.12.13.tar.bz2"
+
+ version('1.12.13', '93a8dacc6ff0e723a130835713235863f1f5ada9')
+
+ parallel = False
diff --git a/var/spack/repos/builtin/packages/dakota/package.py b/var/spack/repos/builtin/packages/dakota/package.py
index e8f7d0889b..c40229f83b 100644
--- a/var/spack/repos/builtin/packages/dakota/package.py
+++ b/var/spack/repos/builtin/packages/dakota/package.py
@@ -46,7 +46,6 @@ class Dakota(Package):
homepage = 'https://dakota.sandia.gov/'
url = 'https://dakota.sandia.gov/sites/default/files/distributions/public/dakota-6.3-public.src.tar.gz'
- _url_str = 'https://dakota.sandia.gov/sites/default/files/distributions/public/dakota-{version}-public.src.tar.gz'
version('6.3', '05a58d209fae604af234c894c3f73f6d')
@@ -64,9 +63,6 @@ class Dakota(Package):
depends_on('boost')
depends_on('cmake', type='build')
- def url_for_version(self, version):
- return Dakota._url_str.format(version=version)
-
def install(self, spec, prefix):
options = []
options.extend(std_cmake_args)
diff --git a/var/spack/repos/builtin/packages/datamash/package.py b/var/spack/repos/builtin/packages/datamash/package.py
index 4cf7d24ef1..a11b156c46 100644
--- a/var/spack/repos/builtin/packages/datamash/package.py
+++ b/var/spack/repos/builtin/packages/datamash/package.py
@@ -37,3 +37,5 @@ class Datamash(AutotoolsPackage):
version('1.0.7', '9f317bab07454032ba9c068e7f17b04b')
version('1.0.6', 'ff26fdef0f343cb695cf1853e14a1a5b')
version('1.0.5', '9a29549dc7feca49fdc5fab696614e11')
+
+ build_directory = 'spack-build'
diff --git a/var/spack/repos/builtin/packages/direnv/package.py b/var/spack/repos/builtin/packages/direnv/package.py
new file mode 100644
index 0000000000..336ea9f907
--- /dev/null
+++ b/var/spack/repos/builtin/packages/direnv/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Direnv(Package):
+ """direnv is an environment switcher for the shell."""
+
+ homepage = "https://direnv.net/"
+ url = "https://github.com/direnv/direnv/archive/v2.11.3.tar.gz"
+
+ version('2.11.3', '5b9728e2dabed232b4932849647fd6e5')
+
+ depends_on('go', type='build')
+
+ def install(self, spec, prefix):
+ make('install', "DESTDIR=%s" % prefix)
diff --git a/var/spack/repos/builtin/packages/double-conversion/package.py b/var/spack/repos/builtin/packages/double-conversion/package.py
new file mode 100644
index 0000000000..1400996853
--- /dev/null
+++ b/var/spack/repos/builtin/packages/double-conversion/package.py
@@ -0,0 +1,47 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class DoubleConversion(CMakePackage):
+ """This project (double-conversion) provides binary-decimal and decimal-binary
+ routines for IEEE doubles.
+
+ The library consists of efficient conversion routines that have been
+ extracted from the V8 JavaScript engine. The code has been refactored
+ and improved so that it can be used more easily in other projects.
+
+ There is extensive documentation in src/double-conversion.h. Other examples
+ can be found in test/cctest/test-conversions.cc.
+ """
+
+ homepage = "https://github.com/google/double-conversion"
+ url = "https://github.com/google/double-conversion/archive/v2.0.1.zip"
+
+ version('2.0.1', '5be77f780841af528e92986d46620b1e')
+ version('2.0.0', '045f7927246c368b57dcdb844ec61211')
+ version('1.1.5', 'ddf782373e2630c07b2691c31cee0b24')
+ version('1.1.4', '5df72704406d93cd54c73d73f02e2744')
+ version('1.1.3', 'b312152c8c66c80449d5e0325b94502e')
diff --git a/var/spack/repos/builtin/packages/easybuild/package.py b/var/spack/repos/builtin/packages/easybuild/package.py
new file mode 100644
index 0000000000..156601ed65
--- /dev/null
+++ b/var/spack/repos/builtin/packages/easybuild/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2017, Kenneth Hoste
+#
+# This file is part of Spack.
+# Created by Kenneth Hoste, kenneth.hoste@gmail.com
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Easybuild(PythonPackage):
+ """EasyBuild is a software build and installation framework
+ for (scientific) software on HPC systems.
+ """
+
+ homepage = 'http://hpcugent.github.io/easybuild/'
+ url = 'https://pypi.io/packages/source/e/easybuild/easybuild-3.1.2.tar.gz'
+
+ version('3.1.2', 'c2d901c2a71f51b24890fa69c3a46383')
+
+ depends_on('py-easybuild-framework@3.1.2', when='@3.1.2', type='run')
+ depends_on('py-easybuild-easyblocks@3.1.2', when='@3.1.2', type='run')
+ depends_on('py-easybuild-easyconfigs@3.1.2', when='@3.1.2', type='run')
diff --git a/var/spack/repos/builtin/packages/elfutils/package.py b/var/spack/repos/builtin/packages/elfutils/package.py
index 16c1f6d247..2594d73c37 100644
--- a/var/spack/repos/builtin/packages/elfutils/package.py
+++ b/var/spack/repos/builtin/packages/elfutils/package.py
@@ -37,10 +37,10 @@ class Elfutils(AutotoolsPackage):
url = "https://sourceware.org/elfutils/ftp/0.168/elfutils-0.168.tar.bz2"
list_url = "https://sourceware.org/elfutils/ftp"
- list_depth = 2
+ list_depth = 1
- version('0.168','52adfa40758d0d39e5d5c57689bf38d6')
- version('0.163','77ce87f259987d2e54e4d87b86cbee41')
+ version('0.168', '52adfa40758d0d39e5d5c57689bf38d6')
+ version('0.163', '77ce87f259987d2e54e4d87b86cbee41', preferred=True)
provides('elf@1')
diff --git a/var/spack/repos/builtin/packages/elpa/package.py b/var/spack/repos/builtin/packages/elpa/package.py
index fe249269c7..033cb1e36c 100644
--- a/var/spack/repos/builtin/packages/elpa/package.py
+++ b/var/spack/repos/builtin/packages/elpa/package.py
@@ -26,24 +26,15 @@
from spack import *
-class Elpa(Package):
- """
- Eigenvalue solvers for Petaflop-Applications (ELPA)
- """
+class Elpa(AutotoolsPackage):
+ """Eigenvalue solvers for Petaflop-Applications (ELPA)"""
homepage = 'http://elpa.mpcdf.mpg.de/'
url = 'http://elpa.mpcdf.mpg.de/elpa-2015.11.001.tar.gz'
- version(
- '2016.05.003',
- '88a9f3f3bfb63e16509dd1be089dcf2c',
- url='http://elpa.mpcdf.mpg.de/html/Releases/2016.05.003/elpa-2016.05.003.tar.gz'
- )
- version(
- '2015.11.001',
- 'de0f35b7ee7c971fd0dca35c900b87e6',
- url='http://elpa.mpcdf.mpg.de/elpa-2015.11.001.tar.gz'
- )
+ version('2016.05.004', 'c0dd3a53055536fc3a2a221e78d8b376')
+ version('2016.05.003', '88a9f3f3bfb63e16509dd1be089dcf2c')
+ version('2015.11.001', 'de0f35b7ee7c971fd0dca35c900b87e6')
variant('openmp', default=False, description='Activates OpenMP support')
@@ -52,30 +43,26 @@ class Elpa(Package):
depends_on('lapack')
depends_on('scalapack')
- def install(self, spec, prefix):
+ def url_for_version(self, version):
+ t = 'http://elpa.mpcdf.mpg.de/html/Releases/{0}/elpa-{0}.tar.gz'
+ if version < Version('2016.05.003'):
+ t = 'http://elpa.mpcdf.mpg.de/elpa-{0}.tar.gz'
+ return t.format(str(version))
- options = [
- 'CC={0}'.format(self.spec['mpi'].mpicc),
- 'FC={0}'.format(self.spec['mpi'].mpifc),
- 'CXX={0}'.format(self.spec['mpi'].mpicxx),
- 'FCFLAGS={0}'.format(
- spec['lapack'].libs.joined()
- ),
- 'LDFLAGS={0}'.format(
- spec['lapack'].libs.joined()
- ),
- 'SCALAPACK_FCFLAGS={0}'.format(
- spec['scalapack'].libs.joined()
- ),
- 'SCALAPACK_LDFLAGS={0}'.format(
- spec['scalapack'].libs.joined()
- ),
- '--prefix={0}'.format(self.prefix)
- ]
+ def setup_environment(self, spack_env, run_env):
- if '+openmp' in spec:
- options.append("--enable-openmp")
+ spec = self.spec
+
+ spack_env.set('CC', spec['mpi'].mpicc)
+ spack_env.set('FC', spec['mpi'].mpifc)
+ spack_env.set('CXX', spec['mpi'].mpicxx)
- configure(*options)
- make()
- make("install")
+ spack_env.set('LDFLAGS', spec['lapack'].libs.search_flags)
+ spack_env.set('LIBS', spec['lapack'].libs.link_flags)
+ spack_env.set('SCALAPACK_LDFLAGS', spec['scalapack'].libs.joined())
+
+ def configure_args(self):
+ options = []
+ if '+openmp' in self.spec:
+ options.append("--enable-openmp")
+ return options
diff --git a/var/spack/repos/builtin/packages/exonerate/package.py b/var/spack/repos/builtin/packages/exonerate/package.py
index 7921e64058..2615d859d6 100644
--- a/var/spack/repos/builtin/packages/exonerate/package.py
+++ b/var/spack/repos/builtin/packages/exonerate/package.py
@@ -29,7 +29,7 @@ class Exonerate(Package):
"""Pairwise sequence alignment of DNA and proteins"""
homepage = "http://www.ebi.ac.uk/about/vertebrate-genomics/software/exonerate"
- url = "http://ftp.ebi.ac.uk/pub/software/vertebrategenomics/exonerate/exonerate-2.2.0.tar.gz"
+ url = "http://ftp.ebi.ac.uk/pub/software/vertebrategenomics/exonerate/exonerate-2.4.0.tar.gz"
version('2.4.0', '126fbade003b80b663a1d530c56f1904')
diff --git a/var/spack/repos/builtin/packages/ferret/package.py b/var/spack/repos/builtin/packages/ferret/package.py
index 15ddfcee16..4dcff54b8f 100644
--- a/var/spack/repos/builtin/packages/ferret/package.py
+++ b/var/spack/repos/builtin/packages/ferret/package.py
@@ -31,11 +31,10 @@ class Ferret(Package):
"""Ferret is an interactive computer visualization and analysis environment
designed to meet the needs of oceanographers and meteorologists
analyzing large and complex gridded data sets."""
- homepage = "http://ferret.noaa.gov/Ferret/"
- url = "ftp://ftp.pmel.noaa.gov/ferret/pub/source/fer_source.tar.gz"
+ homepage = "http://ferret.pmel.noaa.gov/Ferret/home"
+ url = "ftp://ftp.pmel.noaa.gov/ferret/pub/source/fer_source.v696.tar.gz"
- version('6.96', '51722027c864369f41bab5751dfff8cc',
- url="ftp://ftp.pmel.noaa.gov/ferret/pub/source/fer_source.tar.gz")
+ version('6.96', '51722027c864369f41bab5751dfff8cc')
depends_on("hdf5~mpi~fortran")
depends_on("netcdf~mpi")
@@ -43,6 +42,10 @@ class Ferret(Package):
depends_on("readline")
depends_on("zlib")
+ def url_for_version(self, version):
+ return "ftp://ftp.pmel.noaa.gov/ferret/pub/source/fer_source.v{0}.tar.gz".format(
+ version.joined)
+
def patch(self):
hdf5_prefix = self.spec['hdf5'].prefix
netcdff_prefix = self.spec['netcdf-fortran'].prefix
diff --git a/var/spack/repos/builtin/packages/fio/package.py b/var/spack/repos/builtin/packages/fio/package.py
new file mode 100644
index 0000000000..09554968ef
--- /dev/null
+++ b/var/spack/repos/builtin/packages/fio/package.py
@@ -0,0 +1,56 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Fio(AutotoolsPackage):
+ """Flexible I/O Tester."""
+
+ homepage = "https://github.com/axboe/fio"
+ url = "https://github.com/axboe/fio/archive/fio-2.19.tar.gz"
+
+ version('2.19', '67125b60210a4daa689a4626fc66c612')
+
+ variant('gui', default=False, description='Enable building of gtk gfio')
+ variant('doc', default=False, description='Generate documentation')
+
+ depends_on('gtkplus@2.18:', when='+gui')
+ depends_on('cairo', when='+gui')
+
+ depends_on('py-sphinx', type='build', when='+doc')
+
+ def configure_args(self):
+ config_args = []
+
+ if '+gui' in self.spec:
+ config_args.append('--enable-gfio')
+
+ return config_args
+
+ @run_after('build')
+ def build_docs(self):
+ if '+doc' in self.spec:
+ make('-C', 'doc', 'html')
+ make('-C', 'doc', 'man')
diff --git a/var/spack/repos/builtin/packages/foam-extend/package.py b/var/spack/repos/builtin/packages/foam-extend/package.py
index e009d64f51..559cc45d7a 100644
--- a/var/spack/repos/builtin/packages/foam-extend/package.py
+++ b/var/spack/repos/builtin/packages/foam-extend/package.py
@@ -9,6 +9,8 @@
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
+# License
+# -------
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
@@ -21,17 +23,51 @@
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+# Legal Notice
+# ------------
+# OPENFOAM is a trademark owned by OpenCFD Ltd
+# (producer and distributor of the OpenFOAM software via www.openfoam.com).
+# The trademark information must remain visible and unadulterated in this
+# file and via the "spack info" and comply with the term set by
+# http://openfoam.com/legal/trademark-policy.php
+#
+# This file is not part of OpenFOAM, nor does it constitute a component of an
+# OpenFOAM distribution.
+#
+##############################################################################
+#
+# Notes
+# - mpi handling: WM_MPLIB=USER and provide wmake rules for special purpose
+#   'USER' and 'USERMPI' mpi implementations.
+# The choice of 'USER' vs 'USERMPI' may change in the future.
+#
+# Changes
+# 2017-03-28 Mark Olesen <mark.olesen@esi-group.com>
+# - avoid installing intermediate targets.
+# - reworked to mirror the openfoam-com package.
+# If changes are needed here, consider if they need applying there too.
+#
##############################################################################
from spack import *
from spack.environment import *
import multiprocessing
+import glob
+import re
+import shutil
import os
+from os.path import isdir, isfile
+from spack.pkg.builtin.openfoam_com import *
class FoamExtend(Package):
- """The foam-extend project is a fork of the OpenFOAM open source library
- for Computational Fluid Dynamics (CFD)."""
+ """The Extend Project is a fork of the OpenFOAM opensource library
+ for Computational Fluid Dynamics (CFD).
+ This offering is not approved or endorsed by OpenCFD Ltd,
+ producer and distributor of the OpenFOAM software via www.openfoam.com,
+ and owner of the OPENFOAM trademark.
+ """
homepage = "http://www.extend-project.de/"
@@ -40,207 +76,235 @@ class FoamExtend(Package):
version('3.1', git='http://git.code.sf.net/p/foam-extend/foam-extend-3.1')
version('3.0', git='http://git.code.sf.net/p/foam-extend/foam-extend-3.0')
- variant('paraview', default=False, description='Enable ParaFOAM')
- variant(
- 'scotch', default=True,
- description='Activate Scotch as a possible decomposition library')
- variant(
- 'ptscotch', default=True,
- description='Activate PT-Scotch as a possible decomposition library')
- variant(
- 'metis', default=True,
- description='Activate Metis as a possible decomposition library')
- variant(
- 'parmetis', default=True,
- description='Activate Parmetis as a possible decomposition library')
- variant(
- 'parmgridgen', default=True,
- description='Activate Parmgridgen support')
- variant(
- 'source', default=True,
- description='Installs also the source folder')
-
- supported_compilers = {'clang': 'Clang', 'gcc': 'Gcc', 'intel': 'Icc'}
-
+ # variant('int64', default=False,
+ # description='Compile with 64-bit labels')
+ variant('float32', default=False,
+ description='Compile with 32-bit scalar (single-precision)')
+
+ variant('paraview', default=False,
+ description='Build paraview plugins (eg, paraFoam)')
+ variant('scotch', default=True,
+ description='With scotch for decomposition')
+ variant('ptscotch', default=True,
+ description='With ptscotch for decomposition')
+ variant('metis', default=True,
+ description='With metis for decomposition')
+ variant('parmetis', default=True,
+ description='With parmetis for decomposition')
+ variant('parmgridgen', default=True,
+ description='With parmgridgen support')
+ variant('source', default=True,
+ description='Install library/application sources and tutorials')
+
+ #: Map spack compiler names to OpenFOAM compiler names
+ # By default, simply capitalize the first letter
+ compiler_mapping = {'intel': 'icc'}
+
+ provides('openfoam')
depends_on('mpi')
depends_on('python')
- depends_on('flex')
depends_on('zlib')
+ depends_on('flex@:2.6.1') # <- restriction due to scotch
depends_on('cmake', type='build')
- depends_on('scotch ~ metis', when='~ptscotch+scotch')
- depends_on('scotch ~ metis + mpi', when='+ptscotch')
- depends_on('metis@5:', when='+metis')
- depends_on('parmetis', when='+parmetis')
- depends_on('parmgridgen', when='+parmgridgen')
-
- depends_on('paraview', when='+paraview')
-
- def set_arch(self):
- (sysname, nodename, release, version, machine) = os.uname()
-
- if self.compiler.name not in self.supported_compilers:
- raise RuntimeError('{0} is not a supported compiler \
- to compile OpenFOAM'.format(self.compiler.name))
-
- foam_compiler = self.supported_compilers[self.compiler.name]
- if sysname == 'Linux':
- arch = 'linux'
- if foam_compiler == 'Clang':
- raise RuntimeError('OS, compiler combination not\
- supported ({0} {1})'.format(sysname, foam_compiler))
- elif sysname == 'Darwin':
- if machine == 'x86_64':
- arch = 'darwinIntel'
- if foam_compiler == 'Icc':
- raise RuntimeError('OS, compiler combination not\
- supported ({0} {1})'.format(sysname, foam_compiler))
- else:
- raise RuntimeError('{0} {1} is not a \
- supported architecture'.format(sysname, machine))
-
- return (arch, foam_compiler)
+ depends_on('scotch~metis', when='~ptscotch+scotch')
+ depends_on('scotch~metis+mpi', when='+ptscotch')
+ depends_on('metis@5:', when='+metis')
+ depends_on('parmetis', when='+parmetis')
+ depends_on('parmgridgen', when='+parmgridgen')
+ depends_on('paraview@:5.0.1', when='+paraview')
+
+ # Some user settings, to be adjusted manually or via variants
+ foam_cfg = {
+ 'WM_COMPILER': 'Gcc', # <- %compiler
+ 'WM_ARCH_OPTION': '64', # (32/64-bit on x86_64)
+ # FUTURE? 'WM_LABEL_SIZE': '32', # <- +int64
+ 'WM_PRECISION_OPTION': 'DP', # <- +float32
+ 'WM_COMPILE_OPTION': 'SPACKOpt', # Do not change
+ 'WM_MPLIB': 'USER', # USER | USERMPI
+ }
+
+ # The system description is frequently needed
+ foam_sys = {
+ 'WM_ARCH': None,
+ 'WM_COMPILER': None,
+ 'WM_OPTIONS': None,
+ }
+
+ # Content for etc/prefs.{csh,sh}
+ etc_prefs = {}
+
+ # Content for etc/config.{csh,sh}/ files
+ etc_config = {}
+
+ build_script = './spack-Allwmake' # <- Generated by patch() method.
+ # phases = ['configure', 'build', 'install']
+ # build_system_class = 'OpenfoamCom'
- def get_openfoam_environment(self):
- return EnvironmentModifications.from_sourcing_files(
- join_path(self.stage.source_path,
- 'etc/bashrc'))
+ def setup_environment(self, spack_env, run_env):
+ run_env.set('FOAM_INST_DIR', self.prefix)
+ run_env.set('WM_PROJECT_DIR', self.projectdir)
+
+ @property
+ def _canonical(self):
+ """Canonical name for this package and version"""
+ return 'foam-extend-{0}'.format(self.version.up_to(2))
+
+ @property
+ def projectdir(self):
+ """Absolute location of project directory: WM_PROJECT_DIR/"""
+ return join_path(self.prefix, self._canonical) # <- prefix/canonical
+
+ @property
+ def etc(self):
+ """Absolute location of the OpenFOAM etc/ directory"""
+ return join_path(self.projectdir, 'etc')
+
+ @property
+ def archbin(self):
+ """Relative location of architecture-specific executables"""
+ wm_options = self.set_openfoam()
+ return join_path('applications', 'bin', wm_options)
+
+ @property
+ def archlib(self):
+ """Relative location of architecture-specific libraries"""
+ wm_options = self.set_openfoam()
+ return join_path('lib', wm_options)
+
+ @property
+ def wm_options(self):
+ """The architecture+compiler+options for OpenFOAM"""
+ opts = self.set_openfoam()
+ return opts
+
+ @property
+ def rpath_info(self):
+ """Define 'SPACKOpt' compiler optimization file to have wmake
+ use spack information with minimum modifications to OpenFOAM
+ """
+ build_libpath = join_path(self.stage.source_path, self.archlib)
+ install_libpath = join_path(self.projectdir, self.archlib)
+
+ # 'DBUG': rpaths
+ return '{0}{1} {2}{3}'.format(
+ self.compiler.cxx_rpath_arg, install_libpath,
+ self.compiler.cxx_rpath_arg, build_libpath)
+
+ def openfoam_arch(self):
+ """Return an architecture value similar to what OpenFOAM does in
+ etc/config.sh/settings, but slightly more generous.
+ Uses and may adjust foam_cfg[WM_ARCH_OPTION] as a side-effect
+ """
+ # spec.architecture.platform is like `uname -s`, but lower-case
+ platform = self.spec.architecture.platform
+
+ # spec.architecture.target is like `uname -m`
+ target = self.spec.architecture.target
+
+ if platform == 'linux':
+ if target == 'i686':
+ self.foam_cfg['WM_ARCH_OPTION'] = '32' # Force consistency
+ elif target == 'x86_64':
+ if self.foam_cfg['WM_ARCH_OPTION'] == '64':
+ platform += '64'
+ elif target == 'ia64':
+ platform += 'ia64'
+ elif target == 'armv7l':
+ platform += 'ARM7'
+        elif target == 'ppc64':
+            platform += 'PPC64'
+        elif target == 'ppc64le':
+            platform += 'PPC64le'
+ elif platform == 'darwin':
+ if target == 'x86_64':
+ platform += 'Intel'
+ if self.foam_cfg['WM_ARCH_OPTION'] == '64':
+ platform += '64'
+ # ... and others?
+ return platform
+
+ def openfoam_compiler(self):
+ """Capitalized version of the compiler name, which usually corresponds
+ to how OpenFOAM will camel-case things.
+        Use compiler_mapping to handle special cases.
+ Also handle special compiler options (eg, KNL)
+ """
+ comp = self.compiler.name
+ if comp in self.compiler_mapping:
+ comp = self.compiler_mapping[comp]
+ comp = comp.capitalize()
+
+ if '+knl' in self.spec:
+ comp += 'KNL'
+ return comp
+
+ # For foam-extend: does not yet support +int64
+ def set_openfoam(self):
+ """Populate foam_cfg, foam_sys according to
+ variants, architecture, compiler.
+ Returns WM_OPTIONS.
+ """
+ # Run once
+ opts = self.foam_sys['WM_OPTIONS']
+ if opts:
+ return opts
+
+ wm_arch = self.openfoam_arch()
+ wm_compiler = self.openfoam_compiler()
+ compileOpt = self.foam_cfg['WM_COMPILE_OPTION']
+
+ # Insist on a wmake rule for this architecture/compiler combination
+ archCompiler = wm_arch + wm_compiler
+ compiler_rule = join_path(
+ self.stage.source_path, 'wmake', 'rules', archCompiler)
+
+ if not isdir(compiler_rule):
+ raise RuntimeError(
+ 'No wmake rule for {0}'.format(archCompiler))
+ if not re.match(r'.+Opt$', compileOpt):
+ raise RuntimeError(
+ "WM_COMPILE_OPTION={0} is not type '*Opt'".format(compileOpt))
+
+ # Adjust for variants
+ # FUTURE? self.foam_cfg['WM_LABEL_SIZE'] = (
+ # FUTURE? '64' if '+int64' in self.spec else '32'
+ # FUTURE? )
+ self.foam_cfg['WM_PRECISION_OPTION'] = (
+ 'SP' if '+float32' in self.spec else 'DP'
+ )
+
+ # ----
+ # WM_OPTIONS=$WM_ARCH$WM_COMPILER$WM_PRECISION_OPTION$WM_COMPILE_OPTION
+ # ----
+ self.foam_sys['WM_ARCH'] = wm_arch
+ self.foam_sys['WM_COMPILER'] = wm_compiler
+ self.foam_cfg['WM_COMPILER'] = wm_compiler # For bashrc,cshrc too
+ self.foam_sys['WM_OPTIONS'] = ''.join([
+ wm_arch,
+ wm_compiler,
+ self.foam_cfg['WM_PRECISION_OPTION'],
+ # FUTURE? 'Int', self.foam_cfg['WM_LABEL_SIZE'], # Int32/Int64
+ compileOpt
+ ])
+ return self.foam_sys['WM_OPTIONS']
def patch(self):
- # change names to match the package and not the one patch in
- # the Third-Party of foam-extend
- if '+parmgridgen' in self.spec:
- filter_file(r'-lMGridGen',
- r'-lmgrid',
- 'src/dbns/Make/options')
-
- filter_file(
- r'-lMGridGen',
- r'-lmgrid',
- 'src/fvAgglomerationMethods/MGridGenGamgAgglomeration/Make/options') # noqa: E501
-
- # Get the wmake arch and compiler
- (arch, foam_compiler) = self.set_arch()
-
- prefs_dict = {
- 'compilerInstall': 'System',
- 'WM_COMPILER': foam_compiler,
- 'WM_ARCH_OPTION': '64',
- 'WM_PRECISION_OPTION': 'DP',
- 'WM_COMPILE_OPTION': 'SPACKOpt',
- 'WM_MPLIB': 'SPACK',
-
- 'CMAKE_DIR': self.spec['cmake'].prefix,
- 'CMAKE_BIN_DIR': self.spec['cmake'].prefix.bin,
- 'PYTHON_DIR': self.spec['python'].prefix,
- 'PYTHON_BIN_DIR': self.spec['python'].prefix.bin,
-
- 'FLEX_SYSTEM': 1,
- 'FLEX_DIR': self.spec['flex'].prefix,
-
- 'BISON_SYSTEM': 1,
- 'BISON_DIR': self.spec['flex'].prefix,
-
- 'ZLIB_SYSTEM': 1,
- 'ZLIB_DIR': self.spec['zlib'].prefix,
- }
-
- if '+scotch' in self.spec or '+ptscotch' in self.spec:
- prefs_dict['SCOTCH_SYSTEM'] = 1
- prefs_dict['SCOTCH_DIR'] = self.spec['scotch'].prefix
- prefs_dict['SCOTCH_BIN_DIR'] = self.spec['scotch'].prefix.bin
- prefs_dict['SCOTCH_LIB_DIR'] = self.spec['scotch'].prefix.lib
- prefs_dict['SCOTCH_INCLUDE_DIR'] = \
- self.spec['scotch'].prefix.include
-
- if '+metis' in self.spec:
- prefs_dict['METIS_SYSTEM'] = 1
- prefs_dict['METIS_DIR'] = self.spec['metis'].prefix
- prefs_dict['METIS_BIN_DIR'] = self.spec['metis'].prefix.bin
- prefs_dict['METIS_LIB_DIR'] = self.spec['metis'].prefix.lib
- prefs_dict['METIS_INCLUDE_DIR'] = self.spec['metis'].prefix.include
-
- if '+parmetis' in self.spec:
- prefs_dict['PARMETIS_SYSTEM'] = 1
- prefs_dict['PARMETIS_DIR'] = self.spec['parmetis'].prefix
- prefs_dict['PARMETIS_BIN_DIR'] = self.spec['parmetis'].prefix.bin
- prefs_dict['PARMETIS_LIB_DIR'] = self.spec['parmetis'].prefix.lib
- prefs_dict['PARMETIS_INCLUDE_DIR'] = \
- self.spec['parmetis'].prefix.include
-
- if '+parmgridgen' in self.spec:
- prefs_dict['PARMGRIDGEN_SYSTEM'] = 1
- prefs_dict['PARMGRIDGEN_DIR'] = self.spec['parmgridgen'].prefix
- prefs_dict['PARMGRIDGEN_BIN_DIR'] = \
- self.spec['parmgridgen'].prefix.bin
- prefs_dict['PARMGRIDGEN_LIB_DIR'] = \
- self.spec['parmgridgen'].prefix.lib
- prefs_dict['PARMGRIDGEN_INCLUDE_DIR'] = \
- self.spec['parmgridgen'].prefix.include
-
- if '+paraview' in self.spec:
- prefs_dict['PARAVIEW_SYSTEM'] = 1
- prefs_dict['PARAVIEW_DIR'] = self.spec['paraview'].prefix
- prefs_dict['PARAVIEW_BIN_DIR'] = self.spec['paraview'].prefix.bin
- prefs_dict['QT_SYSTEM'] = 1
- prefs_dict['QT_DIR'] = self.spec['qt'].prefix
- prefs_dict['QT_BIN_DIR'] = self.spec['qt'].prefix.bin
-
- # write the prefs files to define the configuration needed,
- # only the prefs.sh is used by this script but both are
- # installed for end users
- with working_dir('.'):
- with open("etc/prefs.sh", "w") as fh:
- for key in sorted(prefs_dict):
- fh.write('export {0}={1}\n'.format(key, prefs_dict[key]))
-
- with open("etc/prefs.csh", "w") as fh:
- for key in sorted(prefs_dict):
- fh.write('setenv {0}={1}\n'.format(key, prefs_dict[key]))
-
- # Defining a different mpi and optimisation file to be able to
- # make wmake get spack info with minimum modifications on
- # configurations scripts
- mpi_info = [
- 'PFLAGS = -DOMPI_SKIP_MPICXX -DMPICH_IGNORE_CXX_SEEK',
- 'PINC = -I{0}'.format(self.spec['mpi'].prefix.include),
- 'PLIBS = -L{0} -lmpi'.format(self.spec['mpi'].prefix.lib)
+ """Adjust OpenFOAM build for spack. Where needed, apply filter as an
+ alternative to normal patching.
+ """
+ self.set_openfoam() # May need foam_cfg/foam_sys information
+
+ # Adjust ParMGridGen - this is still a mess
+ files = [
+ 'src/dbns/Make/options',
+ 'src/fvAgglomerationMethods/MGridGenGamgAgglomeration/Make/options' # noqa: E501
]
+ for f in files:
+ filter_file(r'-lMGridGen', r'-lmgrid', f, backup=False)
- arch_path = ''.join([arch, prefs_dict['WM_ARCH_OPTION'],
- foam_compiler])
- option_path = ''.join([arch_path,
- prefs_dict['WM_PRECISION_OPTION'],
- prefs_dict['WM_COMPILE_OPTION']])
- rule_path = join_path("wmake", "rules", arch_path)
- build_path = join_path(self.stage.source_path, 'lib', option_path)
- install_path = \
- join_path(self.prefix,
- 'foam-extend-{0}'.format(self.version.up_to(2)),
- option_path)
-
- rpaths_foam = ' '.join([
- '{0}{1}'.format(self.compiler.cxx_rpath_arg,
- install_path),
- '{0}{1}'.format(self.compiler.cxx_rpath_arg,
- build_path)
- ])
-
- compiler_flags = {
- 'DBUG': rpaths_foam,
- 'OPT': '-O3',
- }
-
- with working_dir(rule_path):
- with open('mplibSPACK', "w") as fh:
- fh.write('\n'.join(mpi_info))
-
- for comp in ['c', 'c++']:
- with open('{0}SPACKOpt'.format(comp), "w") as fh:
- for key, val in compiler_flags.iteritems():
- fh.write('{0}{1} = {2}\n'.format(comp, key, val))
-
- _files_to_patch = [
+ # Adjust for flex version check
+ files = [
'src/thermophysicalModels/reactionThermo/chemistryReaders/chemkinReader/chemkinLexer.L', # noqa: E501
'src/surfMesh/surfaceFormats/stl/STLsurfaceFormatASCII.L', # noqa: E501
'src/meshTools/triSurface/triSurface/interfaces/STL/readSTLASCII.L', # noqa: E501
@@ -251,40 +315,198 @@ class FoamExtend(Package):
'applications/utilities/mesh/conversion/fluentMeshToFoam/fluentMeshToFoam.L', # noqa: E501
'applications/utilities/mesh/conversion/fluent3DMeshToElmer/fluent3DMeshToElmer.L' # noqa: E501
]
- for _file in _files_to_patch:
- filter_file(r'#if YY_FLEX_SUBMINOR_VERSION < 34',
- r'#if YY_FLEX_MAJOR_VERSION <= 2 && YY_FLEX_MINOR_VERSION <= 5 && YY_FLEX_SUBMINOR_VERSION < 34', # noqa: E501
- _file)
+ for f in files:
+ filter_file(
+ r'#if YY_FLEX_SUBMINOR_VERSION < 34',
+ r'#if YY_FLEX_MAJOR_VERSION <= 2 && YY_FLEX_MINOR_VERSION <= 5 && YY_FLEX_SUBMINOR_VERSION < 34', # noqa: E501
+ f, backup=False
+ )
+
+ # Build wrapper script
+ with open(self.build_script, 'w') as out:
+ out.write(
+ """#!/bin/bash
+export FOAM_INST_DIR=$(cd .. && pwd -L)
+. $PWD/etc/bashrc '' # No arguments
+mkdir -p $FOAM_APPBIN $FOAM_LIBBIN 2>/dev/null # Allow interrupt
+echo Build openfoam with SPACK
+echo WM_PROJECT_DIR = $WM_PROJECT_DIR
+./Allwmake # No arguments
+#
+""")
+ set_executable(self.build_script)
+ self.configure(self.spec, self.prefix) # Should be a separate phase
+
+ def configure(self, spec, prefix):
+ """Make adjustments to the OpenFOAM configuration files in their various
+ locations: etc/bashrc, etc/config.sh/FEATURE and customizations that
+        don't properly fit get placed in the etc/prefs.sh file (similarly for
+ csh).
+ """
+ self.set_openfoam() # Need foam_cfg/foam_sys information
+
+ # Content for etc/prefs.{csh,sh}
+ self.etc_prefs = {
+ '000': { # Sort first
+ 'compilerInstall': 'System',
+ },
+ '001': {},
+ 'cmake': {
+ 'CMAKE_DIR': spec['cmake'].prefix,
+ 'CMAKE_BIN_DIR': spec['cmake'].prefix.bin,
+ },
+ 'python': {
+ 'PYTHON_DIR': spec['python'].prefix,
+ 'PYTHON_BIN_DIR': spec['python'].prefix.bin,
+ },
+ 'flex': {
+ 'FLEX_SYSTEM': 1,
+ 'FLEX_DIR': spec['flex'].prefix,
+ },
+ 'bison': {
+ 'BISON_SYSTEM': 1,
+ 'BISON_DIR': spec['flex'].prefix,
+ },
+ 'zlib': {
+ 'ZLIB_SYSTEM': 1,
+ 'ZLIB_DIR': spec['zlib'].prefix,
+ },
+ }
+ # Adjust configuration via prefs - sort second
+ self.etc_prefs['001'].update(self.foam_cfg)
+
+ if '+scotch' in spec or '+ptscotch' in spec:
+ pkg = spec['scotch'].prefix
+ self.etc_prefs['scotch'] = {
+ 'SCOTCH_SYSTEM': 1,
+ 'SCOTCH_DIR': pkg,
+ 'SCOTCH_BIN_DIR': pkg.bin,
+ 'SCOTCH_LIB_DIR': pkg.lib,
+ 'SCOTCH_INCLUDE_DIR': pkg.include,
+ }
+
+ if '+metis' in spec:
+ pkg = spec['metis'].prefix
+ self.etc_prefs['metis'] = {
+ 'METIS_SYSTEM': 1,
+ 'METIS_DIR': pkg,
+ 'METIS_BIN_DIR': pkg.bin,
+ 'METIS_LIB_DIR': pkg.lib,
+ 'METIS_INCLUDE_DIR': pkg.include,
+ }
+
+ if '+parmetis' in spec:
+ pkg = spec['parmetis'].prefix
+            self.etc_prefs['parmetis'] = {
+ 'PARMETIS_SYSTEM': 1,
+ 'PARMETIS_DIR': pkg,
+ 'PARMETIS_BIN_DIR': pkg.bin,
+ 'PARMETIS_LIB_DIR': pkg.lib,
+ 'PARMETIS_INCLUDE_DIR': pkg.include,
+ }
+
+ if '+parmgridgen' in spec:
+ pkg = spec['parmgridgen'].prefix
+ self.etc_prefs['parmgridgen'] = {
+ 'PARMGRIDGEN_SYSTEM': 1,
+ 'PARMGRIDGEN_DIR': pkg,
+ 'PARMGRIDGEN_BIN_DIR': pkg.bin,
+ 'PARMGRIDGEN_LIB_DIR': pkg.lib,
+ 'PARMGRIDGEN_INCLUDE_DIR': pkg.include,
+ }
- def setup_environment(self, spack_env, run_env):
- with working_dir(self.stage.path):
- spack_env.set('FOAM_INST_DIR', os.path.abspath('.'))
+ if '+paraview' in self.spec:
+ self.etc_prefs['paraview'] = {
+ 'PARAVIEW_SYSTEM': 1,
+ 'PARAVIEW_DIR': spec['paraview'].prefix,
+ 'PARAVIEW_BIN_DIR': spec['paraview'].prefix.bin,
+ }
+ self.etc_prefs['qt'] = {
+ 'QT_SYSTEM': 1,
+ 'QT_DIR': spec['qt'].prefix,
+ 'QT_BIN_DIR': spec['qt'].prefix.bin,
+ }
+
+ # Write prefs files according to the configuration.
+ # Only need prefs.sh for building, but install both for end-users
+ write_environ(
+ self.etc_prefs,
+ posix=join_path('etc', 'prefs.sh'),
+ cshell=join_path('etc', 'prefs.csh'))
+
+ archCompiler = self.foam_sys['WM_ARCH'] + self.foam_sys['WM_COMPILER']
+ compileOpt = self.foam_cfg['WM_COMPILE_OPTION']
+ # general_rule = join_path('wmake', 'rules', 'General')
+ compiler_rule = join_path('wmake', 'rules', archCompiler)
+ generate_mplib_rules(compiler_rule, self.spec)
+ generate_compiler_rules(compiler_rule, compileOpt, self.rpath_info)
+ # Record the spack spec information
+ with open("log.spack-spec", 'w') as outfile:
+ outfile.write(spec.tree())
+
+ def build(self, spec, prefix):
+ """Build using the OpenFOAM Allwmake script, with a wrapper to source
+ its environment first.
+ """
+ self.set_openfoam() # Force proper population of foam_cfg/foam_sys
+ args = []
+ if self.parallel: # Build in parallel? - pass via the environment
+ os.environ['WM_NCOMPPROCS'] = str(self.make_jobs) \
+ if self.make_jobs else str(multiprocessing.cpu_count())
+ builder = Executable(self.build_script)
+ builder(*args)
- (arch, foam_compiler) = self.set_arch()
+ def install(self, spec, prefix):
+ """Install under the projectdir (== prefix/name-version)"""
+ self.build(spec, prefix) # Should be a separate phase
+ opts = self.wm_options
- run_env.set('FOAM_INST_DIR', self.prefix)
+ # Fairly ugly since intermediate targets are scattered inside sources
+ appdir = 'applications'
+ mkdirp(self.projectdir, join_path(self.projectdir, appdir))
- def install(self, spec, prefix):
- env_openfoam = self.get_openfoam_environment()
- env_openfoam.apply_modifications()
+ # Retain build log file
+ out = "spack-build.out"
+ if isfile(out):
+ install(out, join_path(self.projectdir, "log." + opts))
- if self.parallel:
- os.environ['WM_NCOMPPROCS'] = str(self.make_jobs) \
- if self.make_jobs else str(multiprocessing.cpu_count())
+ # All top-level files, except spack build info and possibly Allwmake
+ if '+source' in spec:
+ ignored = re.compile(r'^spack-.*')
+ else:
+ ignored = re.compile(r'^(Allclean|Allwmake|spack-).*')
- allwmake = Executable('./Allwmake')
- allwmake()
+ files = [
+ f for f in glob.glob("*") if isfile(f) and not ignored.search(f)
+ ]
+ for f in files:
+ install(f, self.projectdir)
- install_path = \
- join_path(self.prefix,
- 'foam-extend-{0}'.format(self.version.up_to(2)))
+ # Install directories. install applications/bin directly
+ for d in ['bin', 'etc', 'wmake', 'lib', join_path(appdir, 'bin')]:
+ install_tree(
+ d,
+ join_path(self.projectdir, d))
if '+source' in spec:
- install_tree('src', join_path(install_path, 'src'))
- install_tree('tutorials', join_path(install_path, 'tutorials'))
-
- install_tree('lib', join_path(install_path, 'lib'))
- install_tree('bin', join_path(install_path, 'bin'))
- install_tree('applications', join_path(install_path, 'applications'))
- install_tree('etc', join_path(install_path, 'etc'))
- install_tree('wmake', join_path(install_path, 'wmake'))
+ subitem = join_path(appdir, 'Allwmake')
+ install(subitem, join_path(self.projectdir, subitem))
+
+ ignored = [opts] # Intermediate targets
+ for d in ['src', 'tutorials']:
+ install_tree(
+ d,
+ join_path(self.projectdir, d),
+ ignore=shutil.ignore_patterns(*ignored))
+
+ for d in ['solvers', 'utilities']:
+ install_tree(
+ join_path(appdir, d),
+ join_path(self.projectdir, appdir, d),
+ ignore=shutil.ignore_patterns(*ignored))
+
+ def install_links(self):
+ """Add symlinks into bin/, lib/ (eg, for other applications)"""
+ return
+
+# -----------------------------------------------------------------------------
diff --git a/var/spack/repos/builtin/packages/gcc/package.py b/var/spack/repos/builtin/packages/gcc/package.py
index 1fddb77299..ffb85ec15e 100644
--- a/var/spack/repos/builtin/packages/gcc/package.py
+++ b/var/spack/repos/builtin/packages/gcc/package.py
@@ -37,7 +37,7 @@ class Gcc(AutotoolsPackage):
url = "http://ftp.gnu.org/gnu/gcc/gcc-4.9.2/gcc-4.9.2.tar.bz2"
list_url = 'http://ftp.gnu.org/gnu/gcc/'
- list_depth = 2
+ list_depth = 1
version('6.3.0', '677a7623c7ef6ab99881bc4e048debb6')
version('6.2.0', '9768625159663b300ae4de2f4745fcc4')
diff --git a/var/spack/repos/builtin/packages/gdal/package.py b/var/spack/repos/builtin/packages/gdal/package.py
index b52b1f1038..dc58cc8993 100644
--- a/var/spack/repos/builtin/packages/gdal/package.py
+++ b/var/spack/repos/builtin/packages/gdal/package.py
@@ -38,7 +38,7 @@ class Gdal(Package):
homepage = "http://www.gdal.org/"
url = "http://download.osgeo.org/gdal/2.1.2/gdal-2.1.2.tar.xz"
list_url = "http://download.osgeo.org/gdal/"
- list_depth = 2
+ list_depth = 1
version('2.1.2', 'ae85b78888514c75e813d658cac9478e')
version('2.0.2', '940208e737c87d31a90eaae43d0efd65')
diff --git a/var/spack/repos/builtin/packages/gdk-pixbuf/package.py b/var/spack/repos/builtin/packages/gdk-pixbuf/package.py
index 1159744721..2f3a0b0bd7 100644
--- a/var/spack/repos/builtin/packages/gdk-pixbuf/package.py
+++ b/var/spack/repos/builtin/packages/gdk-pixbuf/package.py
@@ -32,7 +32,9 @@ class GdkPixbuf(AutotoolsPackage):
GTK+ 2 but it was split off into a separate package in
preparation for the change to GTK+ 3."""
homepage = "https://developer.gnome.org/gdk-pixbuf/"
- url = "http://ftp.gnome.org/pub/gnome/sources/gdk-pixbuf/2.31/gdk-pixbuf-2.31.1.tar.xz"
+ url = "http://ftp.gnome.org/pub/gnome/sources/gdk-pixbuf/2.31/gdk-pixbuf-2.31.2.tar.xz"
+ list_url = "http://ftp.acc.umu.se/pub/gnome/sources/gdk-pixbuf/"
+ list_depth = 2
version('2.31.2', '6be6bbc4f356d4b79ab4226860ab8523')
diff --git a/var/spack/repos/builtin/packages/glog/package.py b/var/spack/repos/builtin/packages/glog/package.py
index 11d679c7ff..3ea17d2f0c 100644
--- a/var/spack/repos/builtin/packages/glog/package.py
+++ b/var/spack/repos/builtin/packages/glog/package.py
@@ -29,6 +29,9 @@ class Glog(AutotoolsPackage):
"""C++ implementation of the Google logging module."""
homepage = "https://github.com/google/glog"
- url = "https://github.com/google/glog/archive/v0.3.3.tar.gz"
+ url = "https://github.com/google/glog/archive/v0.3.4.tar.gz"
+ version('0.3.4', 'df92e05c9d02504fb96674bc776a41cb')
version('0.3.3', 'c1f86af27bd9c73186730aa957607ed0')
+
+ depends_on('gflags')
diff --git a/var/spack/repos/builtin/packages/gnutls/package.py b/var/spack/repos/builtin/packages/gnutls/package.py
index 638ef7e30c..46fee8d260 100644
--- a/var/spack/repos/builtin/packages/gnutls/package.py
+++ b/var/spack/repos/builtin/packages/gnutls/package.py
@@ -35,8 +35,28 @@ class Gnutls(AutotoolsPackage):
with focus on security and interoperability."""
homepage = "http://www.gnutls.org"
- url = "http://www.ring.gr.jp/pub/net/gnupg/gnutls/v3.3/gnutls-3.3.9.tar.xz"
+ url = "https://www.gnupg.org/ftp/gcrypt/gnutls/v3.5/gnutls-3.5.10.tar.xz"
- version('3.3.9', 'ff61b77e39d09f1140ab5a9cf52c58b6')
+ version('3.5.10', '336c03a71ba90184ffd0388075dde504')
+ version('3.5.9', '0ab25eb6a1509345dd085bc21a387951')
+ version('3.3.9', 'ff61b77e39d09f1140ab5a9cf52c58b6')
- depends_on("nettle")
+ # Note that version 3.3.9 of gnutls doesn't support nettle 3.0.
+ depends_on("nettle@:2.9", when='@3.3.9')
+ depends_on("nettle", when='@3.5:')
+ depends_on("zlib", when='@3.5:')
+
+ build_directory = 'spack-build'
+
+ def url_for_version(self, version):
+ url = "https://www.gnupg.org/ftp/gcrypt/gnutls/v{0}/gnutls-{1}.tar.xz"
+ return url.format(version.up_to(2), version)
+
+ def configure_args(self):
+ args = []
+ if self.spec.satisfies('@3.5:'):
+ # use shipped libraries, might be turned into variants
+ args.append('--with-included-libtasn1')
+ args.append('--with-included-unistring')
+ args.append('--without-p11-kit') # p11-kit@0.23.1: ...
+ return args
diff --git a/var/spack/repos/builtin/packages/go/package.py b/var/spack/repos/builtin/packages/go/package.py
index 6559e90496..c095140c68 100644
--- a/var/spack/repos/builtin/packages/go/package.py
+++ b/var/spack/repos/builtin/packages/go/package.py
@@ -89,13 +89,6 @@ class Go(Package):
r'# \1\2\3',
)
- @when('@1.5.0:')
- def patch(self):
- pass
-
- def url_for_version(self, version):
- return "https://storage.googleapis.com/golang/go{0}.src.tar.gz".format(version)
-
def install(self, spec, prefix):
bash = which('bash')
with working_dir('src'):
diff --git a/var/spack/repos/builtin/packages/googletest/package.py b/var/spack/repos/builtin/packages/googletest/package.py
index 14ceb6ea49..44a96bc170 100644
--- a/var/spack/repos/builtin/packages/googletest/package.py
+++ b/var/spack/repos/builtin/packages/googletest/package.py
@@ -25,7 +25,7 @@
from spack import *
-class Googletest(Package):
+class Googletest(CMakePackage):
"""Google test framework for C++. Also called gtest."""
homepage = "https://github.com/google/googletest"
url = "https://github.com/google/googletest/tarball/release-1.7.0"
@@ -34,17 +34,25 @@ class Googletest(Package):
version('1.7.0', '5eaf03ed925a47b37c8e1d559eb19bc4')
version('1.6.0', '90407321648ab25b067fcd798caf8c78')
- depends_on("cmake", type='build')
-
+ def cmake_args(self):
+ spec = self.spec
+ if '@1.8.0:' in spec:
+ # New style (contains both Google Mock and Google Test)
+ options = ['-DBUILD_GMOCK=OFF', '-DBUILD_GTEST=ON']
+ else:
+ # Old style (contains only GTest)
+ options = []
+ return options
+
+ @when('@:1.7.0')
def install(self, spec, prefix):
- which('cmake')('.', *std_cmake_args)
-
- make()
-
- # Google Test doesn't have a make install
- # We have to do our own install here.
- install_tree('include', prefix.include)
-
- mkdirp(prefix.lib)
- install('./libgtest.a', '%s' % prefix.lib)
- install('./libgtest_main.a', '%s' % prefix.lib)
+ """Make the install targets"""
+ with working_dir(self.build_directory):
+ # Google Test doesn't have a make install
+ # We have to do our own install here.
+ install_tree(join_path(self.stage.source_path, 'include'),
+ prefix.include)
+
+ mkdirp(prefix.lib)
+ install('libgtest.a', prefix.lib)
+ install('libgtest_main.a', prefix.lib)
diff --git a/var/spack/repos/builtin/packages/gource/package.py b/var/spack/repos/builtin/packages/gource/package.py
index 21994ad42c..7d12697d63 100644
--- a/var/spack/repos/builtin/packages/gource/package.py
+++ b/var/spack/repos/builtin/packages/gource/package.py
@@ -52,10 +52,6 @@ class Gource(AutotoolsPackage):
parallel = False
force_autoreconf = True
- def url_for_version(self, version):
- tmp = 'https://github.com/acaudwell/Gource/releases/download/gource-{0}/gource-{0}.tar.gz' # NOQA: ignore=E501
- return tmp.format(version.dotted)
-
def configure_args(self):
spec = self.spec
return [
diff --git a/var/spack/repos/builtin/packages/hdf5-blosc/package.py b/var/spack/repos/builtin/packages/hdf5-blosc/package.py
index 4afce02f70..eb63d08dfd 100644
--- a/var/spack/repos/builtin/packages/hdf5-blosc/package.py
+++ b/var/spack/repos/builtin/packages/hdf5-blosc/package.py
@@ -22,7 +22,7 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-
+from __future__ import print_function
import os
import shutil
import sys
@@ -115,7 +115,7 @@ class Hdf5Blosc(Package):
def check_install(self, spec):
"Build and run a small program to test the installed HDF5 Blosc plugin"
- print "Checking HDF5-Blosc plugin..."
+ print("Checking HDF5-Blosc plugin...")
checkdir = "spack-check"
with working_dir(checkdir, create=True):
source = r"""\
@@ -188,16 +188,16 @@ Done.
output = ""
success = output == expected
if not success:
- print "Produced output does not match expected output."
- print "Expected output:"
- print "-" * 80
- print expected
- print "-" * 80
- print "Produced output:"
- print "-" * 80
- print output
- print "-" * 80
- print "Environment:"
+ print("Produced output does not match expected output.")
+ print("Expected output:")
+ print("-" * 80)
+ print(expected)
+ print("-" * 80)
+ print("Produced output:")
+ print("-" * 80)
+ print(output)
+ print("-" * 80)
+ print("Environment:")
env = which("env")
env()
raise RuntimeError("HDF5 Blosc plugin check failed")
diff --git a/var/spack/repos/builtin/packages/htslib/package.py b/var/spack/repos/builtin/packages/htslib/package.py
index 77829e71b9..20db1c918b 100644
--- a/var/spack/repos/builtin/packages/htslib/package.py
+++ b/var/spack/repos/builtin/packages/htslib/package.py
@@ -31,6 +31,8 @@ class Htslib(AutotoolsPackage):
homepage = "https://github.com/samtools/htslib"
url = "https://github.com/samtools/htslib/releases/download/1.3.1/htslib-1.3.1.tar.bz2"
+ version('1.4', '2a22ff382654c033c40e4ec3ea880050')
version('1.3.1', '16d78f90b72f29971b042e8da8be6843')
depends_on('zlib')
+ depends_on('bzip2', when="@1.4:")
diff --git a/var/spack/repos/builtin/packages/httpie/package.py b/var/spack/repos/builtin/packages/httpie/package.py
new file mode 100644
index 0000000000..0981dc2d3d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/httpie/package.py
@@ -0,0 +1,47 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Httpie(PythonPackage):
+ """Modern command line HTTP client."""
+
+ homepage = "https://httpie.org/"
+ url = "https://pypi.io/packages/source/h/httpie/httpie-0.9.8.tar.gz"
+
+ version('0.9.8', 'e0d1af07d0959a2e081e7954797ce260')
+
+ variant('socks', default=True,
+ description='Enable SOCKS proxy support')
+
+ depends_on('py-setuptools', type=('build', 'run'))
+ depends_on('py-pygments@2.1.3:', type=('build', 'run'))
+ depends_on('py-requests@2.11.0:', type=('build', 'run'))
+ depends_on('py-pysocks', type=('build', 'run'), when="+socks")
+ # Concretization problem breaks this. Unconditional for now...
+ # https://github.com/LLNL/spack/issues/3628
+ # depends_on('py-argparse@1.2.1:', type=('build', 'run'),
+ # when='^python@:2.6,3.0:3.1')
+ depends_on('py-argparse@1.2.1:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/hwloc/package.py b/var/spack/repos/builtin/packages/hwloc/package.py
index 0030d5ffde..6efa2dfbd9 100644
--- a/var/spack/repos/builtin/packages/hwloc/package.py
+++ b/var/spack/repos/builtin/packages/hwloc/package.py
@@ -42,7 +42,7 @@ class Hwloc(AutotoolsPackage):
homepage = "http://www.open-mpi.org/projects/hwloc/"
url = "http://www.open-mpi.org/software/hwloc/v1.9/downloads/hwloc-1.9.tar.gz"
list_url = "http://www.open-mpi.org/software/hwloc/"
- list_depth = 3
+ list_depth = 2
version('1.11.6', 'b4e95eadd2fbdb6d40bbd96be6f03c84')
version('1.11.5', '8f5fe6a9be2eb478409ad5e640b2d3ba')
diff --git a/var/spack/repos/builtin/packages/hydra/package.py b/var/spack/repos/builtin/packages/hydra/package.py
index 4461adae2e..fd894b68ee 100644
--- a/var/spack/repos/builtin/packages/hydra/package.py
+++ b/var/spack/repos/builtin/packages/hydra/package.py
@@ -34,6 +34,6 @@ class Hydra(AutotoolsPackage):
homepage = "http://www.mpich.org"
url = "http://www.mpich.org/static/downloads/3.2/hydra-3.2.tar.gz"
list_url = "http://www.mpich.org/static/downloads/"
- list_depth = 2
+ list_depth = 1
version('3.2', '4d670916695bf7e3a869cc336a881b39')
diff --git a/var/spack/repos/builtin/packages/ibmisc/package.py b/var/spack/repos/builtin/packages/ibmisc/package.py
index f325205507..181ae6d92b 100644
--- a/var/spack/repos/builtin/packages/ibmisc/package.py
+++ b/var/spack/repos/builtin/packages/ibmisc/package.py
@@ -29,9 +29,9 @@ class Ibmisc(CMakePackage):
"""Misc. reusable utilities used by IceBin."""
homepage = "https://github.com/citibeth/ibmisc"
- url = "https://github.com/citibeth/ibmisc/tarball/123"
+ url = "https://github.com/citibeth/ibmisc/archive/v0.1.0.tar.gz"
- version('0.1.0', '12f2a32432a11db48e00217df18e59fa')
+ version('0.1.0', '18c63db3e466c5a6fc2db3f903d06ecb')
variant('everytrace', default=False,
description='Report errors through Everytrace')
diff --git a/var/spack/repos/builtin/packages/icet/package.py b/var/spack/repos/builtin/packages/icet/package.py
index f8260f1951..ca3251ac40 100644
--- a/var/spack/repos/builtin/packages/icet/package.py
+++ b/var/spack/repos/builtin/packages/icet/package.py
@@ -30,7 +30,7 @@ class Icet(CMakePackage):
sort-last parallel rendering library."""
homepage = "http://icet.sandia.gov"
- url = "https://example.com/icet-1.2.3.tar.gz"
+ url = "https://gitlab.kitware.com/icet/icet/repository/archive.tar.bz2?ref=IceT-2.1.1"
version('develop', branch='master',
git='https://gitlab.kitware.com/icet/icet.git')
@@ -38,9 +38,5 @@ class Icet(CMakePackage):
depends_on('mpi')
- def url_for_version(self, version):
- return ("https://gitlab.kitware.com/icet/icet/repository/"
- "archive.tar.bz2?ref=IceT-{0}".format(version.dotted))
-
def cmake_args(self):
return ['-DICET_USE_OPENGL:BOOL=OFF']
diff --git a/var/spack/repos/builtin/packages/image-magick/package.py b/var/spack/repos/builtin/packages/image-magick/package.py
index 9efb0cd368..e32f1967e2 100644
--- a/var/spack/repos/builtin/packages/image-magick/package.py
+++ b/var/spack/repos/builtin/packages/image-magick/package.py
@@ -45,9 +45,6 @@ class ImageMagick(Package):
depends_on('ghostscript')
depends_on('ghostscript-fonts')
- def url_for_version(self, version):
- return "https://github.com/ImageMagick/ImageMagick/archive/{0}.tar.gz".format(version)
-
def install(self, spec, prefix):
gs_font_dir = join_path(spec['ghostscript-fonts'].prefix.share, "font")
configure('--prefix={0}'.format(prefix),
diff --git a/var/spack/repos/builtin/packages/daal/package.py b/var/spack/repos/builtin/packages/intel-daal/package.py
index 18ecfed7c2..011dec158e 100644
--- a/var/spack/repos/builtin/packages/daal/package.py
+++ b/var/spack/repos/builtin/packages/intel-daal/package.py
@@ -28,7 +28,7 @@ import os
from spack.pkg.builtin.intel import IntelInstaller
-class Daal(IntelInstaller):
+class IntelDaal(IntelInstaller):
"""Intel Data Analytics Acceleration Library.
Note: You will have to add the download file to a
@@ -44,6 +44,8 @@ class Daal(IntelInstaller):
version('2016.3.210', 'ad747c0dd97dace4cad03cf2266cad28',
url="file://%s/l_daal_2016.3.210.tgz" % os.getcwd())
+ provides('daal')
+
def install(self, spec, prefix):
self.intel_prefix = os.path.join(prefix, "pkg")
diff --git a/var/spack/repos/builtin/packages/ipp/package.py b/var/spack/repos/builtin/packages/intel-ipp/package.py
index a9765e1a0a..3c37b2342f 100644
--- a/var/spack/repos/builtin/packages/ipp/package.py
+++ b/var/spack/repos/builtin/packages/intel-ipp/package.py
@@ -28,7 +28,7 @@ import os
from spack.pkg.builtin.intel import IntelInstaller
-class Ipp(IntelInstaller):
+class IntelIpp(IntelInstaller):
"""Intel Integrated Performance Primitives.
Note: You will have to add the download file to a
@@ -42,6 +42,8 @@ class Ipp(IntelInstaller):
version('9.0.3.210', '0e1520dd3de7f811a6ef6ebc7aa429a3',
url="file://%s/l_ipp_9.0.3.210.tgz" % os.getcwd())
+ provides('ipp')
+
def install(self, spec, prefix):
self.intel_prefix = os.path.join(prefix, "pkg")
diff --git a/var/spack/repos/builtin/packages/intel-mkl/package.py b/var/spack/repos/builtin/packages/intel-mkl/package.py
index 5e108a0867..59b66d63ad 100644
--- a/var/spack/repos/builtin/packages/intel-mkl/package.py
+++ b/var/spack/repos/builtin/packages/intel-mkl/package.py
@@ -29,26 +29,18 @@ from spack.pkg.builtin.intel import IntelInstaller
class IntelMkl(IntelInstaller):
- """Intel Math Kernel Library.
-
- Note: You will have to add the download file to a
- mirror so that Spack can find it. For instructions on how to set up a
- mirror, see http://spack.readthedocs.io/en/latest/mirrors.html.
-
- To set the threading layer at run time set MKL_THREADING_LAYER
- variable to one of the following values: INTEL (default), SEQUENTIAL, PGI.
- To set interface layer at run time, use set the MKL_INTERFACE_LAYER
- variable to LP64 (default) or ILP64.
- """
+ """Intel Math Kernel Library."""
homepage = "https://software.intel.com/en-us/intel-mkl"
+ version('2017.2.174', 'ef39a12dcbffe5f4a0ef141b8759208c',
+ url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/11306/l_mkl_2017.2.174.tgz")
version('2017.0.098', '3cdcb739ab5ab1e047eb130b9ffdd8d0',
- url="file://%s/l_mkl_2017.0.098.tgz" % os.getcwd())
- version('11.3.2.181', '536dbd82896d6facc16de8f961d17d65',
- url="file://%s/l_mkl_11.3.2.181.tgz" % os.getcwd())
+ url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/9662/l_mkl_2017.0.098.tgz")
version('11.3.3.210', 'f72546df27f5ebb0941b5d21fd804e34',
- url="file://%s/l_mkl_11.3.3.210.tgz" % os.getcwd())
+ url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/9068/l_mkl_11.3.3.210.tgz")
+ version('11.3.2.181', '536dbd82896d6facc16de8f961d17d65',
+ url="http://registrationcenter-download.intel.com/akdlm/irc_nas/tec/8711/l_mkl_11.3.2.181.tgz")
variant('shared', default=True, description='Builds shared library')
variant('ilp64', default=False, description='64 bit integers')
diff --git a/var/spack/repos/builtin/packages/iozone/package.py b/var/spack/repos/builtin/packages/iozone/package.py
new file mode 100644
index 0000000000..530c609f0d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/iozone/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Iozone(MakefilePackage):
+ """IOzone is a filesystem benchmark tool. The benchmark generates and
+ measures a variety of file operations. Iozone has been ported to many
+ machines and runs under many operating systems."""
+
+ homepage = "http://www.iozone.org/"
+ url = "http://www.iozone.org/src/current/iozone3_465.tar"
+
+ version('3_465', 'c924e5e46fb1cf8145f420e8e57eb954')
+
+ # TODO: Add support for other architectures as necessary
+ build_targets = ['linux-AMD64']
+
+ build_directory = 'src/current'
+
+ def edit(self, spec, prefix):
+ with working_dir(self.build_directory):
+ filter_file(r'^CC\t= cc',
+ r'CC\t= {0}'.format(spack_cc),
+ 'makefile')
+
+ def install(self, spec, prefix):
+ install_tree('docs', join_path(prefix, 'docs'))
+
+ with working_dir(self.build_directory):
+ install_tree('.', prefix.bin)
diff --git a/var/spack/repos/builtin/packages/jdk/package.py b/var/spack/repos/builtin/packages/jdk/package.py
index 518a469435..8df01f4b67 100644
--- a/var/spack/repos/builtin/packages/jdk/package.py
+++ b/var/spack/repos/builtin/packages/jdk/package.py
@@ -45,10 +45,10 @@ class Jdk(Package):
'-H', # specify required License Agreement cookie
'Cookie: oraclelicense=accept-securebackup-cookie']
- version('8u66-linux-x64', '88f31f3d642c3287134297b8c10e61bf',
+ version('8u66', '88f31f3d642c3287134297b8c10e61bf',
url="http://download.oracle.com/otn-pub/java/jdk/8u66-b17/jdk-8u66-linux-x64.tar.gz",
curl_options=curl_options)
- version('8u92-linux-x64', '65a1cc17ea362453a6e0eb4f13be76e4',
+ version('8u92', '65a1cc17ea362453a6e0eb4f13be76e4',
url="http://download.oracle.com/otn-pub/java/jdk/8u92-b14/jdk-8u92-linux-x64.tar.gz",
curl_options=curl_options)
diff --git a/var/spack/repos/builtin/packages/jq/package.py b/var/spack/repos/builtin/packages/jq/package.py
new file mode 100644
index 0000000000..28e1c4dcfb
--- /dev/null
+++ b/var/spack/repos/builtin/packages/jq/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Jq(AutotoolsPackage):
+ """jq is a lightweight and flexible command-line JSON processor."""
+
+ homepage = "https://stedolan.github.io/jq/"
+ url = "https://github.com/stedolan/jq/archive/jq-1.5.tar.gz"
+
+ version('1.5', 'c8070bd6ec275404f77db3d2e568c9a3')
+
+ depends_on('oniguruma')
+ depends_on('bison@3.0:', type='build')
diff --git a/var/spack/repos/builtin/packages/kaldi/package.py b/var/spack/repos/builtin/packages/kaldi/package.py
new file mode 100644
index 0000000000..3cf25df46b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/kaldi/package.py
@@ -0,0 +1,107 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+from distutils.dir_util import copy_tree
+from os.path import join
+from fnmatch import fnmatch
+import os
+
+
+class Kaldi(Package): # Does not use Autotools
+ """Kaldi is a toolkit for speech recognition written
+ in C++ and licensed under the Apache License v2.0.
+ Kaldi is intended for use by speech recognition researchers."""
+
+ homepage = "https://github.com/kaldi-asr/kaldi"
+ url = "https://github.com/kaldi-asr/kaldi/archive/master.zip"
+
+ version('master', git='https://github.com/kaldi-asr/kaldi.git')
+
+ variant('shared', default=True,
+ description='build shared libraries')
+ variant('double', default=False,
+ description='build with double precision floats')
+ variant('cuda', default=False,
+ description='build with CUDA')
+
+ depends_on('blas')
+ depends_on('speex')
+ depends_on('openfst@1.6:')
+ depends_on('cuda', when='+cuda')
+ depends_on('sph2pipe', type='run')
+ depends_on('sctk', type='run')
+
+ def install(self, spec, prefix):
+ configure_args = [
+ '--threaded-math',
+ '--speex-root=' + spec['speex'].prefix,
+ '--fst-root=' + spec['openfst'].prefix,
+ '--fst-version=' + str(spec['openfst'].version)
+ ]
+
+ if '~shared' in spec:
+ configure_args.append('--static')
+ else:
+ configure_args.append('--shared')
+
+ if '^openblas' in spec:
+ configure_args.append('--mathlib=OPENBLAS')
+ configure_args.append('--openblas-root=' + spec['blas'].prefix)
+ if '+openmp' in spec['blas'].variants:
+ configure_args.append('--threaded-math')
+ elif '^atlas' in spec:
+ configure_args.append('--mathlib=ATLAS')
+ configure_args.append('--atlas-root=' + spec['blas'].prefix)
+ if '+pthread' in spec['blas'].variants:
+ configure_args.append('--threaded-atlas')
+ elif '^intel-parallel-studio' in spec or '^intel-mkl' in spec:
+ configure_args.append('--mathlib=MKL')
+ configure_args.append('--mkl-root=' + spec['blas'].prefix)
+ if '+openmp' in spec['blas'].variants:
+ configure_args.append('--mkl-threading=iomp')
+
+ if '+cuda' in spec:
+ configure_args.append('--use-cuda=yes')
+ configure_args.append('--cudatk-dir=' + spec['cuda'].prefix)
+
+ with working_dir("src"):
+ configure(*configure_args)
+ make()
+
+ mkdirp(prefix.bin)
+ for root, dirs, files in os.walk('bin'):
+ for name in files:
+ if os.access(join(root, name), os.X_OK):
+ install(join(root, name), prefix.bin)
+
+ mkdir(prefix.lib)
+ copy_tree('lib', prefix.lib)
+
+ for root, dirs, files in os.walk('.'):
+ for name in files:
+ if fnmatch(name, '*.h'):
+ mkdirp(join(prefix.include, root.strip("./")))
+ install(join(root, name),
+ join(prefix.include, root.strip("./")))
diff --git a/var/spack/repos/builtin/packages/libedit/package.py b/var/spack/repos/builtin/packages/libedit/package.py
index 5dcee61cab..6887d67101 100644
--- a/var/spack/repos/builtin/packages/libedit/package.py
+++ b/var/spack/repos/builtin/packages/libedit/package.py
@@ -30,7 +30,11 @@ class Libedit(AutotoolsPackage):
homepage = "http://thrysoee.dk/editline/"
url = "http://thrysoee.dk/editline/libedit-20150325-3.1.tar.gz"
- version('3.1', '43cdb5df3061d78b5e9d59109871b4f6',
- url="http://thrysoee.dk/editline/libedit-20150325-3.1.tar.gz")
+ version('3.1-20160903', '0467d27684c453a351fbcefebbcb16a3')
+ version('3.1-20150325', '43cdb5df3061d78b5e9d59109871b4f6')
depends_on('ncurses')
+
+ def url_for_version(self, version):
+ url = "http://thrysoee.dk/editline/libedit-{0}-{1}.tar.gz"
+ return url.format(version[-1], version.up_to(-1))
diff --git a/var/spack/repos/builtin/packages/libgd/package.py b/var/spack/repos/builtin/packages/libgd/package.py
index adce0b7515..c70d8b56fd 100644
--- a/var/spack/repos/builtin/packages/libgd/package.py
+++ b/var/spack/repos/builtin/packages/libgd/package.py
@@ -56,7 +56,3 @@ class Libgd(AutotoolsPackage):
depends_on('libpng')
depends_on('libtiff')
depends_on('fontconfig')
-
- def url_for_version(self, version):
- url = "https://github.com/libgd/libgd/releases/download/gd-{0}/libgd-{0}.tar.gz"
- return url.format(version)
diff --git a/var/spack/repos/builtin/packages/libint/package.py b/var/spack/repos/builtin/packages/libint/package.py
index 2ad5e93191..569aa68b68 100644
--- a/var/spack/repos/builtin/packages/libint/package.py
+++ b/var/spack/repos/builtin/packages/libint/package.py
@@ -25,25 +25,27 @@
from spack import *
-class Libint(Package):
+class Libint(AutotoolsPackage):
"""Libint is a high-performance library for computing
- Gaussian integrals in quantum mechanics."""
+ Gaussian integrals in quantum mechanics.
+ """
homepage = "https://github.com/evaleev/libint"
- url = "https://github.com/evaleev/libint/archive/v2.1.0.tar.gz"
+ url = "https://github.com/evaleev/libint/archive/v2.1.0.tar.gz"
+ version('2.2.0', 'da37dab862fb0b97a7ed7d007695ef47')
version('2.1.0', 'd0dcb985fe32ddebc78fe571ce37e2d6')
version('1.1.6', '990f67b55f49ecc18f32c58da9240684')
version('1.1.5', '379b7d0718ff398715d6898807adf628')
# Build dependencies
depends_on('autoconf@2.52:', type='build')
- depends_on('automake', type='build')
- depends_on('libtool', type='build')
+ depends_on('automake', type='build')
+ depends_on('libtool', type='build')
# Libint 2 dependencies
depends_on('boost', when='@2:')
- depends_on('gmp', when='@2:')
+ depends_on('gmp', when='@2:')
def url_for_version(self, version):
base_url = "https://github.com/evaleev/libint/archive"
@@ -54,16 +56,14 @@ class Libint(Package):
else:
return "{0}/v{1}.tar.gz".format(base_url, version)
- def install(self, spec, prefix):
- # Generate configure
+ def autoreconf(self, spec, prefix):
libtoolize()
aclocal('-I', 'lib/autoconf')
autoconf()
- config_args = [
- '--prefix={0}'.format(prefix),
- '--enable-shared'
- ]
+ def configure_args(self):
+
+ config_args = ['--enable-shared']
# Optimizations for the Intel compiler, suggested by CP2K
optflags = '-O2'
@@ -93,12 +93,4 @@ class Libint(Package):
'--with-libint-max-am=5',
'--with-libderiv-max-am1=4'
])
-
- configure(*config_args)
- make()
-
- # Testing suite was added in libint 2
- if self.version >= Version('2.0.0'):
- make('check')
-
- make('install')
+ return config_args
diff --git a/var/spack/repos/builtin/packages/libpfm4/package.py b/var/spack/repos/builtin/packages/libpfm4/package.py
new file mode 100644
index 0000000000..c7463afeeb
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libpfm4/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libpfm4(MakefilePackage):
+ """libpfm4 is a userspace library to help
+ setup performance events for use with
+ the perf_events Linux kernel interface."""
+
+ homepage = "http://perfmon2.sourceforge.net"
+ url = "https://downloads.sourceforge.net/project/perfmon2/libpfm4/libpfm-4.8.0.tar.gz"
+
+ version('4.8.0', '730383896db92e12fb2cc10f2d41dd43')
+
+ # Fails to build libpfm4 with intel compiler version 16 and 17
+ conflicts('%intel@16:17')
+
+ @property
+ def install_targets(self):
+ return ['DESTDIR={0}'.format(self.prefix),
+ 'LIBDIR=/lib',
+ 'INCDIR=/include',
+ 'MANDIR=/man',
+ 'LDCONFIG=true',
+ 'install']
diff --git a/var/spack/repos/builtin/packages/libsodium/package.py b/var/spack/repos/builtin/packages/libsodium/package.py
index a7e3ab10ae..6d21d65345 100644
--- a/var/spack/repos/builtin/packages/libsodium/package.py
+++ b/var/spack/repos/builtin/packages/libsodium/package.py
@@ -30,6 +30,7 @@ class Libsodium(AutotoolsPackage):
decryption, signatures, password hashing and more."""
homepage = "https://download.libsodium.org/doc/"
url = "https://download.libsodium.org/libsodium/releases/libsodium-1.0.11.tar.gz"
+ list_url = "https://download.libsodium.org/libsodium/releases/old"
version('1.0.11', 'b58928d035064b2a46fb564937b83540')
version('1.0.10', 'ea89dcbbda0b2b6ff6a1c476231870dd')
diff --git a/var/spack/repos/builtin/packages/libxsmm/package.py b/var/spack/repos/builtin/packages/libxsmm/package.py
index abc5dc5cde..8552d3c2f0 100644
--- a/var/spack/repos/builtin/packages/libxsmm/package.py
+++ b/var/spack/repos/builtin/packages/libxsmm/package.py
@@ -32,10 +32,11 @@ class Libxsmm(Package):
and small convolutions.'''
homepage = 'https://github.com/hfp/libxsmm'
- url = 'https://github.com/hfp/libxsmm/archive/1.7.1.tar.gz'
+ url = 'https://github.com/hfp/libxsmm/archive/1.8.tar.gz'
version('develop', git='https://github.com/hfp/libxsmm.git')
+ version('1.8', '2d513afbdad99e5d04c6c4ab4c9bb25b')
version('1.7.1', 'a938335b1c2c90616dc72c2c1a5824ab')
version('1.7', 'cb3aff6d123ba70bd3d4daf575767d14')
version('1.6.6', '8b45ae022f36b8c212f579a0952b5034')
diff --git a/var/spack/repos/builtin/packages/libxstream/package.py b/var/spack/repos/builtin/packages/libxstream/package.py
index 3201b58620..0996e6b9e8 100644
--- a/var/spack/repos/builtin/packages/libxstream/package.py
+++ b/var/spack/repos/builtin/packages/libxstream/package.py
@@ -31,9 +31,9 @@ class Libxstream(Package):
conditions.'''
homepage = 'https://github.com/hfp/libxstream'
- url = 'https://github.com/hfp/libxstream.git'
+ url = 'https://github.com/hfp/libxstream/archive/0.9.0.tar.gz'
- version('0.9.0', git='https://github.com/hfp/libxstream.git')
+ version('0.9.0', 'fd74b7cf5f145ff4925d91be2809571c')
def patch(self):
kwargs = {'ignore_absent': False, 'backup': True, 'string': True}
diff --git a/var/spack/repos/builtin/packages/libzip/package.py b/var/spack/repos/builtin/packages/libzip/package.py
new file mode 100644
index 0000000000..e3dc9ab2bb
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libzip/package.py
@@ -0,0 +1,35 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libzip(AutotoolsPackage):
+ """libzip is a C library for reading, creating,
+ and modifying zip archives."""
+
+ homepage = "https://nih.at/libzip/index.html"
+ url = "https://nih.at/libzip/libzip-1.2.0.tar.gz"
+
+ version('1.2.0', '5c3372ab3a7897295bfefb27f745cf69')
diff --git a/var/spack/repos/builtin/packages/llvm-openmp-ompt/package.py b/var/spack/repos/builtin/packages/llvm-openmp-ompt/package.py
index 8b83f216b6..7cc42e0f28 100644
--- a/var/spack/repos/builtin/packages/llvm-openmp-ompt/package.py
+++ b/var/spack/repos/builtin/packages/llvm-openmp-ompt/package.py
@@ -33,13 +33,17 @@ class LlvmOpenmpOmpt(Package):
homepage = "https://github.com/OpenMPToolsInterface/LLVM-openmp"
+ # tr4-stable branch
+ version('3.9.2b2',
+ git='https://github.com/OpenMPToolsInterface/LLVM-openmp.git',
+ commit='5cdca5dd3c0c336d42a335ca7cff622e270c9d47')
# align-to-tr-rebased branch
version('3.9.2b',
git='https://github.com/OpenMPToolsInterface/LLVM-openmp.git',
commit='982a08bcf3df9fb5afc04ac3bada47f19cc4e3d3')
depends_on('cmake', type='build')
- depends_on('llvm+clang~gold')
+ depends_on('llvm')
depends_on('ninja', type='build')
def install(self, spec, prefix):
diff --git a/var/spack/repos/builtin/packages/lmod/package.py b/var/spack/repos/builtin/packages/lmod/package.py
index 7b17d5db5f..b2624bb362 100644
--- a/var/spack/repos/builtin/packages/lmod/package.py
+++ b/var/spack/repos/builtin/packages/lmod/package.py
@@ -38,6 +38,7 @@ class Lmod(AutotoolsPackage):
homepage = 'https://www.tacc.utexas.edu/research-development/tacc-projects/lmod'
url = 'https://github.com/TACC/Lmod/archive/7.3.tar.gz'
+ version('7.4.1', '59b2558ee50877f2cf49ed37d7b09fea')
version('7.3', '70180ec2ea1fae53aa83350523f6b2b3')
version('6.4.5', '14f6c58dbc0a5a75574d795eac2c1e3c')
version('6.4.1', '7978ba777c8aa41a4d8c05fec5f780f4')
diff --git a/var/spack/repos/builtin/packages/meep/package.py b/var/spack/repos/builtin/packages/meep/package.py
index 2c1018e711..00b9c4ea09 100644
--- a/var/spack/repos/builtin/packages/meep/package.py
+++ b/var/spack/repos/builtin/packages/meep/package.py
@@ -30,6 +30,8 @@ class Meep(Package):
software package developed at MIT to model electromagnetic systems."""
homepage = "http://ab-initio.mit.edu/wiki/index.php/Meep"
+ url = "http://ab-initio.mit.edu/meep/meep-1.3.tar.gz"
+ list_url = "http://ab-initio.mit.edu/meep/old"
version('1.3', '18a5b9e18008627a0411087e0bb60db5')
version('1.2.1', '9be2e743c3a832ae922de9d955d016c5')
diff --git a/var/spack/repos/builtin/packages/metis/package.py b/var/spack/repos/builtin/packages/metis/package.py
index f15d544b7b..c927c22a1b 100644
--- a/var/spack/repos/builtin/packages/metis/package.py
+++ b/var/spack/repos/builtin/packages/metis/package.py
@@ -37,7 +37,8 @@ class Metis(Package):
partitioning schemes."""
homepage = "http://glaros.dtc.umn.edu/gkhome/metis/metis/overview"
- base_url = "http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis"
+ url = "http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis/metis-5.1.0.tar.gz"
+ list_url = "http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis/OLD"
version('5.1.0', '5465e67079419a69e0116de24fce58fe')
version('5.0.2', 'acb521a4e8c2e6dd559a7f9abd0468c5')
@@ -55,12 +56,11 @@ class Metis(Package):
patch('install_gklib_defs_rename.patch', when='@5:')
def url_for_version(self, version):
- verdir = 'OLD/' if version < Version('4.0.3') else ''
- return '%s/%smetis-%s.tar.gz' % (Metis.base_url, verdir, version)
-
- @when('@:4')
- def patch(self):
- pass
+ url = "http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis"
+ if version < Version('4.0.3'):
+ url += "/OLD"
+ url += "/metis-{0}.tar.gz".format(version)
+ return url
@when('@5:')
def patch(self):
@@ -84,7 +84,7 @@ class Metis(Package):
filter_file('#define MAX_JBUFS 128', '#define MAX_JBUFS 24',
join_path(source_path, 'GKlib', 'error.c'))
- @when('@:4')
+ @when('@:4') # noqa: F811
def install(self, spec, prefix):
# Process library spec and options
if any('+{0}'.format(v) in spec for v in ['gdb', 'int64', 'real64']):
@@ -175,7 +175,7 @@ class Metis(Package):
Executable(test_bin('mesh2dual'))(test_graph('metis.mesh'))
"""
- @when('@5:')
+ @when('@5:') # noqa: F811
def install(self, spec, prefix):
source_directory = self.stage.source_path
build_directory = join_path(source_directory, 'build')
@@ -187,7 +187,7 @@ class Metis(Package):
if '+shared' in spec:
options.append('-DSHARED:BOOL=ON')
else:
- # Remove all RPATH options
+ # Remove all RPATH options
# (RPATHxxx options somehow trigger cmake to link dynamically)
rpath_options = []
for o in options:
diff --git a/var/spack/repos/builtin/packages/mfem/package.py b/var/spack/repos/builtin/packages/mfem/package.py
index a25583e164..b3fe5197a0 100644
--- a/var/spack/repos/builtin/packages/mfem/package.py
+++ b/var/spack/repos/builtin/packages/mfem/package.py
@@ -31,15 +31,32 @@ class Mfem(Package):
homepage = 'http://www.mfem.org'
url = 'https://github.com/mfem/mfem'
+ # mfem is downloaded from a URL shortener at request of upstream
+ # author Tzanio Kolev <tzanio@llnl.gov>. See here:
+ # https://github.com/mfem/mfem/issues/53
+ #
+ # The following procedure should be used to verify security when a
+    #     new version is added:
+ #
+ # 1. Verify that no checksums on old versions have changed.
+ #
+ # 2. Verify that the shortened URL for the new version is listed at:
+ # http://mfem.org/download/
+ #
+ # 3. Use http://getlinkinfo.com or similar to verify that the
+    #    underlying download link for the latest version has the
+ # prefix: http://mfem.github.io/releases
+ #
+ # If this quick verification procedure fails, additional discussion
+ # will be required to verify the new version.
+
version('3.2',
'2938c3deed4ec4f7fd5b5f5cfe656845282e86e2dcd477d292390058b7b94340',
- url='http://goo.gl/Y9T75B', preferred=True, extension='.tar.gz')
+ url='http://goo.gl/Y9T75B', extension='.tar.gz')
version('3.1',
'841ea5cf58de6fae4de0f553b0e01ebaab9cd9c67fa821e8a715666ecf18fc57',
url='http://goo.gl/xrScXn', extension='.tar.gz')
-# version('3.1', git='https://github.com/mfem/mfem.git',
-# commit='dbae60fe32e071989b52efaaf59d7d0eb2a3b574')
variant('metis', default=False, description='Activate support for metis')
variant('hypre', default=False, description='Activate support for hypre')
diff --git a/var/spack/repos/builtin/packages/miniconda2/package.py b/var/spack/repos/builtin/packages/miniconda2/package.py
new file mode 100644
index 0000000000..d23ab080b0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/miniconda2/package.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+from six.moves.urllib.parse import urlparse
+from os.path import split
+
+
+class Miniconda2(Package):
+ """The minimalist bootstrap toolset for conda and Python2."""
+
+ homepage = "https://conda.io/miniconda.html"
+ url = "https://repo.continuum.io/miniconda/Miniconda2-4.3.11-Linux-x86_64.sh"
+
+ version('4.3.11', 'd573980fe3b5cdf80485add2466463f5', expand=False)
+
+ def install(self, spec, prefix):
+ # peel the name of the script out of the url
+ result = urlparse(self.url)
+ dir, script = split(result.path)
+ bash = which('bash')
+ bash(script, '-b', '-f', '-p', self.prefix)
diff --git a/var/spack/repos/builtin/packages/miniconda3/package.py b/var/spack/repos/builtin/packages/miniconda3/package.py
new file mode 100644
index 0000000000..8184c10d88
--- /dev/null
+++ b/var/spack/repos/builtin/packages/miniconda3/package.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+from six.moves.urllib.parse import urlparse
+from os.path import split
+
+
+class Miniconda3(Package):
+ """The minimalist bootstrap toolset for conda and Python3."""
+
+ homepage = "https://conda.io/miniconda.html"
+ url = "https://repo.continuum.io/miniconda/Miniconda3-4.3.11-Linux-x86_64.sh"
+
+ version('4.3.11', '1924c8d9ec0abf09005aa03425e9ab1a', expand=False)
+
+ def install(self, spec, prefix):
+ # peel the name of the script out of the url
+ result = urlparse(self.url)
+ dir, script = split(result.path)
+ bash = which('bash')
+ bash(script, '-b', '-f', '-p', self.prefix)
diff --git a/var/spack/repos/builtin/packages/mitos/package.py b/var/spack/repos/builtin/packages/mitos/package.py
index d577a1b285..4ccddb3592 100644
--- a/var/spack/repos/builtin/packages/mitos/package.py
+++ b/var/spack/repos/builtin/packages/mitos/package.py
@@ -30,13 +30,12 @@ class Mitos(Package):
performance data to view with MemAxes"""
homepage = "https://github.com/llnl/Mitos"
- url = "https://github.com/llnl/Mitos"
+ url = "https://github.com/LLNL/Mitos/archive/v0.9.1.tar.gz"
version('0.9.2',
git='https://github.com/llnl/Mitos.git',
commit='8cb143a2e8c00353ff531a781a9ca0992b0aaa3d')
-
- version('0.9.1', git='https://github.com/llnl/Mitos.git', tag='v0.9.1')
+ version('0.9.1', 'c6cb57f3cae54f5157affd97ef7ef79e')
depends_on('dyninst@8.2.1:')
depends_on('hwloc')
diff --git a/var/spack/repos/builtin/packages/moab/package.py b/var/spack/repos/builtin/packages/moab/package.py
index b783d7b81b..14925cfd3e 100644
--- a/var/spack/repos/builtin/packages/moab/package.py
+++ b/var/spack/repos/builtin/packages/moab/package.py
@@ -35,7 +35,7 @@ class Moab(Package):
mesh in chunks rather than through individual entities, while also
versatile enough to support individual entity access."""
homepage = "https://bitbucket.org/fathomteam/moab"
- url = "http://ftp.mcs.anl.gov/pub/fathom/moab-4.6.3.tar.gz"
+ url = "http://ftp.mcs.anl.gov/pub/fathom/moab-4.9.1.tar.gz"
version('4.9.1', '19cc2189fa266181ad9109b18d0b2ab8')
version('4.9.0', '40695d0a159040683cfa05586ad4a7c2')
diff --git a/var/spack/repos/builtin/packages/mpich/package.py b/var/spack/repos/builtin/packages/mpich/package.py
index 09fc683874..819fc95d5b 100644
--- a/var/spack/repos/builtin/packages/mpich/package.py
+++ b/var/spack/repos/builtin/packages/mpich/package.py
@@ -32,7 +32,7 @@ class Mpich(AutotoolsPackage):
homepage = "http://www.mpich.org"
url = "http://www.mpich.org/static/downloads/3.0.4/mpich-3.0.4.tar.gz"
list_url = "http://www.mpich.org/static/downloads/"
- list_depth = 2
+ list_depth = 1
version('3.2', 'f414cfa77099cd1fa1a5ae4e22db508a')
version('3.1.4', '2ab544607986486562e076b83937bba2')
diff --git a/var/spack/repos/builtin/packages/mummer/package.py b/var/spack/repos/builtin/packages/mummer/package.py
new file mode 100644
index 0000000000..49533ed95e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/mummer/package.py
@@ -0,0 +1,55 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Mummer(Package):
+ """MUMmer is a system for rapidly aligning entire genomes."""
+
+ homepage = "http://mummer.sourceforge.net/"
+ url = "https://sourceforge.net/projects/mummer/files/mummer/3.23/MUMmer3.23.tar.gz/download"
+
+ version('3.23', 'f2422b3d2638dba4baedb71b1acdffa2')
+
+ depends_on('gnuplot')
+
+ def install(self, spec, prefix):
+ if self.run_tests:
+ make('check')
+ make('install')
+ mkdirp(prefix.bin)
+
+ bins = ["show-tiling", "show-snps", "show-coords", "show-aligns",
+ "show-diff", "delta-filter", "combineMUMs", "mummer",
+ "repeat-match", "annotate", "mgaps", "gaps", "dnadiff",
+ "nucmer2xfig", "run-mummer3", "mummerplot", "promer",
+ "run-mummer1", "nucmer", "mapview", "exact-tandems"]
+ aux_bins = ["aux_bin/postnuc", "aux_bin/postpro",
+ "aux_bin/prenuc", "aux_bin/prepro"]
+
+ for b in bins:
+ install(b, join_path(prefix.bin, b))
+ for b in aux_bins:
+ install(b, join_path(prefix.bin, b[8:]))
diff --git a/var/spack/repos/builtin/packages/mvapich2/package.py b/var/spack/repos/builtin/packages/mvapich2/package.py
index 55b593f623..7c10287729 100644
--- a/var/spack/repos/builtin/packages/mvapich2/package.py
+++ b/var/spack/repos/builtin/packages/mvapich2/package.py
@@ -30,8 +30,13 @@ class Mvapich2(Package):
"""MVAPICH2 is an MPI implementation for Infiniband networks."""
homepage = "http://mvapich.cse.ohio-state.edu/"
url = "http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.2.tar.gz"
+ list_url = "http://mvapich.cse.ohio-state.edu/downloads/"
- version('2.2', '939b65ebe5b89a5bc822cdab0f31f96e')
+ # Newer alpha release
+ version('2.3a', '87c3fbf8a755b53806fa9ecb21453445')
+
+ # Prefer the latest stable release
+ version('2.2', '939b65ebe5b89a5bc822cdab0f31f96e', preferred=True)
version('2.1', '0095ceecb19bbb7fb262131cb9c2cdd6')
version('2.0', '9fbb68a4111a8b6338e476dc657388b4')
version('1.9', '5dc58ed08fd3142c260b70fe297e127c')
diff --git a/var/spack/repos/builtin/packages/mxml/package.py b/var/spack/repos/builtin/packages/mxml/package.py
index fcb7959678..435fd748b3 100644
--- a/var/spack/repos/builtin/packages/mxml/package.py
+++ b/var/spack/repos/builtin/packages/mxml/package.py
@@ -31,8 +31,8 @@ class Mxml(AutotoolsPackage):
non-standard libraries.
"""
- homepage = "http://www.msweet.org"
- url = "http://www.msweet.org/files/project3/mxml-2.9.tar.gz"
+ homepage = "http://michaelrsweet.github.io/mxml/"
+ url = "https://github.com/michaelrsweet/mxml/releases/download/release-2.10/mxml-2.10.tar.gz"
version('2.10', '8804c961a24500a95690ef287d150abe')
version('2.9', 'e21cad0f7aacd18f942aa0568a8dee19')
diff --git a/var/spack/repos/builtin/packages/nccl/package.py b/var/spack/repos/builtin/packages/nccl/package.py
index b68d3429d1..52be43aa25 100644
--- a/var/spack/repos/builtin/packages/nccl/package.py
+++ b/var/spack/repos/builtin/packages/nccl/package.py
@@ -37,5 +37,9 @@ class Nccl(MakefilePackage):
depends_on('cuda')
@property
+ def build_targets(self):
+ return ['CUDA_HOME={0}'.format(self.spec['cuda'].prefix)]
+
+ @property
def install_targets(self):
return ['PREFIX={0}'.format(self.prefix), 'install']
diff --git a/var/spack/repos/builtin/packages/ncftp/package.py b/var/spack/repos/builtin/packages/ncftp/package.py
new file mode 100644
index 0000000000..8b515af242
--- /dev/null
+++ b/var/spack/repos/builtin/packages/ncftp/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Ncftp(AutotoolsPackage):
+ """NcFTP Client is a set of application programs implementing the
+ File Transfer Protocol."""
+
+ homepage = "http://www.ncftp.com/"
+ url = "ftp://ftp.ncftp.com/ncftp/ncftp-3.2.6-src.tar.gz"
+
+ version('3.2.6', 'e7cce57ef6274d4c7433ffe28ffe0a71')
+
+ depends_on('ncurses')
diff --git a/var/spack/repos/builtin/packages/ncl/package.py b/var/spack/repos/builtin/packages/ncl/package.py
index 9f834eee3b..b7394cff32 100644
--- a/var/spack/repos/builtin/packages/ncl/package.py
+++ b/var/spack/repos/builtin/packages/ncl/package.py
@@ -229,6 +229,6 @@ class Ncl(Package):
if os.path.exists(filename):
try:
os.remove(filename)
- except OSError, e:
+ except OSError as e:
raise InstallError('Failed to delete file %s: %s' % (
e.filename, e.strerror))
diff --git a/var/spack/repos/builtin/packages/netlib-scalapack/package.py b/var/spack/repos/builtin/packages/netlib-scalapack/package.py
index 6a97180328..e860926f96 100644
--- a/var/spack/repos/builtin/packages/netlib-scalapack/package.py
+++ b/var/spack/repos/builtin/packages/netlib-scalapack/package.py
@@ -26,7 +26,7 @@ from spack import *
import sys
-class NetlibScalapack(Package):
+class NetlibScalapack(CMakePackage):
"""ScaLAPACK is a library of high-performance linear algebra routines for
parallel distributed memory machines
"""
@@ -60,12 +60,16 @@ class NetlibScalapack(Package):
@property
def scalapack_libs(self):
+ # Note that the default will be to search
+ # for 'libnetlib-scalapack.<suffix>'
shared = True if '+shared' in self.spec else False
return find_libraries(
'libscalapack', root=self.prefix, shared=shared, recurse=True
)
- def install(self, spec, prefix):
+ def cmake_args(self):
+ spec = self.spec
+
options = [
"-DBUILD_SHARED_LIBS:BOOL=%s" % ('ON' if '+shared' in spec else
'OFF'),
@@ -89,13 +93,10 @@ class NetlibScalapack(Package):
"-DCMAKE_Fortran_FLAGS=-fPIC"
])
- options.extend(std_cmake_args)
-
- with working_dir('spack-build', create=True):
- cmake('..', *options)
- make()
- make("install")
+ return options
+ @run_after('install')
+ def fix_darwin_install(self):
# The shared libraries are not installed correctly on Darwin:
- if (sys.platform == 'darwin') and ('+shared' in spec):
- fix_darwin_install_name(prefix.lib)
+ if (sys.platform == 'darwin') and ('+shared' in self.spec):
+ fix_darwin_install_name(self.spec.prefix.lib)
diff --git a/var/spack/repos/builtin/packages/nettle/package.py b/var/spack/repos/builtin/packages/nettle/package.py
index 2b6693a18c..e9df8489c9 100644
--- a/var/spack/repos/builtin/packages/nettle/package.py
+++ b/var/spack/repos/builtin/packages/nettle/package.py
@@ -30,7 +30,7 @@ class Nettle(AutotoolsPackage):
that is designed to fit easily in many contexts."""
homepage = "https://www.lysator.liu.se/~nisse/nettle/"
- url = "http://ftp.gnu.org/gnu/nettle/nettle-2.7.1.tar.gz"
+ url = "http://ftp.gnu.org/gnu/nettle/nettle-3.2.tar.gz"
version('3.2', 'afb15b4764ebf1b4e6d06c62bd4d29e4')
version('2.7.1', '003d5147911317931dd453520eb234a5')
diff --git a/var/spack/repos/builtin/packages/nextflow/package.py b/var/spack/repos/builtin/packages/nextflow/package.py
index 54f3dbf915..05f21f70b7 100644
--- a/var/spack/repos/builtin/packages/nextflow/package.py
+++ b/var/spack/repos/builtin/packages/nextflow/package.py
@@ -29,9 +29,13 @@ class Nextflow(Package):
"""Data-driven computational pipelines"""
homepage = "http://www.nextflow.io"
+ url = "https://github.com/nextflow-io/nextflow/releases/download/v0.24.1/nextflow"
+ version('0.24.1', '80ec8c4fe8e766e0bdd1371a50410d1d',
+ expand=False)
+ version('0.23.3', '71fb69275b6788af1c6f1165f40d362e',
+ expand=False)
version('0.20.1', '0e4e0e3eca1c2c97f9b4bffd944b923a',
- url='https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow',
expand=False)
depends_on('jdk')
diff --git a/var/spack/repos/builtin/packages/nwchem/package.py b/var/spack/repos/builtin/packages/nwchem/package.py
index f39d8ad0c7..3a8be3f56e 100644
--- a/var/spack/repos/builtin/packages/nwchem/package.py
+++ b/var/spack/repos/builtin/packages/nwchem/package.py
@@ -68,7 +68,7 @@ class Nwchem(Package):
]
}
# Iterate over patches
- for condition, urls in urls_for_patches.iteritems():
+ for condition, urls in urls_for_patches.items():
for url, md5 in urls:
patch(url, when=condition, level=0, md5=md5)
diff --git a/var/spack/repos/builtin/packages/oce/package.py b/var/spack/repos/builtin/packages/oce/package.py
index c3488c137a..950621da40 100644
--- a/var/spack/repos/builtin/packages/oce/package.py
+++ b/var/spack/repos/builtin/packages/oce/package.py
@@ -32,6 +32,7 @@ class Oce(Package):
Open CASCADE library.
"""
homepage = "https://github.com/tpaviot/oce"
+ url = "https://github.com/tpaviot/oce/archive/OCE-0.18.tar.gz"
version('0.18', '226e45e77c16a4a6e127c71fefcd171410703960ae75c7ecc7eb68895446a993')
version('0.17.2', 'bf2226be4cd192606af677cf178088e5')
@@ -46,10 +47,6 @@ class Oce(Package):
depends_on('cmake@2.8:', type='build')
depends_on('tbb', when='+tbb')
- def url_for_version(self, version):
- return 'https://github.com/tpaviot/oce/archive/OCE-%s.tar.gz' % (
- version.dotted)
-
# There is a bug in OCE which appears with Clang (version?) or GCC 6.0
# and has to do with compiler optimization, see
# https://github.com/tpaviot/oce/issues/576
diff --git a/var/spack/repos/builtin/packages/octopus/package.py b/var/spack/repos/builtin/packages/octopus/package.py
index 88350f50bc..8999b081e3 100644
--- a/var/spack/repos/builtin/packages/octopus/package.py
+++ b/var/spack/repos/builtin/packages/octopus/package.py
@@ -30,17 +30,11 @@ class Octopus(Package):
theory code."""
homepage = "http://www.tddft.org/programs/octopus/"
- base_url = "http://www.tddft.org/programs/octopus/down.php?file="
+ url = "http://www.tddft.org/programs/octopus/down.php?file=6.0/octopus-6.0.tar.gz"
version('6.0', '5d1168c2a8d7fd9cb9492eaebaa7182e')
version('5.0.1', '2b6392ab67b843f9d4ca7413fc07e822')
- # Sample url is:
- # "http://www.tddft.org/programs/octopus/down.php?file=5.0.1/octopus-5.0.1.tar.gz"
- def url_for_version(self, version):
- return '{0}/{1}/octopus-{1}.tar.gz'.format(Octopus.base_url,
- version.dotted)
-
variant('scalapack', default=False,
description='Compile with Scalapack')
variant('metis', default=True,
diff --git a/var/spack/repos/builtin/packages/oniguruma/package.py b/var/spack/repos/builtin/packages/oniguruma/package.py
new file mode 100644
index 0000000000..8a5b8005b6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/oniguruma/package.py
@@ -0,0 +1,34 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Oniguruma(AutotoolsPackage):
+ """Regular expression library."""
+
+ homepage = "https://github.com/kkos/oniguruma"
+ url = "https://github.com/kkos/oniguruma/releases/download/v6.1.3/onig-6.1.3.tar.gz"
+
+ version('6.1.3', '2d105d352c3f852d662414f639e7e859')
diff --git a/var/spack/repos/builtin/packages/openblas/package.py b/var/spack/repos/builtin/packages/openblas/package.py
index b6a81c2055..6e16174563 100644
--- a/var/spack/repos/builtin/packages/openblas/package.py
+++ b/var/spack/repos/builtin/packages/openblas/package.py
@@ -59,6 +59,8 @@ class Openblas(MakefilePackage):
parallel = False
+ conflicts('%intel@16', when='@0.2.15:0.2.19')
+
@run_before('edit')
def check_compilers(self):
# As of 06/2016 there is no mechanism to specify that packages which
@@ -76,6 +78,12 @@ class Openblas(MakefilePackage):
'OpenBLAS does not support OpenMP with clang!'
)
+ spec = self.spec
+ if spec.satisfies('%clang@8.1.0:') and spec.satisfies('@:0.2.19'):
+ raise InstallError(
+ 'OpenBLAS @:0.2.19 does not build with Apple clang@8.1.0:'
+ )
+
@property
def make_defs(self):
# Configure fails to pick up fortran from FC=/abs/path/to/f77, but
diff --git a/var/spack/repos/builtin/packages/openfoam-com/openfoam-bin-1612.patch b/var/spack/repos/builtin/packages/openfoam-com/openfoam-bin-1612.patch
new file mode 100644
index 0000000000..b9e87a7ec8
--- /dev/null
+++ b/var/spack/repos/builtin/packages/openfoam-com/openfoam-bin-1612.patch
@@ -0,0 +1,503 @@
+--- OpenFOAM-v1612+.orig/bin/foamEtcFile 2016-12-23 15:22:59.000000000 +0100
++++ OpenFOAM-v1612+/bin/foamEtcFile 2017-03-23 10:08:37.296887070 +0100
+@@ -4,7 +4,7 @@
+ # \\ / F ield | OpenFOAM: The Open Source CFD Toolbox
+ # \\ / O peration |
+ # \\ / A nd | Copyright (C) 2011-2016 OpenFOAM Foundation
+-# \\/ M anipulation |
++# \\/ M anipulation | Copyright (C) 2017 OpenCFD Ltd.
+ #-------------------------------------------------------------------------------
+ # License
+ # This file is part of OpenFOAM.
+@@ -26,7 +26,7 @@
+ # foamEtcFile
+ #
+ # Description
+-# Locate user/group/shipped file with semantics similar to the
++# Locate user/group/other files with semantics similar to the
+ # ~OpenFOAM/fileName expansion.
+ #
+ # The -mode option can be used to allow chaining from
+@@ -34,40 +34,53 @@
+ #
+ # For example, within the user ~/.OpenFOAM/<VER>/prefs.sh:
+ # \code
+-# foamPrefs=`$WM_PROJECT_DIR/bin/foamEtcFile -m go prefs.sh` \
+-# && _foamSource $foamPrefs
++# eval $(foamEtcFile -sh -mode=go prefs.sh)
+ # \endcode
+ #
++# Environment
++# - WM_PROJECT: (unset defaults to OpenFOAM)
++# - WM_PROJECT_SITE: (unset defaults to PREFIX/site)
++# - WM_PROJECT_VERSION: (unset defaults to detect from path)
++#
+ # Note
+-# This script must exist in $FOAM_INST_DIR/OpenFOAM-<VERSION>/bin/
+-# or $FOAM_INST_DIR/openfoam<VERSION>/bin/ (for the debian version)
++# This script must exist in one of these locations:
++# - $WM_PROJECT_INST_DIR/OpenFOAM-<VERSION>/bin
++# - $WM_PROJECT_INST_DIR/openfoam-<VERSION>/bin
++# - $WM_PROJECT_INST_DIR/OpenFOAM+<VERSION>/bin
++# - $WM_PROJECT_INST_DIR/openfoam+<VERSION>/bin
++# - $WM_PROJECT_INST_DIR/openfoam<VERSION>/bin (debian version)
+ #
+ #-------------------------------------------------------------------------------
++unset optQuiet optSilent
+ usage() {
+ [ "${optQuiet:-$optSilent}" = true ] && exit 1
+-
+ exec 1>&2
+ while [ "$#" -ge 1 ]; do echo "$1"; shift; done
+ cat<<USAGE
+
+-Usage: ${0##*/} [OPTION] fileName
+- ${0##*/} [OPTION] -list
++Usage: foamEtcFile [OPTION] fileName
++ foamEtcFile [OPTION] [-list|-list-test] [fileName]
+ options:
+- -all return all files (otherwise stop after the first match)
+- -list list the directories to be searched
+- -mode <mode> any combination of u(user), g(group), o(other)
+- -prefix <dir> specify an alternative installation prefix
+- -quiet suppress all normal output
+- -silent suppress all stderr output
+- -version <ver> specify an alternative OpenFOAM version
+- in the form Maj.Min.Rev (eg, 1.7.0)
+- -help print the usage
++ -a, -all Return all files (otherwise stop after the first match)
++ -l, -list List directories or files to be checked
++ -list-test List (existing) directories or files to be checked
++ -mode=MODE Any combination of u(user), g(group), o(other)
++ -prefix=DIR Specify an alternative installation prefix
++ -version=VER Specify alternative OpenFOAM version (eg, 3.0, 1612, ...)
++ -csh | -sh Produce output suitable for a csh or sh 'eval'
++ -csh-verbose | -sh-verbose
++ As per -csh | -sh, with additional verbosity
++ -q, -quiet Suppress all normal output
++ -s, -silent Suppress stderr, except -csh-verbose, -sh-verbose output
++ -help Print the usage
+
+- Locate user/group/shipped file with semantics similar to the
++ Locate user/group/other file with semantics similar to the
+ ~OpenFOAM/fileName expansion.
+
+- The options can also be specified as a single character
+- (eg, '-q' instead of '-quiet'), but must not be grouped.
++ Single character options must not be grouped. Equivalent options:
++ -mode=MODE, -mode MODE, -m MODE
++ -prefix=DIR, -prefix DIR, -p DIR
++ -version=VER, -version VER, -v VER
+
+ Exit status
+ 0 when the file is found. Print resolved path to stdout.
+@@ -78,61 +91,117 @@
+ exit 1
+ }
+
+-#-------------------------------------------------------------------------------
++# Report error and exit
++die()
++{
++ [ "${optQuiet:-$optSilent}" = true ] && exit 1
++ exec 1>&2
++ echo
++ echo "Error encountered:"
++ while [ "$#" -ge 1 ]; do echo " $1"; shift; done
++ echo
++ echo "See 'foamEtcFile -help' for usage"
++ echo
++ exit 1
++}
+
+-# the bin dir:
+-binDir="${0%/*}"
++#-------------------------------------------------------------------------------
++binDir="${0%/*}" # The bin dir
++projectDir="${binDir%/bin}" # The project dir
++prefixDir="${projectDir%/*}" # The prefix dir (same as $WM_PROJECT_INST_DIR)
+
+-# the project dir:
++# Could not resolve projectDir, prefixDir? (eg, called as ./bin/foamEtcFile)
++if [ "$prefixDir" = "$projectDir" ]
++then
++ binDir="$(cd $binDir && pwd -L)"
+ projectDir="${binDir%/bin}"
+-
+-# the prefix dir (same as $FOAM_INST_DIR):
+ prefixDir="${projectDir%/*}"
++fi
++projectDirName="${projectDir##*/}" # The project directory name
+
+-# the name used for the project directory
+-projectDirName="${projectDir##*/}"
++projectName="${WM_PROJECT:-OpenFOAM}" # The project name
++projectVersion="$WM_PROJECT_VERSION" # Empty? - will be treated later
+
+-# version number used for debian packaging
+-unset versionNum
+
++#-------------------------------------------------------------------------------
++
++# Guess project version or simply get the stem part of the projectDirName.
++# Handle standard and debian naming conventions.
+ #
+-# handle standard and debian naming convention
++# - projectVersion: update unless already set
+ #
+-case "$projectDirName" in
+-OpenFOAM-*) # standard naming convention OpenFOAM-<VERSION>
+- version="${projectDirName##OpenFOAM-}"
+- ;;
++# Helper variables:
++# - dirBase (for reassembling name) == projectDirName without the version
++# - versionNum (debian packaging)
++unset dirBase versionNum
++guessVersion()
++{
++ local version
+
+-openfoam[0-9]* | openfoam-dev) # debian naming convention 'openfoam<VERSION>'
+- versionNum="${projectDirName##openfoam}"
+- case "$versionNum" in
+- ??) # convert 2 digit version number to decimal delineated
+- version=$(echo "$versionNum" | sed -e 's@\(.\)\(.\)@\1.\2@')
+- ;;
+- ???) # convert 3 digit version number to decimal delineated
+- version=$(echo "$versionNum" | sed -e 's@\(.\)\(.\)\(.\)@\1.\2.\3@')
+- ;;
+- ????) # convert 4 digit version number to decimal delineated
+- version=$(echo "$versionNum" | sed -e 's@\(.\)\(.\)\(.\)\(.\)@\1.\2.\3.\4@')
+- ;;
+- *) # failback - use current environment setting
+- version="$WM_PROJECT_VERSION"
++ case "$projectDirName" in
++ (OpenFOAM-* | openfoam-*)
++ # Standard naming: OpenFOAM-<VERSION> or openfoam-<VERSION>
++ dirBase="${projectDirName%%-*}-"
++ version="${projectDirName#*-}"
++ version="${version%%*-}" # Extra safety, eg openfoam-version-packager
++ ;;
++
++ (OpenFOAM+* | openfoam+*)
++ # Alternative naming: OpenFOAM+<VERSION> or openfoam+<VERSION>
++ dirBase="${projectDirName%%+*}+"
++ version="${projectDirName#*+}"
++ version="${version%%*-}" # Extra safety, eg openfoam-version-packager
++ ;;
++
++ (openfoam[0-9]*)
++ # Debian naming: openfoam<VERSION>
++ dirBase="openfoam"
++ version="${projectDirName#openfoam}"
++ versionNum="$version"
++
++ # Convert digits version number to decimal delineated
++ case "${#versionNum}" in (2|3|4)
++ version=$(echo "$versionNum" | sed -e 's@\([0-9]\)@\1.@g')
++ version="${version%.}"
+ ;;
+ esac
++
++ # Ignore special treatment if no decimals were inserted.
++ [ "${#version}" -gt "${#versionNum}" ] || unset versionNum
+ ;;
+
+-*)
+- echo "Error : unknown/unsupported naming convention"
+- exit 1
++ (*)
++ die "unknown/unsupported naming convention for '$projectDirName'"
+ ;;
+ esac
+
++ # Set projectVersion if required
++ : ${projectVersion:=$version}
++}
++
++
++# Set projectVersion and update versionNum, projectDirName accordingly
++setVersion()
++{
++ projectVersion="$1"
++
++ # Need dirBase when reassembling projectDirName
++ [ -n "$dirBase" ] || guessVersion
++
++ # Debian: update x.y.z -> xyz version
++ if [ -n "$versionNum" ]
++ then
++ versionNum=$(echo "$projectVersion" | sed -e 's@\.@@g')
++ fi
++
++ projectDirName="$dirBase${versionNum:-$projectVersion}"
++}
++
+
+-# default mode is 'ugo'
+-mode=ugo
+-unset optAll optList optQuiet optSilent
++optMode=ugo # Default mode is always 'ugo'
++unset optAll optList optShell optVersion
+
+-# parse options
++# Parse options
+ while [ "$#" -gt 0 ]
+ do
+ case "$1" in
+@@ -141,27 +210,45 @@
+ ;;
+ -a | -all)
+ optAll=true
++ unset optShell
+ ;;
+ -l | -list)
+ optList=true
++ unset optShell
++ ;;
++ -list-test)
++ optList='test'
++ unset optShell
++ ;;
++ -csh | -sh | -csh-verbose | -sh-verbose)
++ optShell="${1#-}"
++ unset optAll
++ ;;
++ -mode=[ugo]*)
++ optMode="${1#*=}"
++ ;;
++ -prefix=/*)
++ prefixDir="${1#*=}"
++ prefixDir="${prefixDir%/}"
++ ;;
++ -version=*)
++ optVersion="${1#*=}"
+ ;;
+ -m | -mode)
+- [ "$#" -ge 2 ] || usage "'$1' option requires an argument"
+- mode="$2"
+-
+- # sanity check:
+- case "$mode" in
+- *u* | *g* | *o* )
++ optMode="$2"
++ shift
++ # Sanity check. Handles missing argument too.
++ case "$optMode" in
++ ([ugo]*)
+ ;;
+- *)
+- usage "'$1' option with invalid mode '$mode'"
++ (*)
++ die "invalid mode '$optMode'"
+ ;;
+ esac
+- shift
+ ;;
+ -p | -prefix)
+- [ "$#" -ge 2 ] || usage "'$1' option requires an argument"
+- prefixDir="$2"
++ [ "$#" -ge 2 ] || die "'$1' option requires an argument"
++ prefixDir="${2%/}"
+ shift
+ ;;
+ -q | -quiet)
+@@ -171,13 +258,8 @@
+ optSilent=true
+ ;;
+ -v | -version)
+- [ "$#" -ge 2 ] || usage "'$1' option requires an argument"
+- version="$2"
+- # convert x.y.z -> xyz version (if installation looked like debian)
+- if [ -n "$versionNum" ]
+- then
+- versionNum=$(echo "$version" | sed -e 's@\.@@g')
+- fi
++ [ "$#" -ge 2 ] || die "'$1' option requires an argument"
++ optVersion="$2"
+ shift
+ ;;
+ --)
+@@ -185,7 +267,7 @@
+ break
+ ;;
+ -*)
+- usage "unknown option: '$*'"
++ die "unknown option: '$1'"
+ ;;
+ *)
+ break
+@@ -195,11 +277,28 @@
+ done
+
+
+-# debugging:
+-# echo "Installed locations:"
+-# for i in projectDir prefixDir projectDirName version versionNum
++#-------------------------------------------------------------------------------
++
++if [ -n "$optVersion" ]
++then
++ setVersion $optVersion
++elif [ -z "$projectVersion" ]
++then
++ guessVersion
++fi
++
++# Updates:
++# - projectDir for changes via -prefix or -version
++# - projectSite for changes via -prefix
++projectDir="$prefixDir/$projectDirName"
++projectSite="${WM_PROJECT_SITE:-$prefixDir/site}"
++
++
++# Debugging:
++# echo "Installed locations:" 1>&2
++# for i in projectDir prefixDir projectDirName projectVersion
+ # do
+-# eval echo "$i=\$$i"
++# eval echo "$i=\$$i" 1>&2
+ # done
+
+
+@@ -210,30 +309,18 @@
+
+ # Define the various places to be searched:
+ unset dirList
+-case "$mode" in
+-*u*) # user
+- userDir="$HOME/.${WM_PROJECT:-OpenFOAM}"
+- dirList="$dirList $userDir/$version $userDir"
++case "$optMode" in (*u*) # (U)ser
++ dirList="$dirList $HOME/.$projectName/$projectVersion $HOME/.$projectName"
+ ;;
+ esac
+
+-case "$mode" in
+-*g*) # group (site)
+- siteDir="${WM_PROJECT_SITE:-$prefixDir/site}"
+- dirList="$dirList $siteDir/$version $siteDir"
++case "$optMode" in (*g*) # (G)roup == site
++ dirList="$dirList $projectSite/$projectVersion $projectSite"
+ ;;
+ esac
+
+-case "$mode" in
+-*o*) # other (shipped)
+- if [ -n "$versionNum" ]
+- then
+- # debian packaging
+- dirList="$dirList $prefixDir/openfoam$versionNum/etc"
+- else
+- # standard packaging
+- dirList="$dirList $prefixDir/${WM_PROJECT:-OpenFOAM}-$version/etc"
+- fi
++case "$optMode" in (*o*) # (O)ther == shipped
++ dirList="$dirList $projectDir/etc"
+ ;;
+ esac
+ set -- $dirList
+@@ -244,50 +331,87 @@
+ #
+
+ exitCode=0
+-if [ "$optList" = true ]
++if [ -n "$optList" ]
+ then
+
+- # list directories, or potential file locations
+- [ "$nArgs" -le 1 ] || usage
++ # List directories, or potential file locations
++ [ "$nArgs" -le 1 ] || \
++ die "-list expects 0 or 1 filename, but $nArgs provided"
++
++ # A silly combination, but -quiet does have precedence
++ [ -n "$optQuiet" ] && exit 0
+
+- # a silly combination, but -quiet does have precedence
+- [ "$optQuiet" = true ] && exit 0
++ # Test for directory or file too?
++ if [ "$optList" = "test" ]
++ then
++ exitCode=2 # Fallback to a general error (file not found)
+
++ if [ "$nArgs" -eq 1 ]
++ then
+ for dir
+ do
+- if [ "$nArgs" -eq 1 ]
++ resolved="$dir/$fileName"
++ if [ -f "$resolved" ]
+ then
+- echo "$dir/$fileName"
++ echo "$resolved"
++ exitCode=0 # OK
++ fi
++ done
+ else
++ for dir
++ do
++ if [ -d "$dir" ]
++ then
+ echo "$dir"
++ exitCode=0 # OK
+ fi
+ done
++ fi
++ else
++ for dir
++ do
++ echo "$dir${fileName:+/}$fileName"
++ done
++ fi
+
+ else
+
+- [ "$nArgs" -eq 1 ] || usage
++ [ "$nArgs" -eq 1 ] || die "One filename expected - $nArgs provided"
+
+- # general error, eg file not found
+- exitCode=2
++ exitCode=2 # Fallback to a general error (file not found)
+
+ for dir
+ do
+ if [ -f "$dir/$fileName" ]
+ then
+ exitCode=0
+- if [ "$optQuiet" = true ]
+- then
++ [ -n "$optQuiet" ] && break
++
++ case "$optShell" in
++ (*verbose)
++ echo "Using: $dir/$fileName" 1>&2
++ ;;
++ esac
++
++ case "$optShell" in
++ csh*)
++ echo "source $dir/$fileName"
+ break
+- else
++ ;;
++ sh*)
++ echo ". $dir/$fileName"
++ break
++ ;;
++ *)
+ echo "$dir/$fileName"
+- [ "$optAll" = true ] || break
+- fi
++ [ -n "$optAll" ] || break
++ ;;
++ esac
+ fi
+ done
+
+ fi
+
+-
+ exit $exitCode
+
+ #------------------------------------------------------------------------------
diff --git a/var/spack/repos/builtin/packages/openfoam-com/openfoam-build-1612.patch b/var/spack/repos/builtin/packages/openfoam-com/openfoam-build-1612.patch
new file mode 100644
index 0000000000..26e2d8f085
--- /dev/null
+++ b/var/spack/repos/builtin/packages/openfoam-com/openfoam-build-1612.patch
@@ -0,0 +1,17 @@
+--- OpenFOAM-v1612+.orig/Allwmake 2016-12-23 15:22:59.000000000 +0100
++++ OpenFOAM-v1612+/Allwmake 2017-03-29 09:08:15.503865203 +0200
+@@ -17,6 +17,14 @@
+ exit 1
+ }
+
++#------------------------------------------------------------------------------
++echo "========================================"
++date "+%Y-%m-%d %H:%M:%S %z" 2>/dev/null || echo "date is unknown"
++echo "Starting ${WM_PROJECT_DIR##*/} ${0##*/}"
++echo " $WM_COMPILER $WM_COMPILER_TYPE compiler"
++echo " ${WM_OPTIONS}, with ${WM_MPLIB} ${FOAM_MPI}"
++echo
++
+ # Compile wmake support applications
+ (cd wmake/src && make)
+
diff --git a/var/spack/repos/builtin/packages/openfoam-com/openfoam-etc-1612.patch b/var/spack/repos/builtin/packages/openfoam-com/openfoam-etc-1612.patch
new file mode 100644
index 0000000000..dd8146e953
--- /dev/null
+++ b/var/spack/repos/builtin/packages/openfoam-com/openfoam-etc-1612.patch
@@ -0,0 +1,41 @@
+--- OpenFOAM-v1612+.orig/etc/bashrc 2016-12-23 15:22:59.000000000 +0100
++++ OpenFOAM-v1612+/etc/bashrc 2017-03-22 16:05:05.751237072 +0100
+@@ -42,7 +42,8 @@
+ #
+ # Please set to the appropriate path if the default is not correct.
+ #
+-[ $BASH_SOURCE ] && FOAM_INST_DIR=$(\cd ${BASH_SOURCE%/*/*/*} && \pwd -P) || \
++rc="${BASH_SOURCE:-${ZSH_NAME:+$0}}"
++[ -n "$rc" ] && FOAM_INST_DIR=$(\cd $(dirname $rc)/../.. && \pwd -L) || \
+ FOAM_INST_DIR=$HOME/$WM_PROJECT
+ # FOAM_INST_DIR=~$WM_PROJECT
+ # FOAM_INST_DIR=/opt/$WM_PROJECT
+@@ -135,8 +136,10 @@
+ # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+ . $WM_PROJECT_DIR/etc/config.sh/functions
+
+-# Add in preset user or site preferences:
+-_foamSource `$WM_PROJECT_DIR/bin/foamEtcFile prefs.sh`
++# Override definitions via prefs, with 'other' first so the sys-admin
++# can provide base values independent of WM_PROJECT_SITE
++_foamSource `$WM_PROJECT_DIR/bin/foamEtcFile -mode o prefs.sh`
++_foamSource `$WM_PROJECT_DIR/bin/foamEtcFile -mode ug prefs.sh`
+
+ # Evaluate command-line parameters and record settings for later
+ # these can be used to set/unset values, or specify alternative pref files
+diff -uw OpenFOAM-v1612+.orig/etc/cshrc OpenFOAM-v1612+/etc/cshrc
+--- OpenFOAM-v1612+.orig/etc/cshrc 2016-12-23 15:22:59.000000000 +0100
++++ OpenFOAM-v1612+/etc/cshrc 2017-03-22 16:04:51.839291067 +0100
+@@ -148,8 +148,10 @@
+ # Source files, possibly with some verbosity
+ alias _foamSource 'if ($?FOAM_VERBOSE && $?prompt) echo "Sourcing: \!*"; if (\!* != "") source \!*'
+
+-# Add in preset user or site preferences:
+-_foamSource `$WM_PROJECT_DIR/bin/foamEtcFile prefs.csh`
++# Override definitions via prefs, with 'other' first so the sys-admin
++# can provide base values independent of WM_PROJECT_SITE
++_foamSource `$WM_PROJECT_DIR/bin/foamEtcFile -mode o prefs.csh`
++_foamSource `$WM_PROJECT_DIR/bin/foamEtcFile -mode ug prefs.csh`
+
+ # Evaluate command-line parameters and record settings for later
+ # these can be used to set/unset values, or specify alternative pref files
diff --git a/var/spack/repos/builtin/packages/openfoam-com/openfoam-mpi-1612.patch b/var/spack/repos/builtin/packages/openfoam-com/openfoam-mpi-1612.patch
new file mode 100644
index 0000000000..b3663b0a49
--- /dev/null
+++ b/var/spack/repos/builtin/packages/openfoam-com/openfoam-mpi-1612.patch
@@ -0,0 +1,36 @@
+--- OpenFOAM-v1612+.orig/etc/config.sh/mpi 2016-12-23 15:22:59.000000000 +0100
++++ OpenFOAM-v1612+/etc/config.sh/mpi 2017-03-29 13:55:57.507980699 +0200
+@@ -75,8 +75,15 @@
+ _foamAddMan $MPI_ARCH_PATH/share/man
+ ;;
+
++USERMPI)
++ # Use an arbitrary, user-specified mpi implementation
++ export FOAM_MPI=mpi-user
++ _foamSource `$WM_PROJECT_DIR/bin/foamEtcFile config.sh/mpi-user`
++ ;;
++
+ SYSTEMMPI)
+ export FOAM_MPI=mpi-system
++ _foamSource `$WM_PROJECT_DIR/bin/foamEtcFile config.sh/mpi-system`
+
+ if [ -z "$MPI_ROOT" ]
+ then
+--- OpenFOAM-v1612+.orig/etc/config.csh/mpi 2016-12-23 15:22:59.000000000 +0100
++++ OpenFOAM-v1612+/etc/config.csh/mpi 2017-03-29 13:56:36.347835938 +0200
+@@ -71,8 +71,15 @@
+ _foamAddMan $MPI_ARCH_PATH/share/man
+ breaksw
+
++case USERMPI:
++ # Use an arbitrary, user-specified mpi implementation
++ setenv FOAM_MPI mpi-user
++ _foamSource `$WM_PROJECT_DIR/bin/foamEtcFile config.csh/mpi-user`
++ breaksw
++
+ case SYSTEMMPI:
+ setenv FOAM_MPI mpi-system
++ _foamSource `$WM_PROJECT_DIR/bin/foamEtcFile config.csh/mpi-system`
+
+ if ( ! ($?MPI_ROOT) ) then
+ echo
diff --git a/var/spack/repos/builtin/packages/openfoam-com/openfoam-site.patch b/var/spack/repos/builtin/packages/openfoam-com/openfoam-site.patch
new file mode 100644
index 0000000000..6631025788
--- /dev/null
+++ b/var/spack/repos/builtin/packages/openfoam-com/openfoam-site.patch
@@ -0,0 +1,42 @@
+diff -uw OpenFOAM-v1612+.orig/etc/config.sh/settings OpenFOAM-v1612+/etc/config.sh/settings
+--- OpenFOAM-v1612+.orig/etc/config.sh/settings 2016-12-23 15:22:59.000000000 +0100
++++ OpenFOAM-v1612+/etc/config.sh/settings 2017-03-23 12:22:52.002101020 +0100
+@@ -141,7 +141,7 @@
+ #------------------------------------------------------------------------------
+
+ # Location of the jobControl directory
+-export FOAM_JOB_DIR=$WM_PROJECT_INST_DIR/jobControl
++export FOAM_JOB_DIR=$HOME/$WM_PROJECT/jobControl #SPACK: non-central location
+
+ # wmake configuration
+ export WM_DIR=$WM_PROJECT_DIR/wmake
+@@ -157,7 +157,7 @@
+ export FOAM_EXT_LIBBIN=$WM_THIRD_PARTY_DIR/platforms/$WM_ARCH$WM_COMPILER$WM_PRECISION_OPTION$WM_LABEL_OPTION/lib
+
+ # Site-specific directory
+-siteDir="${WM_PROJECT_SITE:-$WM_PROJECT_INST_DIR/site}"
++siteDir="${WM_PROJECT_SITE:-$WM_PROJECT_DIR/site}" #SPACK: not in parent directory
+
+ # Shared site executables/libraries
+ # Similar naming convention as ~OpenFOAM expansion
+diff -uw OpenFOAM-v1612+.orig/etc/config.csh/settings OpenFOAM-v1612+/etc/config.csh/settings
+--- OpenFOAM-v1612+.orig/etc/config.csh/settings 2016-12-23 15:22:59.000000000 +0100
++++ OpenFOAM-v1612+/etc/config.csh/settings 2017-03-23 12:23:52.737891912 +0100
+@@ -137,7 +137,7 @@
+ #------------------------------------------------------------------------------
+
+ # Location of the jobControl directory
+-setenv FOAM_JOB_DIR $WM_PROJECT_INST_DIR/jobControl
++setenv FOAM_JOB_DIR $HOME/$WM_PROJECT/jobControl #SPACK: non-central location
+
+ # wmake configuration
+ setenv WM_DIR $WM_PROJECT_DIR/wmake
+@@ -156,7 +156,7 @@
+ if ( $?WM_PROJECT_SITE ) then
+ set siteDir=$WM_PROJECT_SITE
+ else
+- set siteDir=$WM_PROJECT_INST_DIR/site
++ set siteDir=$WM_PROJECT_DIR/site #SPACK: not in parent directory
+ endif
+
+ # Shared site executables/libraries
diff --git a/var/spack/repos/builtin/packages/openfoam-com/package.py b/var/spack/repos/builtin/packages/openfoam-com/package.py
new file mode 100644
index 0000000000..fcd33eadcb
--- /dev/null
+++ b/var/spack/repos/builtin/packages/openfoam-com/package.py
@@ -0,0 +1,722 @@
+##############################################################################
+# Copyright (c) 2017 Mark Olesen, OpenCFD Ltd.
+#
+# This file was authored by Mark Olesen <mark.olesen@esi-group.com>
+# and is released as part of spack under the LGPL license.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for the LLNL notice and the LGPL.
+#
+# License
+# -------
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+# Legal Notice
+# ------------
+# OPENFOAM is a trademark owned by OpenCFD Ltd
+# (producer and distributor of the OpenFOAM software via www.openfoam.com).
+# The trademark information must remain visible and unadulterated in this
+# file and via the "spack info" output, and comply with the terms set by
+# http://openfoam.com/legal/trademark-policy.php
+#
+# This file is not part of OpenFOAM, nor does it constitute a component of an
+# OpenFOAM distribution.
+#
+##############################################################################
+#
+# Notes
+# - mpi handling: WM_MPLIB=USERMPI and use spack to populate an appropriate
+# configuration and generate wmake rules for 'USER' and 'USERMPI'
+# mpi implementations.
+#
+# - Resolution of flex, zlib needs more attention (within OpenFOAM)
+# - +paraview:
+# depends_on should just be 'paraview+plugins' but that resolves poorly.
+# Workaround: use preferred variants "+plugins +qt"
+# packages:
+# paraview:
+# variants: +plugins +qt
+# in ~/.spack/packages.yaml
+#
+# - Combining +zoltan with +int64 has not been tested, but probably won't work.
+#
+##############################################################################
+from spack import *
+from spack.environment import *
+
+import glob
+import re
+import shutil
+import os
+from os.path import isdir, isfile
+
+# Not the nice way of doing things, but is a start for refactoring
+__all__ = [
+ 'format_export',
+ 'format_setenv',
+ 'write_environ',
+ 'rewrite_environ_files',
+ 'mplib_content',
+ 'generate_mplib_rules',
+ 'generate_compiler_rules',
+]
+
+
+def format_export(key, value):
+ """Format key,value pair as 'export' with newline for POSIX shell."""
+ return 'export {0}={1}\n'.format(key, value)
+
+
+def format_setenv(key, value):
+ """Format key,value pair as 'setenv' with newline for C-shell."""
+ return 'setenv {0} {1}\n'.format(key, value)
+
+
+def _write_environ_entries(outfile, environ, formatter):
+ """Write environment settings as 'export' or 'setenv'.
+ If environ is a dict, write in sorted order.
+ If environ is a list, write pair-wise.
+ Also descends into sub-dict and sub-list, but drops the key.
+ """
+ if isinstance(environ, dict):
+ for key in sorted(environ):
+ entry = environ[key]
+ if isinstance(entry, dict):
+ _write_environ_entries(outfile, entry, formatter)
+ elif isinstance(entry, list):
+ _write_environ_entries(outfile, entry, formatter)
+ else:
+ outfile.write(formatter(key, entry))
+ elif isinstance(environ, list):
+ for item in environ:
+ outfile.write(formatter(item[0], item[1]))
+
+
+def _write_environ_file(output, environ, formatter):
+ """Write environment settings as 'export' or 'setenv'.
+ If environ is a dict, write in sorted order.
+ If environ is a list, write pair-wise.
+ Also descends into sub-dict and sub-list, but drops the key.
+ """
+ with open(output, 'w') as outfile:
+ outfile.write('# SPACK settings\n\n')
+ _write_environ_entries(outfile, environ, formatter)
+
+
+def write_environ(environ, **kwargs):
+ """Write environment settings as 'export' or 'setenv'.
+ If environ is a dict, write in sorted order.
+ If environ is a list, write pair-wise.
+
+ Keyword Options:
+ posix[=None] If set, the name of the POSIX file to rewrite.
+ cshell[=None] If set, the name of the C-shell file to rewrite.
+ """
+ posix = kwargs.get('posix', None)
+ if posix:
+ _write_environ_file(posix, environ, format_export)
+ cshell = kwargs.get('cshell', None)
+ if cshell:
+ _write_environ_file(cshell, environ, format_setenv)
+
+
+def rewrite_environ_files(environ, **kwargs):
+ """Use filter_file to rewrite (existing) POSIX shell or C-shell files.
+ Keyword Options:
+ posix[=None] If set, the name of the POSIX file to rewrite.
+ cshell[=None] If set, the name of the C-shell file to rewrite.
+ """
+ posix = kwargs.get('posix', None)
+ if posix and isfile(posix):
+ for k, v in environ.iteritems():
+ filter_file(
+ r'^(\s*export\s+%s)=.*$' % k,
+ r'\1=%s' % v,
+ posix,
+ backup=False)
+ cshell = kwargs.get('cshell', None)
+ if cshell and isfile(cshell):
+ for k, v in environ.iteritems():
+ filter_file(
+ r'^(\s*setenv\s+%s)\s+.*$' % k,
+ r'\1 %s' % v,
+ cshell,
+ backup=False)
+
+
+def pkglib(package):
+ """Get lib64 or lib from package prefix"""
+ libdir = package.prefix.lib64
+ if isdir(libdir):
+ return libdir
+ return package.prefix.lib
+
+
+def mplib_content(spec, pre=None):
+ """The mpi settings to have wmake
+ use spack information with minimum modifications to OpenFOAM.
+
+    Optional parameter 'pre' to provide an alternative prefix
+ """
+ mpi_spec = spec['mpi']
+ bin = mpi_spec.prefix.bin
+ inc = mpi_spec.prefix.include
+ lib = pkglib(mpi_spec)
+ if pre:
+ bin = join_path(pre, os.path.basename(bin))
+ inc = join_path(pre, os.path.basename(inc))
+ lib = join_path(pre, os.path.basename(lib))
+ else:
+ pre = mpi_spec.prefix
+
+ info = {
+ 'name': '{0}-{1}'.format(mpi_spec.name, mpi_spec.version),
+ 'prefix': pre,
+ 'include': inc,
+ 'bindir': bin,
+ 'libdir': lib,
+ 'FLAGS': '-DOMPI_SKIP_MPICXX -DMPICH_IGNORE_CXX_SEEK',
+ 'PINC': '-I{0}'.format(inc),
+ 'PLIBS': '-L{0} -lmpi'.format(lib),
+ }
+ return info
+
+
+def generate_mplib_rules(directory, spec):
+ """ Create mplibUSER,mplibUSERMPI rules in the specified directory"""
+ content = mplib_content(spec)
+ with working_dir(directory):
+ for mplib in ['mplibUSER', 'mplibUSERMPI']:
+ with open(mplib, 'w') as out:
+ out.write("""# Use mpi from spack ({name})\n
+PFLAGS = {FLAGS}
+PINC = {PINC}
+PLIBS = {PLIBS}
+""".format(**content))
+
+
+def generate_compiler_rules(directory, compOpt, value):
+ """ Create cSPACKOpt,c++SPACKOpt rules in the specified directory.
+ The file content is copied and filtered from the corresponding
+ cOpt,c++Opt rules"""
+ # Compiler options for SPACK - eg, wmake/rules/linux64Gcc/
+ # Copy from existing cOpt, c++Opt and modify DBUG value
+ with working_dir(directory):
+ for lang in ['c', 'c++']:
+ src = '{0}Opt'.format(lang)
+ dst = '{0}{1}'.format(lang, compOpt)
+ shutil.copyfile(src, dst) # src -> dst
+ filter_file(
+ r'^(\S+DBUG\s*)=.*$',
+ r'\1= %s' % value,
+ dst,
+ backup=False)
+
+
+class OpenfoamCom(Package):
+ """OpenFOAM is a GPL-opensource C++ CFD-toolbox.
+ This offering is supported by OpenCFD Ltd,
+ producer and distributor of the OpenFOAM software via www.openfoam.com,
+ and owner of the OPENFOAM trademark.
+ OpenCFD Ltd has been developing and releasing OpenFOAM since its debut
+ in 2004.
+ """
+
+ homepage = "http://www.openfoam.com/"
+ baseurl = "https://sourceforge.net/projects/openfoamplus/files"
+
+ version('1612', 'ca02c491369150ab127cbb88ec60fbdf',
+ url=baseurl + '/v1612+/OpenFOAM-v1612+.tgz')
+
+ variant('int64', default=False,
+ description='Compile with 64-bit labels')
+ variant('float32', default=False,
+ description='Compile with 32-bit scalar (single-precision)')
+ variant('knl', default=False,
+ description='Use KNL compiler settings')
+
+ variant('scotch', default=True,
+ description='With scotch/ptscotch for decomposition')
+ variant('metis', default=False,
+ description='With metis for decomposition')
+ variant('zoltan', default=False,
+ description='With zoltan renumbering')
+ # TODO?# variant('parmgridgen', default=True,
+ # TODO?# description='With parmgridgen support')
+ variant('source', default=True,
+ description='Install library/application sources and tutorials')
+
+ variant('paraview', default=True,
+ description='Build paraview plugins and runtime post-processing')
+
+ #: Map spack compiler names to OpenFOAM compiler names
+ # By default, simply capitalize the first letter
+ compiler_mapping = {'intel': 'icc'}
+
+ provides('openfoam')
+ depends_on('mpi')
+ depends_on('zlib')
+ depends_on('fftw')
+ depends_on('boost')
+ depends_on('cgal')
+ depends_on('flex@:2.6.1') # <- restriction due to scotch
+ depends_on('cmake', type='build')
+
+ # Require scotch with ptscotch - corresponds to standard OpenFOAM setup
+ depends_on('scotch~int64+mpi', when='+scotch~int64')
+ depends_on('scotch+int64+mpi', when='+scotch+int64')
+ depends_on('metis@5:', when='+metis')
+ depends_on('metis+int64', when='+metis+int64')
+ depends_on('parmgridgen', when='+parmgridgen')
+ depends_on('zoltan', when='+zoltan')
+
+ # For OpenFOAM plugins and run-time post-processing this should just be
+ # 'paraview+plugins' but that resolves poorly.
+ # Workaround: use preferred variants "+plugins +qt" in
+ # ~/.spack/packages.yaml
+
+ # 1612 plugins need older paraview
+ # The native reader in paraview 5.2 is broken, so start after that
+ depends_on('paraview@:5.0.1', when='@:1612+paraview')
+ depends_on('paraview@5.3:', when='@1706:+paraview')
+
+ # General patches
+ patch('openfoam-site.patch')
+
+ # Version-specific patches
+ patch('openfoam-bin-1612.patch', when='@1612')
+ patch('openfoam-etc-1612.patch', when='@1612')
+ patch('openfoam-mpi-1612.patch', when='@1612')
+ patch('openfoam-build-1612.patch', when='@1612')
+ patch('scotch-metis-lib-1612.patch', when='@1612')
+ patch('zoltan-lib-1612.patch', when='@1612')
+
+ # Some user settings, to be adjusted manually or via variants
+ foam_cfg = {
+ 'WM_COMPILER': 'Gcc', # <- %compiler
+ 'WM_ARCH_OPTION': '64', # (32/64-bit on x86_64)
+ 'WM_LABEL_SIZE': '32', # <- +int64
+ 'WM_PRECISION_OPTION': 'DP', # <- +float32
+ 'WM_COMPILE_OPTION': 'SPACKOpt', # Do not change
+ 'WM_MPLIB': 'USERMPI', # Use user mpi for spack
+ }
+
+ # The system description is frequently needed
+ foam_sys = {
+ 'WM_ARCH': None,
+ 'WM_COMPILER': None,
+ 'WM_OPTIONS': None,
+ }
+
+ # Content for etc/prefs.{csh,sh}
+ etc_prefs = {}
+
+ # Content for etc/config.{csh,sh}/ files
+ etc_config = {}
+
+ build_script = './spack-Allwmake' # <- Generated by patch() method.
+ # phases = ['configure', 'build', 'install']
+ # build_system_class = 'OpenfoamCom'
+
+ # Add symlinks into bin/, lib/ (eg, for other applications)
+ extra_symlinks = False
+
+ # Quickly enable/disable testing with the current develop branch
+ if False:
+ version(
+ 'plus',
+ branch='develop',
+ git='file://{0}/{1}'
+ .format(os.path.expanduser("~"), 'openfoam/OpenFOAM-plus/.git'))
+
def setup_environment(self, spack_env, run_env):
    """Export WM_PROJECT_DIR into the run-time (module) environment."""
    run_env.set('WM_PROJECT_DIR', self.projectdir)
+
@property
def projectdir(self):
    """Absolute location of project directory: WM_PROJECT_DIR/

    This package installs directly under the spack prefix.
    """
    return self.prefix
+
@property
def etc(self):
    """Absolute location of the OpenFOAM etc/ directory
    (holds bashrc/cshrc and the config.{sh,csh}/ files)."""
    return join_path(self.projectdir, 'etc')
+
@property
def archbin(self):
    """Relative location of architecture-specific executables,
    eg, platforms/<WM_OPTIONS>/bin (see wm_options)."""
    return join_path('platforms', self.wm_options, 'bin')
+
@property
def archlib(self):
    """Relative location of architecture-specific libraries,
    eg, platforms/<WM_OPTIONS>/lib (see wm_options)."""
    return join_path('platforms', self.wm_options, 'lib')
+
@property
def wm_options(self):
    """The architecture+compiler+options string for OpenFOAM.

    Delegates to set_openfoam(), which caches the value in
    foam_sys['WM_OPTIONS'] after the first call.
    """
    return self.set_openfoam()
+
@property
def rpath_info(self):
    """Rpath flags for the generated 'SPACKOpt' wmake rule, pointing at
    both the final install lib/ directory and the build-tree lib/
    directory (needed while still building in the stage).
    """
    # Order matters: install location first, then the stage copy
    libdirs = [
        join_path(self.projectdir, self.archlib),
        join_path(self.stage.source_path, self.archlib),
    ]
    rpath_arg = self.compiler.cxx_rpath_arg
    return ' '.join(rpath_arg + d for d in libdirs)
+
def openfoam_arch(self):
    """Return an architecture value similar to what OpenFOAM does in
    etc/config.sh/settings, but slightly more generous.
    Uses and may adjust foam_cfg[WM_ARCH_OPTION] as a side-effect.

    Returns:
        str: platform tag such as 'linux64', 'linuxPPC64', 'darwinIntel64'.
    """
    # spec.architecture.platform is like `uname -s`, but lower-case
    platform = self.spec.architecture.platform

    # spec.architecture.target is like `uname -m`
    target = self.spec.architecture.target

    if platform == 'linux':
        if target == 'i686':
            self.foam_cfg['WM_ARCH_OPTION'] = '32'  # Force consistency
        elif target == 'x86_64':
            if self.foam_cfg['WM_ARCH_OPTION'] == '64':
                platform += '64'
        elif target == 'ia64':
            platform += 'ia64'
        elif target == 'armv7l':
            platform += 'ARM7'
        # BUG FIX: 'ppc64'/'ppc64le' were bare (undefined) names, which
        # raised NameError at runtime on those targets; compare against
        # string literals like the other branches do.
        elif target == 'ppc64':
            platform += 'PPC64'
        elif target == 'ppc64le':
            platform += 'PPC64le'
    elif platform == 'darwin':
        if target == 'x86_64':
            platform += 'Intel'
            if self.foam_cfg['WM_ARCH_OPTION'] == '64':
                platform += '64'
    # ... and others?
    return platform
+
def openfoam_compiler(self):
    """Capitalized version of the compiler name, which usually corresponds
    to how OpenFOAM will camel-case things.
    Use compiler_mapping to handle special cases.
    Also handle special compiler options (eg, KNL).
    """
    # Translate via the mapping table when present, else keep spack's name
    name = self.compiler_mapping.get(self.compiler.name, self.compiler.name)
    name = name.capitalize()
    # Append option suffix (eg, Knights Landing)
    return name + 'KNL' if '+knl' in self.spec else name
+
def set_openfoam(self):
    """Populate foam_cfg, foam_sys according to
    variants, architecture, compiler.
    Returns WM_OPTIONS.

    Raises RuntimeError when no wmake rule exists for the combination,
    or when WM_COMPILE_OPTION does not end in 'Opt'.
    """
    # Run once: WM_OPTIONS is only non-empty after a previous call,
    # so it doubles as the cache flag
    opts = self.foam_sys['WM_OPTIONS']
    if opts:
        return opts

    wm_arch = self.openfoam_arch()
    wm_compiler = self.openfoam_compiler()
    compileOpt = self.foam_cfg['WM_COMPILE_OPTION']

    # Insist on a wmake rule for this architecture/compiler combination
    archCompiler = wm_arch + wm_compiler
    compiler_rule = join_path(
        self.stage.source_path, 'wmake', 'rules', archCompiler)

    if not isdir(compiler_rule):
        raise RuntimeError(
            'No wmake rule for {0}'.format(archCompiler))
    # The generated compiler rules rely on a '*Opt' suffix
    if not re.match(r'.+Opt$', compileOpt):
        raise RuntimeError(
            "WM_COMPILE_OPTION={0} is not type '*Opt'".format(compileOpt))

    # Adjust for variants
    self.foam_cfg['WM_LABEL_SIZE'] = (
        '64' if '+int64' in self.spec else '32'
    )
    self.foam_cfg['WM_PRECISION_OPTION'] = (
        'SP' if '+float32' in self.spec else 'DP'
    )

    # ----
    # WM_LABEL_OPTION=Int$WM_LABEL_SIZE
    # WM_OPTIONS=$WM_ARCH$WM_COMPILER$WM_PRECISION_OPTION$WM_LABEL_OPTION$WM_COMPILE_OPTION
    # ----
    self.foam_sys['WM_ARCH'] = wm_arch
    self.foam_sys['WM_COMPILER'] = wm_compiler
    self.foam_cfg['WM_COMPILER'] = wm_compiler  # For bashrc,cshrc too
    self.foam_sys['WM_OPTIONS'] = ''.join([
        wm_arch,
        wm_compiler,
        self.foam_cfg['WM_PRECISION_OPTION'],
        'Int', self.foam_cfg['WM_LABEL_SIZE'],  # Int32/Int64
        compileOpt
    ])
    return self.foam_sys['WM_OPTIONS']
+
def patch(self):
    """Adjust OpenFOAM build for spack. Where needed, apply filter as an
    alternative to normal patching.

    Generates the spack-Allwmake wrapper script (build_script) and then
    calls configure() directly, since Package has no configure phase.
    """
    self.set_openfoam()  # May need foam_cfg/foam_sys information

    # Avoid WM_PROJECT_INST_DIR for ThirdParty, site or jobControl.
    # Use openfoam-site.patch to handle jobControl, site.
    #
    # Filter (not patch) bashrc,cshrc for additional flexibility
    wm_setting = {
        'WM_THIRD_PARTY_DIR':
        r'$WM_PROJECT_DIR/ThirdParty #SPACK: No separate third-party',
    }

    rewrite_environ_files(  # Adjust etc/bashrc and etc/cshrc
        wm_setting,
        posix=join_path('etc', 'bashrc'),
        cshell=join_path('etc', 'cshrc'))

    # Adjust ParMGridGen - this is still a mess.
    # We also have no assurances about sizes (int/long, float/double) etc.
    #
    # Need to adjust src/fvAgglomerationMethods/Allwmake
    # "export ParMGridGen=%s" % spec['parmgridgen'].prefix
    #
    # and src/fvAgglomerationMethods/MGridGenGamgAgglomeration/Make/options
    # "-I=%s" % spec['parmgridgen'].include
    # "-L=%s -lmgrid" % spec['parmgridgen'].lib

    # Build wrapper script: sources etc/bashrc before running Allwmake
    with open(self.build_script, 'w') as out:
        out.write(
            """#!/bin/bash
. $PWD/etc/bashrc '' # No arguments
mkdir -p $FOAM_APPBIN $FOAM_LIBBIN 2>/dev/null # Allow interrupt
echo Build openfoam with SPACK
echo WM_PROJECT_DIR = $WM_PROJECT_DIR
./Allwmake $@
#
""")
    set_executable(self.build_script)
    self.configure(self.spec, self.prefix)  # Should be a separate phase
+
def configure(self, spec, prefix):
    """Make adjustments to the OpenFOAM configuration files in their various
    locations: etc/bashrc, etc/config.sh/FEATURE and customizations that
    don't properly fit get placed in the etc/prefs.sh file (similarly for
    csh).
    """
    self.set_openfoam()  # Need foam_cfg/foam_sys information

    # Some settings for filtering bashrc, cshrc
    wm_setting = {}
    wm_setting.update(self.foam_cfg)

    rewrite_environ_files(  # Adjust etc/bashrc and etc/cshrc
        wm_setting,
        posix=join_path('etc', 'bashrc'),
        cshell=join_path('etc', 'cshrc'))

    # Content for etc/prefs.{csh,sh}
    self.etc_prefs = {
        # TODO
        # 'CMAKE_ARCH_PATH': spec['cmake'].prefix,
        # 'FLEX_ARCH_PATH': spec['flex'].prefix,
        # 'ZLIB_ARCH_PATH': spec['zlib'].prefix,
    }

    # MPI content, using MPI_ARCH_PATH
    content = mplib_content(spec, '${MPI_ARCH_PATH}')

    # Content for etc/config.{csh,sh}/ files
    self.etc_config = {
        'CGAL': {
            'BOOST_ARCH_PATH': spec['boost'].prefix,
            'CGAL_ARCH_PATH': spec['cgal'].prefix,
        },
        'FFTW': {
            'FFTW_ARCH_PATH': spec['fftw'].prefix,
        },
        # User-defined MPI
        'mpi-user': [
            ('MPI_ARCH_PATH', spec['mpi'].prefix),  # Absolute
            ('LD_LIBRARY_PATH',
             '"%s:${LD_LIBRARY_PATH}"' % content['libdir']),
            ('PATH', '"%s:${PATH}"' % content['bindir']),
        ],
        'scotch': {},
        'metis': {},
        'paraview': [],
    }

    if '+scotch' in spec:
        self.etc_config['scotch'] = {
            'SCOTCH_ARCH_PATH': spec['scotch'].prefix,
            # For src/parallel/decompose/Allwmake
            'SCOTCH_VERSION': 'scotch-{0}'.format(spec['scotch'].version),
        }

    if '+metis' in spec:
        self.etc_config['metis'] = {
            'METIS_ARCH_PATH': spec['metis'].prefix,
        }

    if '+paraview' in spec:
        pvMajor = 'paraview-{0}'.format(spec['paraview'].version.up_to(2))
        self.etc_config['paraview'] = [
            ('ParaView_DIR', spec['paraview'].prefix),
            ('ParaView_INCLUDE_DIR', '$ParaView_DIR/include/' + pvMajor),
            ('PV_PLUGIN_PATH', '$FOAM_LIBBIN/' + pvMajor),
            ('PATH', '"${ParaView_DIR}/bin:${PATH}"'),
        ]

    # Not normally included as etc/config file
    if '+parmgridgen' in spec:
        self.etc_config['parmgridgen'] = {
            'PARMGRIDGEN_ARCH_PATH': spec['parmgridgen'].prefix
        }

    # Optional
    if '+zoltan' in spec:
        self.etc_config['zoltan'] = {
            'ZOLTAN_ARCH_PATH': spec['zoltan'].prefix
        }

    # Write prefs files according to the configuration.
    # Only need prefs.sh for building, but install both for end-users
    if self.etc_prefs:
        write_environ(
            self.etc_prefs,
            posix=join_path('etc', 'prefs.sh'),
            cshell=join_path('etc', 'prefs.csh'))

    # Adjust components to use SPACK variants
    # BUG FIX: dict.iteritems() is Python-2-only (AttributeError on
    # Python 3); items() behaves identically on both interpreters.
    for component, subdict in self.etc_config.items():
        write_environ(
            subdict,
            posix=join_path('etc', 'config.sh', component),
            cshell=join_path('etc', 'config.csh', component))

    archCompiler = self.foam_sys['WM_ARCH'] + self.foam_sys['WM_COMPILER']
    compileOpt = self.foam_cfg['WM_COMPILE_OPTION']
    general_rule = join_path('wmake', 'rules', 'General')
    compiler_rule = join_path('wmake', 'rules', archCompiler)
    generate_mplib_rules(general_rule, self.spec)
    generate_compiler_rules(compiler_rule, compileOpt, self.rpath_info)
    # Record the spack spec information
    with open("log.spack-spec", 'w') as outfile:
        outfile.write(spec.tree())
+
def build(self, spec, prefix):
    """Build using the OpenFOAM Allwmake script, with a wrapper to source
    its environment first.
    """
    self.set_openfoam()  # Force proper population of foam_cfg/foam_sys
    args = ['-silent']
    if self.parallel:  # Build in parallel? - pass as an argument
        jobs = str(self.make_jobs) if self.make_jobs else ''
        args.append('-j' + jobs)
    Executable(self.build_script)(*args)
+
def install(self, spec, prefix):
    """Install under the projectdir (== prefix)"""
    self.build(spec, prefix)  # Should be a separate phase
    opts = self.wm_options

    mkdirp(self.projectdir)
    projdir = os.path.basename(self.projectdir)
    # Filtered into the installed etc/bashrc,cshrc below so the tree
    # refers to its final location
    wm_setting = {
        'WM_PROJECT_INST_DIR': os.path.dirname(self.projectdir),
        'WM_PROJECT_DIR': join_path('$WM_PROJECT_INST_DIR', projdir),
    }

    # Retain build log file
    out = "spack-build.out"
    if isfile(out):
        install(out, join_path(self.projectdir, "log." + opts))

    # All top-level files, except spack build info and possibly Allwmake
    if '+source' in spec:
        ignored = re.compile(r'^spack-.*')
    else:
        ignored = re.compile(r'^(Allwmake|spack-).*')

    files = [
        f for f in glob.glob("*") if isfile(f) and not ignored.search(f)
    ]
    for f in files:
        install(f, self.projectdir)

    # Having wmake without sources is actually somewhat pointless...
    dirs = ['bin', 'etc', 'wmake']
    if '+source' in spec:
        dirs.extend(['applications', 'src', 'tutorials'])

    for d in dirs:
        install_tree(
            d,
            join_path(self.projectdir, d))

    dirs = ['platforms']
    if '+source' in spec:
        dirs.extend(['doc'])

    # Install platforms (and doc) skipping intermediate targets
    ignored = ['src', 'applications', 'html', 'Guides']
    for d in dirs:
        install_tree(
            d,
            join_path(self.projectdir, d),
            ignore=shutil.ignore_patterns(*ignored))

    rewrite_environ_files(  # Adjust etc/bashrc and etc/cshrc
        wm_setting,
        posix=join_path(self.etc, 'bashrc'),
        cshell=join_path(self.etc, 'cshrc'))
    self.install_links()
+
def install_links(self):
    """Add symlinks into bin/, lib/ (eg, for other applications)"""
    if not self.extra_symlinks:
        return

    # ln -s platforms/linux64GccXXX/lib lib
    with working_dir(self.projectdir):
        if isdir(self.archlib):
            os.symlink(self.archlib, 'lib')

    # (cd bin && ln -s ../platforms/linux64GccXXX/bin/* .)
    with working_dir(join_path(self.projectdir, 'bin')):
        targets = glob.glob(join_path('..', self.archbin, "*"))
        for f in filter(isfile, targets):
            os.symlink(f, os.path.basename(f))
+
+# -----------------------------------------------------------------------------
diff --git a/var/spack/repos/builtin/packages/openfoam-com/scotch-metis-lib-1612.patch b/var/spack/repos/builtin/packages/openfoam-com/scotch-metis-lib-1612.patch
new file mode 100644
index 0000000000..b7530e6320
--- /dev/null
+++ b/var/spack/repos/builtin/packages/openfoam-com/scotch-metis-lib-1612.patch
@@ -0,0 +1,48 @@
+--- OpenFOAM-v1612+.orig/src/parallel/decompose/Allwmake 2017-03-21 16:34:44.599021283 +0100
++++ OpenFOAM-v1612+/src/parallel/decompose/Allwmake 2017-03-21 16:28:57.243969660 +0100
+@@ -36,6 +36,7 @@
+
+ # Library
+ [ -r $FOAM_EXT_LIBBIN/libmetis.so ] || \
++ [ -r $METIS_ARCH_PATH/lib/libmetis.so ] || \
+ [ -r $METIS_ARCH_PATH/lib$WM_COMPILER_LIB_ARCH/libmetis.so ] || \
+ [ "${METIS_ARCH_PATH##*-}" = system ] || {
+ echo "$warning (missing library)"
+@@ -90,6 +91,7 @@
+
+ # Library
+ [ -r $FOAM_EXT_LIBBIN/libscotch.so ] || \
++ [ -r $SCOTCH_ARCH_PATH/lib/libscotch.so ] || \
+ [ -r $SCOTCH_ARCH_PATH/lib$WM_COMPILER_LIB_ARCH/libscotch.so ] || \
+ [ "${SCOTCH_ARCH_PATH##*-}" = system ] || {
+ echo "$warning (missing library)"
+--- OpenFOAM-v1612+.orig/src/parallel/decompose/metisDecomp/Make/options 2017-03-21 16:34:25.383075328 +0100
++++ OpenFOAM-v1612+/src/parallel/decompose/metisDecomp/Make/options 2017-03-21 16:30:15.727758338 +0100
+@@ -8,6 +8,7 @@
+ * to support central, non-thirdparty installations
+ */
+ LIB_LIBS = \
++ -L$(METIS_ARCH_PATH)/lib \
+ -L$(METIS_ARCH_PATH)/lib$(WM_COMPILER_LIB_ARCH) \
+ -L$(FOAM_EXT_LIBBIN) \
+ -lmetis
+--- OpenFOAM-v1612+.orig/src/parallel/decompose/ptscotchDecomp/Make/options 2017-03-21 16:34:34.607049385 +0100
++++ OpenFOAM-v1612+/src/parallel/decompose/ptscotchDecomp/Make/options 2017-03-21 16:30:00.479799399 +0100
+@@ -16,6 +16,7 @@
+ * to support central, non-thirdparty installations
+ */
+ LIB_LIBS = \
++ -L$(SCOTCH_ARCH_PATH)/lib \
+ -L$(SCOTCH_ARCH_PATH)/lib$(WM_COMPILER_LIB_ARCH) \
+ -L$(FOAM_EXT_LIBBIN) \
+ -L$(FOAM_EXT_LIBBIN)/$(FOAM_MPI) \
+--- OpenFOAM-v1612+.orig/src/parallel/decompose/scotchDecomp/Make/options 2017-03-21 16:34:39.159036582 +0100
++++ OpenFOAM-v1612+/src/parallel/decompose/scotchDecomp/Make/options 2017-03-21 16:29:46.719836452 +0100
+@@ -16,6 +16,7 @@
+ * to support central, non-thirdparty installations
+ */
+ LIB_LIBS = \
++ -L$(SCOTCH_ARCH_PATH)/lib \
+ -L$(SCOTCH_ARCH_PATH)/lib$(WM_COMPILER_LIB_ARCH) \
+ -L$(FOAM_EXT_LIBBIN) \
+ -lscotch \
diff --git a/var/spack/repos/builtin/packages/openfoam-com/zoltan-lib-1612.patch b/var/spack/repos/builtin/packages/openfoam-com/zoltan-lib-1612.patch
new file mode 100644
index 0000000000..712e6a7dfd
--- /dev/null
+++ b/var/spack/repos/builtin/packages/openfoam-com/zoltan-lib-1612.patch
@@ -0,0 +1,84 @@
+--- OpenFOAM-v1612+.orig/applications/utilities/mesh/manipulation/renumberMesh/Allwmake 2016-12-23 15:22:59.000000000 +0100
++++ OpenFOAM-v1612+/applications/utilities/mesh/manipulation/renumberMesh/Allwmake 2017-03-28 11:13:35.222727218 +0200
+@@ -4,20 +4,35 @@
+ # Parse arguments for compilation (at least for error catching)
+ . $WM_PROJECT_DIR/wmake/scripts/AllwmakeParseArguments
+
+-export COMPILE_FLAGS=''
+-export LINK_FLAGS=''
++unset COMP_FLAGS LINK_FLAGS
+
+ if [ -f "${FOAM_LIBBIN}/libSloanRenumber.so" ]
+ then
+- echo "Found libSloanRenumber.so -- enabling Sloan renumbering support."
++ echo " found libSloanRenumber -- enabling sloan renumbering support."
+ export LINK_FLAGS="${LINK_FLAGS} -lSloanRenumber"
+ fi
+
+-if [ -f "${ZOLTAN_ARCH_PATH}/lib/libzoltan.a" -a -f "${FOAM_LIBBIN}/libzoltanRenumber.so" ]
++if [ -f "${FOAM_LIBBIN}/libzoltanRenumber.so" ]
+ then
+- echo "Found libzoltanRenumber.so -- enabling zoltan renumbering support."
+- export COMPILE_FLAGS="-DFOAM_USE_ZOLTAN"
+- export LINK_FLAGS="${LINK_FLAGS} -lzoltanRenumber -L${ZOLTAN_ARCH_PATH}/lib -lzoltan"
++ if [ -z "$ZOLTAN_ARCH_PATH" ]
++ then
++ # Optional: get ZOLTAN_ARCH_PATH
++ if settings=$($WM_PROJECT_DIR/bin/foamEtcFile config.sh/zoltan)
++ then
++ . $settings
++ fi
++ fi
++
++ for libdir in lib "lib${WM_COMPILER_LIB_ARCH}"
++ do
++ if [ -f "$ZOLTAN_ARCH_PATH/$libdir/libzoltan.a" ]
++ then
++ echo " found libzoltanRenumber -- enabling zoltan renumbering support."
++ export COMP_FLAGS="-DFOAM_USE_ZOLTAN"
++ export LINK_FLAGS="${LINK_FLAGS} -lzoltanRenumber -L$ZOLTAN_ARCH_PATH/$libdir -lzoltan"
++ break
++ fi
++ done
+ fi
+
+ wmake $targetType
+--- OpenFOAM-v1612+.orig/src/renumber/Allwmake 2016-12-23 15:22:59.000000000 +0100
++++ OpenFOAM-v1612+/src/renumber/Allwmake 2017-03-28 11:10:22.195543610 +0200
+@@ -5,14 +5,11 @@
+ targetType=libso
+ . $WM_PROJECT_DIR/wmake/scripts/AllwmakeParseArguments
+
+-## Get ZOLTAN_ARCH_PATH
+-#if settings=$($WM_PROJECT_DIR/bin/foamEtcFile config.sh/zoltan)
+-#then
+-# . $settings
+-# echo "using ZOLTAN_ARCH_PATH=$ZOLTAN_ARCH_PATH"
+-#else
+-# echo "Error: no config.sh/zoltan settings"
+-#fi
++# Optional: get ZOLTAN_ARCH_PATH
++if settings=$($WM_PROJECT_DIR/bin/foamEtcFile config.sh/zoltan)
++then
++ . $settings
++fi
+
+ wmake $targetType renumberMethods
+
+--- OpenFOAM-v1612+.orig/src/renumber/zoltanRenumber/Make/options 2016-12-23 15:22:59.000000000 +0100
++++ OpenFOAM-v1612+/src/renumber/zoltanRenumber/Make/options 2017-03-28 11:50:46.484343848 +0200
+@@ -4,10 +4,13 @@
+ EXE_INC = \
+ /* -DFULLDEBUG -g -O0 */ \
+ $(PFLAGS) $(PINC) \
++ ${c++LESSWARN} \
+ -I$(FOAM_SRC)/renumber/renumberMethods/lnInclude \
+ -I$(ZOLTAN_ARCH_PATH)/include/ \
+ -I$(LIB_SRC)/meshTools/lnInclude
+
+ LIB_LIBS = \
+- /* -L$(ZOLTAN_ARCH_PATH)/lib -lzoltan */ \
++ -L$(ZOLTAN_ARCH_PATH)/lib \
++ -L$(ZOLTAN_ARCH_PATH)/lib$(WM_COMPILER_LIB_ARCH) \
++ -lzoltan \
+ -lmeshTools
diff --git a/var/spack/repos/builtin/packages/openfoam-org/openfoam-etc-41.patch b/var/spack/repos/builtin/packages/openfoam-org/openfoam-etc-41.patch
new file mode 100644
index 0000000000..6fe3b7b4d3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/openfoam-org/openfoam-etc-41.patch
@@ -0,0 +1,25 @@
+--- OpenFOAM-4.x.orig/etc/bashrc 2016-10-16 16:11:45.000000000 +0200
++++ OpenFOAM-4.x/etc/bashrc 2017-03-24 12:41:25.233267894 +0100
+@@ -43,17 +43,17 @@
+ # Please set to the appropriate path if the default is not correct.
+ #
+ [ $BASH_SOURCE ] && \
+-export FOAM_INST_DIR=$(cd ${BASH_SOURCE%/*/*/*} && pwd -P) || \
+-export FOAM_INST_DIR=$HOME/$WM_PROJECT
+-# export FOAM_INST_DIR=~$WM_PROJECT
+-# export FOAM_INST_DIR=/opt/$WM_PROJECT
+-# export FOAM_INST_DIR=/usr/local/$WM_PROJECT
++FOAM_INST_DIR=$(\cd $(dirname $BASH_SOURCE)/../.. && \pwd -P) || \
++FOAM_INST_DIR=$HOME/$WM_PROJECT
++# FOAM_INST_DIR=/opt/$WM_PROJECT
++# FOAM_INST_DIR=/usr/local/$WM_PROJECT
+ #
+ # Build foamyHexMesh
+ export FOAMY_HEX_MESH=yes
+ #
+ # END OF (NORMAL) USER EDITABLE PART
+ ################################################################################
++export FOAM_INST_DIR
+
+ # The default environment variables below can be overridden in a prefs.sh file
+ # located in ~/.OpenFOAM/$WM_PROJECT_VERSION, ~/.OpenFOAM,
diff --git a/var/spack/repos/builtin/packages/openfoam-org/openfoam-site.patch b/var/spack/repos/builtin/packages/openfoam-org/openfoam-site.patch
new file mode 100644
index 0000000000..6631025788
--- /dev/null
+++ b/var/spack/repos/builtin/packages/openfoam-org/openfoam-site.patch
@@ -0,0 +1,42 @@
+diff -uw OpenFOAM-v1612+.orig/etc/config.sh/settings OpenFOAM-v1612+/etc/config.sh/settings
+--- OpenFOAM-v1612+.orig/etc/config.sh/settings 2016-12-23 15:22:59.000000000 +0100
++++ OpenFOAM-v1612+/etc/config.sh/settings 2017-03-23 12:22:52.002101020 +0100
+@@ -141,7 +141,7 @@
+ #------------------------------------------------------------------------------
+
+ # Location of the jobControl directory
+-export FOAM_JOB_DIR=$WM_PROJECT_INST_DIR/jobControl
++export FOAM_JOB_DIR=$HOME/$WM_PROJECT/jobControl #SPACK: non-central location
+
+ # wmake configuration
+ export WM_DIR=$WM_PROJECT_DIR/wmake
+@@ -157,7 +157,7 @@
+ export FOAM_EXT_LIBBIN=$WM_THIRD_PARTY_DIR/platforms/$WM_ARCH$WM_COMPILER$WM_PRECISION_OPTION$WM_LABEL_OPTION/lib
+
+ # Site-specific directory
+-siteDir="${WM_PROJECT_SITE:-$WM_PROJECT_INST_DIR/site}"
+siteDir="${WM_PROJECT_SITE:-$WM_PROJECT_DIR/site}" #SPACK: not in parent directory
+
+ # Shared site executables/libraries
+ # Similar naming convention as ~OpenFOAM expansion
+diff -uw OpenFOAM-v1612+.orig/etc/config.csh/settings OpenFOAM-v1612+/etc/config.csh/settings
+--- OpenFOAM-v1612+.orig/etc/config.csh/settings 2016-12-23 15:22:59.000000000 +0100
++++ OpenFOAM-v1612+/etc/config.csh/settings 2017-03-23 12:23:52.737891912 +0100
+@@ -137,7 +137,7 @@
+ #------------------------------------------------------------------------------
+
+ # Location of the jobControl directory
+-setenv FOAM_JOB_DIR $WM_PROJECT_INST_DIR/jobControl
++setenv FOAM_JOB_DIR $HOME/$WM_PROJECT/jobControl #SPACK: non-central location
+
+ # wmake configuration
+ setenv WM_DIR $WM_PROJECT_DIR/wmake
+@@ -156,7 +156,7 @@
+ if ( $?WM_PROJECT_SITE ) then
+ set siteDir=$WM_PROJECT_SITE
+ else
+- set siteDir=$WM_PROJECT_INST_DIR/site
++ set siteDir=$WM_PROJECT_DIR/site #SPACK: not in parent directory
+ endif
+
+ # Shared site executables/libraries
diff --git a/var/spack/repos/builtin/packages/openfoam-org/package.py b/var/spack/repos/builtin/packages/openfoam-org/package.py
new file mode 100644
index 0000000000..19ffd40507
--- /dev/null
+++ b/var/spack/repos/builtin/packages/openfoam-org/package.py
@@ -0,0 +1,492 @@
+##############################################################################
+# Copyright (c) 2017 Mark Olesen, OpenCFD Ltd.
+#
+# This file was authored by Mark Olesen <mark.olesen@esi-group.com>
+# and is released as part of spack under the LGPL license.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for the LLNL notice and the LGPL.
+#
+# License
+# -------
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+#
+# Legal Notice
+# ------------
+# OPENFOAM is a trademark owned by OpenCFD Ltd
+# (producer and distributor of the OpenFOAM software via www.openfoam.com).
+# The trademark information must remain visible and unadulterated in this
+# file and via the "spack info" and comply with the term set by
+# http://openfoam.com/legal/trademark-policy.php
+#
+# This file is not part of OpenFOAM, nor does it constitute a component of an
+# OpenFOAM distribution.
+#
+##############################################################################
+#
+# Notes
+# - The openfoam-org package is a modified version of the openfoam-com package.
+# If changes are needed here, consider if they should also be applied there.
+#
+# - Building with boost/cgal is not included, since some of the logic is not
+# entirely clear and thus untested.
+# - Resolution of flex, zlib needs more attention (within OpenFOAM)
+#
+# - mpi handling: WM_MPLIB=SYSTEMMPI and use spack to populate the prefs.sh
+# for it.
+# Also provide wmake rules for special purpose 'USER' and 'USERMPI'
+# mpi implementations, in case these are required.
+#
+##############################################################################
+from spack import *
+from spack.environment import *
+import llnl.util.tty as tty
+
+import multiprocessing
+import glob
+import re
+import shutil
+import os
+from os.path import isdir, isfile
+from spack.pkg.builtin.openfoam_com import *
+
+
+class OpenfoamOrg(Package):
+ """OpenFOAM is a GPL-opensource C++ CFD-toolbox.
+ The openfoam.org release is managed by the OpenFOAM Foundation Ltd as
+ a licensee of the OPENFOAM trademark.
+ This offering is not approved or endorsed by OpenCFD Ltd,
+ producer and distributor of the OpenFOAM software via www.openfoam.com,
+ and owner of the OPENFOAM trademark.
+ """
+
+ homepage = "http://www.openfoam.org/"
+ baseurl = "https://github.com/OpenFOAM"
+ url = "https://github.com/OpenFOAM/OpenFOAM-4.x/archive/version-4.1.tar.gz"
+
+ version('4.1', '318a446c4ae6366c7296b61184acd37c',
+ url=baseurl + '/OpenFOAM-4.x/archive/version-4.1.tar.gz')
+
+ variant('int64', default=False,
+ description='Compile with 64-bit labels')
+ variant('float32', default=False,
+ description='Compile with 32-bit scalar (single-precision)')
+
+ variant('source', default=True,
+ description='Install library/application sources and tutorials')
+
+ #: Map spack compiler names to OpenFOAM compiler names
+ # By default, simply capitalize the first letter
+ compiler_mapping = {'intel': 'icc'}
+
+ provides('openfoam')
+ depends_on('mpi')
+ depends_on('zlib')
+ depends_on('flex@:2.6.1') # <- restriction due to scotch
+ depends_on('cmake', type='build')
+
+ # Require scotch with ptscotch - corresponds to standard OpenFOAM setup
+ depends_on('scotch~int64+mpi', when='~int64')
+ depends_on('scotch+int64+mpi', when='+int64')
+
+ # General patches
+ patch('openfoam-site.patch')
+
+ # Version-specific patches
+ patch('openfoam-etc-41.patch')
+
+ # Some user settings, to be adjusted manually or via variants
+ foam_cfg = {
+ 'WM_COMPILER': 'Gcc', # <- %compiler
+ 'WM_ARCH_OPTION': '64', # (32/64-bit on x86_64)
+ 'WM_LABEL_SIZE': '32', # <- +int64
+ 'WM_PRECISION_OPTION': 'DP', # <- +float32
+ 'WM_COMPILE_OPTION': 'SPACKOpt', # Do not change
+ 'WM_MPLIB': 'SYSTEMMPI', # Use system mpi for spack
+ }
+
+ # The system description is frequently needed
+ foam_sys = {
+ 'WM_ARCH': None,
+ 'WM_COMPILER': None,
+ 'WM_OPTIONS': None,
+ }
+
+ # Content for etc/prefs.{csh,sh}
+ etc_prefs = {}
+
+ # Content for etc/config.{csh,sh}/ files
+ etc_config = {}
+
+ build_script = './spack-Allwmake' # <- Generated by patch() method.
+ # phases = ['configure', 'build', 'install']
+ # build_system_class = 'OpenfoamCom'
+
+ # Add symlinks into bin/, lib/ (eg, for other applications)
+ extra_symlinks = False
+
def setup_environment(self, spack_env, run_env):
    """Publish WM_PROJECT_DIR for module/run-time use."""
    run_env.set('WM_PROJECT_DIR', self.projectdir)
+
@property
def _canonical(self):
    """Canonical name for this package and version,
    eg, 'OpenFOAM-4.1' - also used as the real source/install
    directory name (see patch and projectdir)."""
    return 'OpenFOAM-{0}'.format(self.version)
+
@property
def projectdir(self):
    """Absolute location of project directory: WM_PROJECT_DIR/
    (prefix/OpenFOAM-<version>, unlike openfoam-com which installs
    directly under prefix)."""
    return join_path(self.prefix, self._canonical)  # <- prefix/canonical
+
@property
def etc(self):
    """Absolute location of the OpenFOAM etc/ directory
    (holds bashrc/cshrc and the config.{sh,csh}/ files)."""
    return join_path(self.projectdir, 'etc')
+
@property
def archbin(self):
    """Relative location of architecture-specific executables,
    eg, platforms/<WM_OPTIONS>/bin (see wm_options)."""
    return join_path('platforms', self.wm_options, 'bin')
+
@property
def archlib(self):
    """Relative location of architecture-specific libraries,
    eg, platforms/<WM_OPTIONS>/lib (see wm_options)."""
    return join_path('platforms', self.wm_options, 'lib')
+
@property
def wm_options(self):
    """The architecture+compiler+options string for OpenFOAM.

    set_openfoam() computes and caches the value on first use.
    """
    return self.set_openfoam()
+
@property
def rpath_info(self):
    """Rpath flags for the generated 'SPACKOpt' wmake rule, pointing at
    both the final install lib/ directory and the build-tree lib/
    directory (needed while still building in the stage).
    """
    # Order matters: install location first, then the stage copy
    libdirs = [
        join_path(self.projectdir, self.archlib),
        join_path(self.stage.source_path, self.archlib),
    ]
    rpath_arg = self.compiler.cxx_rpath_arg
    return ' '.join(rpath_arg + d for d in libdirs)
+
def openfoam_arch(self):
    """Return an architecture value similar to what OpenFOAM does in
    etc/config.sh/settings, but slightly more generous.
    Uses and may adjust foam_cfg[WM_ARCH_OPTION] as a side-effect.

    Returns:
        str: platform tag such as 'linux64', 'linuxPPC64', 'darwinIntel64'.
    """
    # spec.architecture.platform is like `uname -s`, but lower-case
    platform = self.spec.architecture.platform

    # spec.architecture.target is like `uname -m`
    target = self.spec.architecture.target

    if platform == 'linux':
        if target == 'i686':
            self.foam_cfg['WM_ARCH_OPTION'] = '32'  # Force consistency
        elif target == 'x86_64':
            if self.foam_cfg['WM_ARCH_OPTION'] == '64':
                platform += '64'
        elif target == 'ia64':
            platform += 'ia64'
        elif target == 'armv7l':
            platform += 'ARM7'
        # BUG FIX: 'ppc64'/'ppc64le' were bare (undefined) names, which
        # raised NameError at runtime on those targets; compare against
        # string literals like the other branches do.
        elif target == 'ppc64':
            platform += 'PPC64'
        elif target == 'ppc64le':
            platform += 'PPC64le'
    elif platform == 'darwin':
        if target == 'x86_64':
            platform += 'Intel'
            if self.foam_cfg['WM_ARCH_OPTION'] == '64':
                platform += '64'
    # ... and others?
    return platform
+
def openfoam_compiler(self):
    """Capitalized version of the compiler name, which usually corresponds
    to how OpenFOAM will camel-case things.
    Use compiler_mapping to handle special cases.
    Also handle special compiler options (eg, KNL).
    """
    # Translate via the mapping table when present, else keep spack's name
    name = self.compiler_mapping.get(self.compiler.name, self.compiler.name)
    name = name.capitalize()
    # Append option suffix (eg, Knights Landing)
    return name + 'KNL' if '+knl' in self.spec else name
+
def set_openfoam(self):
    """Populate foam_cfg, foam_sys according to
    variants, architecture, compiler.
    Returns WM_OPTIONS.

    Raises RuntimeError when no wmake rule exists for the combination,
    or when WM_COMPILE_OPTION does not end in 'Opt'.
    """
    # Run once: WM_OPTIONS is only non-empty after a previous call,
    # so it doubles as the cache flag
    opts = self.foam_sys['WM_OPTIONS']
    if opts:
        return opts

    wm_arch = self.openfoam_arch()
    wm_compiler = self.openfoam_compiler()
    compileOpt = self.foam_cfg['WM_COMPILE_OPTION']

    # Insist on a wmake rule for this architecture/compiler combination
    archCompiler = wm_arch + wm_compiler
    compiler_rule = join_path(
        self.stage.source_path, 'wmake', 'rules', archCompiler)

    if not isdir(compiler_rule):
        raise RuntimeError(
            'No wmake rule for {0}'.format(archCompiler))
    # The generated compiler rules rely on a '*Opt' suffix
    if not re.match(r'.+Opt$', compileOpt):
        raise RuntimeError(
            "WM_COMPILE_OPTION={0} is not type '*Opt'".format(compileOpt))

    # Adjust for variants
    self.foam_cfg['WM_LABEL_SIZE'] = (
        '64' if '+int64' in self.spec else '32'
    )
    self.foam_cfg['WM_PRECISION_OPTION'] = (
        'SP' if '+float32' in self.spec else 'DP'
    )

    # ----
    # WM_LABEL_OPTION=Int$WM_LABEL_SIZE
    # WM_OPTIONS=$WM_ARCH$WM_COMPILER$WM_PRECISION_OPTION$WM_LABEL_OPTION$WM_COMPILE_OPTION
    # ----
    self.foam_sys['WM_ARCH'] = wm_arch
    self.foam_sys['WM_COMPILER'] = wm_compiler
    self.foam_cfg['WM_COMPILER'] = wm_compiler  # For bashrc,cshrc too
    self.foam_sys['WM_OPTIONS'] = ''.join([
        wm_arch,
        wm_compiler,
        self.foam_cfg['WM_PRECISION_OPTION'],
        'Int', self.foam_cfg['WM_LABEL_SIZE'],  # Int32/Int64
        compileOpt
    ])
    return self.foam_sys['WM_OPTIONS']
+
def patch(self):
    """Adjust OpenFOAM build for spack. Where needed, apply filter as an
    alternative to normal patching.

    Also renames the github-style source directory to the canonical
    OpenFOAM-<version> name (symlinking the original back for spack),
    generates the spack-Allwmake wrapper, and calls configure().
    """
    self.set_openfoam()  # May need foam_cfg/foam_sys information

    # This is fairly horrible.
    # The github tarfiles have weird names that do not correspond to the
    # canonical name. We need to rename these, but leave a symlink for
    # spack to work with.
    #
    # Note that this particular OpenFOAM release requires absolute
    # directories to build correctly!
    parent = os.path.dirname(self.stage.source_path)
    original = os.path.basename(self.stage.source_path)
    target = self._canonical
    with working_dir(parent):
        if original != target and not os.path.lexists(target):
            os.rename(original, target)
            # Keep spack's expected stage path working via a symlink
            os.symlink(target, original)
            tty.info('renamed {0} -> {1}'.format(original, target))

    # Avoid WM_PROJECT_INST_DIR for ThirdParty, site or jobControl.
    # Use openfoam-site.patch to handle jobControl, site.
    #
    # Filter (not patch) bashrc,cshrc for additional flexibility
    wm_setting = {
        'WM_THIRD_PARTY_DIR':
        r'$WM_PROJECT_DIR/ThirdParty #SPACK: No separate third-party',
        'WM_VERSION': self.version,  # consistency
        'FOAMY_HEX_MESH': '',  # This is horrible (unset variable?)
    }

    rewrite_environ_files(  # Adjust etc/bashrc and etc/cshrc
        wm_setting,
        posix=join_path('etc', 'bashrc'),
        cshell=join_path('etc', 'cshrc'))

    # Build wrapper script: sources etc/bashrc before running Allwmake
    with open(self.build_script, 'w') as out:
        out.write(
            """#!/bin/bash
. $PWD/etc/bashrc '' # No arguments
mkdir -p $FOAM_APPBIN $FOAM_LIBBIN 2>/dev/null # Allow interrupt
echo Build openfoam with SPACK
echo WM_PROJECT_DIR = $WM_PROJECT_DIR
./Allwmake $@
#
""")
    set_executable(self.build_script)
    self.configure(self.spec, self.prefix)  # Should be a separate phase
+
+ def configure(self, spec, prefix):
+ """Make adjustments to the OpenFOAM configuration files in their various
+ locations: etc/bashrc, etc/config.sh/FEATURE and customizations that
+ don't properly fit get placed in the etc/prefs.sh file (similarly for
+ csh).
+ """
+ self.set_openfoam() # Need foam_cfg/foam_sys information
+
+ # Some settings for filtering bashrc, cshrc
+ wm_setting = {}
+ wm_setting.update(self.foam_cfg)
+
+ rewrite_environ_files( # Adjust etc/bashrc and etc/cshrc
+ wm_setting,
+ posix=join_path('etc', 'bashrc'),
+ cshell=join_path('etc', 'cshrc'))
+
+ # MPI content, with absolute paths
+ content = mplib_content(spec)
+
+ # Content for etc/prefs.{csh,sh}
+ self.etc_prefs = {
+ r'MPI_ROOT': spec['mpi'].prefix, # Absolute
+ r'MPI_ARCH_FLAGS': '"%s"' % content['FLAGS'],
+ r'MPI_ARCH_INC': '"%s"' % content['PINC'],
+ r'MPI_ARCH_LIBS': '"%s"' % content['PLIBS'],
+ }
+
+ # Content for etc/config.{csh,sh}/ files
+ self.etc_config = {
+ 'CGAL': {},
+ 'scotch': {},
+ 'metis': {},
+ 'paraview': [],
+ }
+
+ if True:
+ self.etc_config['scotch'] = {
+ 'SCOTCH_ARCH_PATH': spec['scotch'].prefix,
+ # For src/parallel/decompose/Allwmake
+ 'SCOTCH_VERSION': 'scotch-{0}'.format(spec['scotch'].version),
+ }
+
+ # Write prefs files according to the configuration.
+ # Only need prefs.sh for building, but install both for end-users
+ if self.etc_prefs:
+ write_environ(
+ self.etc_prefs,
+ posix=join_path('etc', 'prefs.sh'),
+ cshell=join_path('etc', 'prefs.csh'))
+
+ # Adjust components to use SPACK variants
+ for component, subdict in self.etc_config.iteritems():
+ write_environ(
+ subdict,
+ posix=join_path('etc', 'config.sh', component),
+ cshell=join_path('etc', 'config.csh', component))
+
+ archCompiler = self.foam_sys['WM_ARCH'] + self.foam_sys['WM_COMPILER']
+ compileOpt = self.foam_cfg['WM_COMPILE_OPTION']
+ general_rule = join_path('wmake', 'rules', 'General')
+ compiler_rule = join_path('wmake', 'rules', archCompiler)
+ generate_mplib_rules(general_rule, self.spec)
+ generate_compiler_rules(compiler_rule, compileOpt, self.rpath_info)
+ # Record the spack spec information
+ with open("log.spack-spec", 'w') as outfile:
+ outfile.write(spec.tree())
+
+ def build(self, spec, prefix):
+ """Build using the OpenFOAM Allwmake script, with a wrapper to source
+ its environment first.
+ """
+ self.set_openfoam() # Force proper population of foam_cfg/foam_sys
+ args = []
+ if self.parallel: # Build in parallel? - pass via the environment
+ os.environ['WM_NCOMPPROCS'] = str(self.make_jobs) \
+ if self.make_jobs else str(multiprocessing.cpu_count())
+ builder = Executable(self.build_script)
+ builder(*args)
+
+ def install(self, spec, prefix):
+ """Install under the projectdir (== prefix/name-version)"""
+ self.build(spec, prefix) # Should be a separate phase
+ opts = self.wm_options
+
+ mkdirp(self.projectdir)
+ projdir = os.path.basename(self.projectdir)
+ wm_setting = {
+ 'WM_PROJECT_INST_DIR': os.path.dirname(self.projectdir),
+ 'WM_PROJECT_DIR': join_path('$WM_PROJECT_INST_DIR', projdir),
+ }
+
+ # Retain build log file
+ out = "spack-build.out"
+ if isfile(out):
+ install(out, join_path(self.projectdir, "log." + opts))
+
+ # All top-level files, except spack build info and possibly Allwmake
+ if '+source' in spec:
+ ignored = re.compile(r'^spack-.*')
+ else:
+ ignored = re.compile(r'^(Allwmake|spack-).*')
+
+ files = [
+ f for f in glob.glob("*") if isfile(f) and not ignored.search(f)
+ ]
+ for f in files:
+ install(f, self.projectdir)
+
+ # Having wmake without sources is actually somewhat pointless...
+ dirs = ['bin', 'etc', 'wmake']
+ if '+source' in spec:
+ dirs.extend(['applications', 'src', 'tutorials'])
+
+ for d in dirs:
+ install_tree(
+ d,
+ join_path(self.projectdir, d))
+
+ dirs = ['platforms']
+ if '+source' in spec:
+ dirs.extend(['doc'])
+
+ # Install platforms (and doc) skipping intermediate targets
+ ignored = ['src', 'applications', 'html', 'Guides']
+ for d in dirs:
+ install_tree(
+ d,
+ join_path(self.projectdir, d),
+ ignore=shutil.ignore_patterns(*ignored))
+
+ rewrite_environ_files( # Adjust etc/bashrc and etc/cshrc
+ wm_setting,
+ posix=join_path(self.etc, 'bashrc'),
+ cshell=join_path(self.etc, 'cshrc'))
+ self.install_links()
+
+ def install_links(self):
+ """Add symlinks into bin/, lib/ (eg, for other applications)"""
+ if not self.extra_symlinks:
+ return
+
+ # ln -s platforms/linux64GccXXX/lib lib
+ with working_dir(self.projectdir):
+ if isdir(self.archlib):
+ os.symlink(self.archlib, 'lib')
+
+ # (cd bin && ln -s ../platforms/linux64GccXXX/bin/* .)
+ with working_dir(join_path(self.projectdir, 'bin')):
+ for f in [
+ f for f in glob.glob(join_path('..', self.archbin, "*"))
+ if isfile(f)
+ ]:
+ os.symlink(f, os.path.basename(f))
+
+# -----------------------------------------------------------------------------
diff --git a/var/spack/repos/builtin/packages/openfst/package.py b/var/spack/repos/builtin/packages/openfst/package.py
index e96575798b..9711605462 100644
--- a/var/spack/repos/builtin/packages/openfst/package.py
+++ b/var/spack/repos/builtin/packages/openfst/package.py
@@ -26,9 +26,9 @@ from spack import *
class Openfst(AutotoolsPackage):
- """OpenFst is a library for constructing, combining, optimizing,
- and searching weighted finite-state transducers (FSTs). Weighted
- finite-state transducers are automata where each transition has
+ """OpenFst is a library for constructing, combining, optimizing,
+ and searching weighted finite-state transducers (FSTs). Weighted
+ finite-state transducers are automata where each transition has
an input label, an output label, and a weight."""
homepage = "http://www.openfst.org"
@@ -45,10 +45,5 @@ class Openfst(AutotoolsPackage):
version('1.4.1', 'ca8f1730b9b9b281e515611fa9ae23c0')
version('1.4.0', '662367ec91084ffab48ee9b5716de39c')
- @run_before('autoreconf')
- def check_compilers(self):
- # Fails to built openfst with %gcc@6: or %intel@16
- if self.spec.satisfies('%intel@16:') or self.spec.satisfies('gcc@6:'):
- raise InstallError(
- 'openfst does not support Intel compiler >= 16 and gcc >= 6 '
- )
+ conflicts('%intel@16:')
+ conflicts('%gcc@6:')
diff --git a/var/spack/repos/builtin/packages/openjpeg/package.py b/var/spack/repos/builtin/packages/openjpeg/package.py
index 9790c52e7d..b22de4452a 100644
--- a/var/spack/repos/builtin/packages/openjpeg/package.py
+++ b/var/spack/repos/builtin/packages/openjpeg/package.py
@@ -43,7 +43,3 @@ class Openjpeg(CMakePackage):
version('2.0', 'cdf266530fee8af87454f15feb619609')
version('1.5.2', '545f98923430369a6b046ef3632ef95c')
version('1.5.1', 'd774e4b5a0db5f0f171c4fc0aabfa14e')
-
- def url_for_version(self, version):
- fmt = 'https://github.com/uclouvain/openjpeg/archive/version.{0}.tar.gz'
- return fmt.format(version.dotted)
diff --git a/var/spack/repos/builtin/packages/openmpi/package.py b/var/spack/repos/builtin/packages/openmpi/package.py
index 3480311aed..7c07c515fa 100644
--- a/var/spack/repos/builtin/packages/openmpi/package.py
+++ b/var/spack/repos/builtin/packages/openmpi/package.py
@@ -61,7 +61,7 @@ class Openmpi(AutotoolsPackage):
homepage = "http://www.open-mpi.org"
url = "https://www.open-mpi.org/software/ompi/v2.1/downloads/openmpi-2.1.0.tar.bz2"
list_url = "http://www.open-mpi.org/software/ompi/"
- list_depth = 3
+ list_depth = 2
version('2.1.0', '4838a5973115c44e14442c01d3f21d52')
version('2.0.2', 'ecd99aa436a1ca69ce936a96d6a3fa48')
diff --git a/var/spack/repos/builtin/packages/openssl/package.py b/var/spack/repos/builtin/packages/openssl/package.py
index 1432ac76fc..53e94ed445 100644
--- a/var/spack/repos/builtin/packages/openssl/package.py
+++ b/var/spack/repos/builtin/packages/openssl/package.py
@@ -37,7 +37,7 @@ class Openssl(Package):
# URL must remain http:// so Spack can bootstrap curl
url = "http://www.openssl.org/source/openssl-1.0.1h.tar.gz"
list_url = "https://www.openssl.org/source/old/"
- list_depth = 2
+ list_depth = 1
version('1.1.0e', '51c42d152122e474754aea96f66928c6')
version('1.1.0d', '711ce3cd5f53a99c0e12a7d5804f0f63')
diff --git a/var/spack/repos/builtin/packages/panda/package.py b/var/spack/repos/builtin/packages/panda/package.py
index e30c2c869d..fb14bd5643 100644
--- a/var/spack/repos/builtin/packages/panda/package.py
+++ b/var/spack/repos/builtin/packages/panda/package.py
@@ -29,9 +29,9 @@ from spack import *
class Panda(Package):
"""PANDA: Parallel AdjaceNcy Decomposition Algorithm"""
homepage = "http://comopt.ifi.uni-heidelberg.de/software/PANDA/index.html"
- url = "http://comopt.ifi.uni-heidelberg.de/software/PANDA/downloads/current_panda.tar"
+ url = "http://comopt.ifi.uni-heidelberg.de/software/PANDA/downloads/panda-2016-03-07.tar"
- version('current', 'b06dc312ee56e13eefea9c915b70fcef')
+ version('2016-03-07', 'b06dc312ee56e13eefea9c915b70fcef')
# Note: Panda can also be built without MPI support
diff --git a/var/spack/repos/builtin/packages/pango/package.py b/var/spack/repos/builtin/packages/pango/package.py
index 0f082527b9..cc6565dae3 100644
--- a/var/spack/repos/builtin/packages/pango/package.py
+++ b/var/spack/repos/builtin/packages/pango/package.py
@@ -33,7 +33,7 @@ class Pango(AutotoolsPackage):
homepage = "http://www.pango.org"
url = "http://ftp.gnome.org/pub/GNOME/sources/pango/1.40/pango-1.40.3.tar.xz"
list_url = "http://ftp.gnome.org/pub/gnome/sources/pango/"
- list_depth = 2
+ list_depth = 1
version('1.40.3', 'abba8b5ce728520c3a0f1535eab19eac3c14aeef7faa5aded90017ceac2711d3')
version('1.40.1', 'e27af54172c72b3ac6be53c9a4c67053e16c905e02addcf3a603ceb2005c1a40')
diff --git a/var/spack/repos/builtin/packages/parallel/package.py b/var/spack/repos/builtin/packages/parallel/package.py
index 32c3b3f5cd..11e9497288 100644
--- a/var/spack/repos/builtin/packages/parallel/package.py
+++ b/var/spack/repos/builtin/packages/parallel/package.py
@@ -34,6 +34,7 @@ class Parallel(AutotoolsPackage):
homepage = "http://www.gnu.org/software/parallel/"
url = "http://ftp.gnu.org/gnu/parallel/parallel-20170122.tar.bz2"
+ version('20170322', '4fe1b8d2e3974d26c77f0b514988214d')
version('20170122', 'c9f0ec01463dc75dbbf292fd8be5f1eb')
version('20160422', '24621f684130472694333709bd4454cb')
version('20160322', '4e81e0d36902ab4c4e969ee6f35e6e57')
diff --git a/var/spack/repos/builtin/packages/paraview/gcc-compiler-pv501.patch b/var/spack/repos/builtin/packages/paraview/gcc-compiler-pv501.patch
new file mode 100644
index 0000000000..bb3be1832f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/paraview/gcc-compiler-pv501.patch
@@ -0,0 +1,22 @@
+--- ParaView-5.0.1.orig/VTK/CMake/vtkCompilerExtras.cmake 2016-03-28 17:07:10.000000000 +0200
++++ ParaView-5.0.1/VTK/CMake/vtkCompilerExtras.cmake 2016-12-13 17:21:25.382720945 +0100
+@@ -32,7 +32,7 @@
+ OUTPUT_VARIABLE _gcc_version_info
+ ERROR_VARIABLE _gcc_version_info)
+
+- string (REGEX MATCH "[345]\\.[0-9]\\.[0-9]*"
++ string (REGEX MATCH "[3-9]\\.[0-9]\\.[0-9]*"
+ _gcc_version "${_gcc_version_info}")
+ if(NOT _gcc_version)
+ string (REGEX REPLACE ".*\\(GCC\\).*([34]\\.[0-9]).*" "\\1.0"
+--- ParaView-5.0.1.orig/VTK/CMake/GenerateExportHeader.cmake 2016-03-28 17:07:10.000000000 +0200
++++ ParaView-5.0.1/VTK/CMake/GenerateExportHeader.cmake 2016-12-13 17:21:25.382720945 +0100
+@@ -166,7 +166,7 @@
+ execute_process(COMMAND ${CMAKE_C_COMPILER} ARGS --version
+ OUTPUT_VARIABLE _gcc_version_info
+ ERROR_VARIABLE _gcc_version_info)
+- string(REGEX MATCH "[345]\\.[0-9]\\.[0-9]*"
++ string(REGEX MATCH "[3-9]\\.[0-9]\\.[0-9]*"
+ _gcc_version "${_gcc_version_info}")
+ # gcc on mac just reports: "gcc (GCC) 3.3 20030304 ..." without the
+ # patch level, handle this here:
diff --git a/var/spack/repos/builtin/packages/paraview/package.py b/var/spack/repos/builtin/packages/paraview/package.py
index 252f58247f..d21b700e09 100644
--- a/var/spack/repos/builtin/packages/paraview/package.py
+++ b/var/spack/repos/builtin/packages/paraview/package.py
@@ -25,32 +25,36 @@
from spack import *
-class Paraview(Package):
+class Paraview(CMakePackage):
+ """ParaView is an open-source, multi-platform data analysis and
+ visualization application."""
+
homepage = 'http://www.paraview.org'
- url = 'http://www.paraview.org/files/v5.0/ParaView-v'
- _url_str = 'http://www.paraview.org/files/v%s/ParaView-v%s-source.tar.gz'
+ url = "http://www.paraview.org/files/v5.3/ParaView-v5.3.0.tar.gz"
+ _urlfmt = 'http://www.paraview.org/files/v{0}/ParaView-v{1}{2}.tar.gz'
+ version('5.3.0', '68fbbbe733aa607ec13d1db1ab5eba71')
+ version('5.2.0', '4570d1a2a183026adb65b73c7125b8b0')
+ version('5.1.2', '44fb32fc8988fcdfbc216c9e40c3e925')
+ version('5.0.1', 'fdf206113369746e2276b95b257d2c9b')
version('4.4.0', 'fa1569857dd680ebb4d7ff89c2227378')
- version('5.0.0', '4598f0b421460c8bbc635c9a1c3bdbee')
+ variant('plugins', default=True,
+ description='Install include files for plugins support')
variant('python', default=False, description='Enable Python support')
-
- variant('tcl', default=False, description='Enable TCL support')
-
variant('mpi', default=True, description='Enable MPI support')
-
variant('osmesa', default=False, description='Enable OSMesa support')
- variant('qt', default=False, description='Enable Qt support')
- variant('opengl2', default=False, description='Enable OpenGL2 backend')
+ variant('qt', default=False, description='Enable Qt (gui) support')
+ variant('opengl2', default=True, description='Enable OpenGL2 backend')
depends_on('python@2:2.7', when='+python')
depends_on('py-numpy', when='+python', type='run')
depends_on('py-matplotlib', when='+python', type='run')
- depends_on('tcl', when='+tcl')
depends_on('mpi', when='+mpi')
depends_on('qt@:4', when='+qt')
+ # TODO# depends_on('qt@:4', when='@:5.2.0+qt')
+ # TODO# depends_on('qt@5', when='@5.3.0:+qt')
- depends_on('cmake', type='build')
depends_on('bzip2')
depends_on('freetype')
# depends_on('hdf5+mpi', when='+mpi')
@@ -65,61 +69,71 @@ class Paraview(Package):
# depends_on('sqlite') # external version not supported
depends_on('zlib')
+ patch('stl-reader-pv440.patch', when='@4.4.0')
+
+ # Broken gcc-detection - improved in 5.1.0, redundant later
+ patch('gcc-compiler-pv501.patch', when='@:5.0.1')
+
+ # Broken installation (ui_pqExportStateWizard.h) - fixed in 5.2.0
+ patch('ui_pqExportStateWizard.patch', when='@:5.1.2')
+
def url_for_version(self, version):
"""Handle ParaView version-based custom URLs."""
- return self._url_str % (version.up_to(2), version)
-
- def install(self, spec, prefix):
- with working_dir('spack-build', create=True):
- def feature_to_bool(feature, on='ON', off='OFF'):
- if feature in spec:
- return on
- return off
-
- def nfeature_to_bool(feature):
- return feature_to_bool(feature, on='OFF', off='ON')
-
- feature_args = std_cmake_args[:]
- feature_args.append(
- '-DPARAVIEW_BUILD_QT_GUI:BOOL=%s' % feature_to_bool('+qt'))
- feature_args.append('-DPARAVIEW_ENABLE_PYTHON:BOOL=%s' %
- feature_to_bool('+python'))
- if '+python' in spec:
- feature_args.append(
- '-DPYTHON_EXECUTABLE:FILEPATH=%s/bin/python'
- % spec['python'].prefix)
- feature_args.append('-DPARAVIEW_USE_MPI:BOOL=%s' %
- feature_to_bool('+mpi'))
- if '+mpi' in spec:
- feature_args.append(
- '-DMPIEXEC:FILEPATH=%s/bin/mpiexec' % spec['mpi'].prefix)
- feature_args.append(
- '-DVTK_ENABLE_TCL_WRAPPING:BOOL=%s' % feature_to_bool('+tcl'))
- feature_args.append('-DVTK_OPENGL_HAS_OSMESA:BOOL=%s' %
- feature_to_bool('+osmesa'))
- feature_args.append('-DVTK_USE_X:BOOL=%s' %
- nfeature_to_bool('+osmesa'))
- feature_args.append(
- '-DVTK_RENDERING_BACKEND:STRING=%s' %
- feature_to_bool('+opengl2', 'OpenGL2', 'OpenGL'))
-
- feature_args.extend(std_cmake_args)
-
- if 'darwin' in self.spec.architecture:
- feature_args.append('-DVTK_USE_X:BOOL=OFF')
- feature_args.append(
- '-DPARAVIEW_DO_UNIX_STYLE_INSTALLS:BOOL=ON')
-
- cmake('..',
- '-DCMAKE_INSTALL_PREFIX:PATH=%s' % prefix,
- '-DBUILD_TESTING:BOOL=OFF',
- '-DVTK_USE_SYSTEM_FREETYPE:BOOL=ON',
- '-DVTK_USE_SYSTEM_HDF5:BOOL=OFF',
- '-DVTK_USE_SYSTEM_JPEG:BOOL=ON',
- '-DVTK_USE_SYSTEM_LIBXML2:BOOL=ON',
- '-DVTK_USE_SYSTEM_NETCDF:BOOL=OFF',
- '-DVTK_USE_SYSTEM_TIFF:BOOL=ON',
- '-DVTK_USE_SYSTEM_ZLIB:BOOL=ON',
- *feature_args)
- make()
- make('install')
+ if version < Version('5.1.0'):
+ return self._urlfmt.format(version.up_to(2), version, '-source')
+ else:
+ return self._urlfmt.format(version.up_to(2), version, '')
+
+ def cmake_args(self):
+ """Populate cmake arguments for ParaView."""
+ spec = self.spec
+
+ def variant_bool(feature, on='ON', off='OFF'):
+ """Ternary for spec variant to ON/OFF string"""
+ if feature in spec:
+ return on
+ return off
+
+ def nvariant_bool(feature):
+ """Negated ternary for spec variant to OFF/ON string"""
+ return variant_bool(feature, on='OFF', off='ON')
+
+ rendering = variant_bool('+opengl2', 'OpenGL2', 'OpenGL')
+ includes = variant_bool('+plugins')
+
+ cmake_args = [
+ '-DPARAVIEW_BUILD_QT_GUI:BOOL=%s' % variant_bool('+qt'),
+ '-DVTK_OPENGL_HAS_OSMESA:BOOL=%s' % variant_bool('+osmesa'),
+ '-DVTK_USE_X:BOOL=%s' % nvariant_bool('+osmesa'),
+ '-DVTK_RENDERING_BACKEND:STRING=%s' % rendering,
+ '-DPARAVIEW_INSTALL_DEVELOPMENT_FILES:BOOL=%s' % includes,
+ '-DBUILD_TESTING:BOOL=OFF',
+ '-DVTK_USE_SYSTEM_FREETYPE:BOOL=ON',
+ '-DVTK_USE_SYSTEM_HDF5:BOOL=OFF',
+ '-DVTK_USE_SYSTEM_JPEG:BOOL=ON',
+ '-DVTK_USE_SYSTEM_LIBXML2:BOOL=ON',
+ '-DVTK_USE_SYSTEM_NETCDF:BOOL=OFF',
+ '-DVTK_USE_SYSTEM_TIFF:BOOL=ON',
+ '-DVTK_USE_SYSTEM_ZLIB:BOOL=ON',
+ ]
+
+ if '+python' in spec:
+ cmake_args.extend([
+ '-DPARAVIEW_ENABLE_PYTHON:BOOL=ON',
+ '-DPYTHON_EXECUTABLE:FILEPATH=%s/bin/python'
+ % spec['python'].prefix
+ ])
+
+ if '+mpi' in spec:
+ cmake_args.extend([
+ '-DPARAVIEW_USE_MPI:BOOL=ON',
+ '-DMPIEXEC:FILEPATH=%s/bin/mpiexec' % spec['mpi'].prefix
+ ])
+
+ if 'darwin' in self.spec.architecture:
+ cmake_args.extend([
+ '-DVTK_USE_X:BOOL=OFF',
+ '-DPARAVIEW_DO_UNIX_STYLE_INSTALLS:BOOL=ON',
+ ])
+
+ return cmake_args
diff --git a/var/spack/repos/builtin/packages/paraview/stl-reader-pv440.patch b/var/spack/repos/builtin/packages/paraview/stl-reader-pv440.patch
new file mode 100644
index 0000000000..06907fd895
--- /dev/null
+++ b/var/spack/repos/builtin/packages/paraview/stl-reader-pv440.patch
@@ -0,0 +1,11 @@
+--- ParaView-4.4.0.orig/VTK/IO/Geometry/vtkSTLReader.cxx 2015-09-11 19:59:24.000000000 +0200
++++ ParaView-4.4.0/VTK/IO/Geometry/vtkSTLReader.cxx 2016-06-19 12:59:50.769770143 +0200
+@@ -448,7 +448,7 @@
+ done = done || (fscanf(fp,"%s", line)==EOF);
+ }
+ }
+- if (!done)
++ else if (!done)
+ {
+ done = (fgets(line, 255, fp) == 0);
+ lineCount++;
diff --git a/var/spack/repos/builtin/packages/paraview/ui_pqExportStateWizard.patch b/var/spack/repos/builtin/packages/paraview/ui_pqExportStateWizard.patch
new file mode 100644
index 0000000000..2983af56ef
--- /dev/null
+++ b/var/spack/repos/builtin/packages/paraview/ui_pqExportStateWizard.patch
@@ -0,0 +1,11 @@
+--- ParaView-5.0.1.orig/Qt/Components/CMakeLists.txt 2016-03-28 17:07:03.000000000 +0200
++++ ParaView-5.0.1/Qt/Components/CMakeLists.txt 2016-12-13 17:38:42.713553032 +0100
+@@ -656,7 +656,7 @@
+ #the pqSGExportStateWizard has subclasses that directly access
+ #the UI file, and currently we don't have a clean way to break this hard
+ #dependency, so for no we install this ui file.
+-if(PARAVIEW_INSTALL_DEVELOPMENT_FILES)
++if(PARAVIEW_INSTALL_DEVELOPMENT_FILES AND PARAVIEW_ENABLE_PYTHON)
+ install(FILES "${CMAKE_CURRENT_BINARY_DIR}/ui_pqExportStateWizard.h"
+ DESTINATION "${VTK_INSTALL_INCLUDE_DIR}")
+ endif()
diff --git a/var/spack/repos/builtin/packages/parmetis/package.py b/var/spack/repos/builtin/packages/parmetis/package.py
index 0e6cd5390a..b07c796dd7 100644
--- a/var/spack/repos/builtin/packages/parmetis/package.py
+++ b/var/spack/repos/builtin/packages/parmetis/package.py
@@ -33,7 +33,8 @@ class Parmetis(Package):
computing fill-reducing orderings of sparse matrices."""
homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/parmetis/overview'
- base_url = 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis'
+ url = 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis/parmetis-4.0.3.tar.gz'
+ list_url = 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis/OLD'
version('4.0.3', 'f69c479586bf6bb7aff6a9bc0c739628')
version('4.0.2', '0912a953da5bb9b5e5e10542298ffdce')
@@ -54,8 +55,11 @@ class Parmetis(Package):
patch('pkg-parmetis-82409d68aa1d6cbc70740d0f35024aae17f7d5cb.patch')
def url_for_version(self, version):
- verdir = 'OLD/' if version < Version('3.2.0') else ''
- return '%s/%sparmetis-%s.tar.gz' % (Parmetis.base_url, verdir, version)
+ url = 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis'
+ if version < Version('3.2.0'):
+ url += '/OLD'
+ url += '/parmetis-{0}.tar.gz'.format(version)
+ return url
def install(self, spec, prefix):
source_directory = self.stage.source_path
@@ -72,7 +76,7 @@ class Parmetis(Package):
if '+shared' in spec:
options.append('-DSHARED:BOOL=ON')
else:
- # Remove all RPATH options
+ # Remove all RPATH options
# (RPATHxxx options somehow trigger cmake to link dynamically)
rpath_options = []
for o in options:
diff --git a/var/spack/repos/builtin/packages/patchelf/package.py b/var/spack/repos/builtin/packages/patchelf/package.py
index 1d429361b6..d36366f557 100644
--- a/var/spack/repos/builtin/packages/patchelf/package.py
+++ b/var/spack/repos/builtin/packages/patchelf/package.py
@@ -33,7 +33,7 @@ class Patchelf(AutotoolsPackage):
url = "http://nixos.org/releases/patchelf/patchelf-0.8/patchelf-0.8.tar.gz"
list_url = "http://nixos.org/releases/patchelf/"
- list_depth = 2
+ list_depth = 1
version('0.9', '3c265508526760f233620f35d79c79fc')
version('0.8', '407b229e6a681ffb0e2cdd5915cb2d01')
diff --git a/var/spack/repos/builtin/packages/perl-dbi/package.py b/var/spack/repos/builtin/packages/perl-dbi/package.py
new file mode 100644
index 0000000000..d1c6a11fb4
--- /dev/null
+++ b/var/spack/repos/builtin/packages/perl-dbi/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PerlDbi(PerlPackage):
+ """The DBI is the standard database interface module for Perl. It defines
+ a set of methods, variables and conventions that provide a consistent
+ database interface independent of the actual database being used."""
+
+ homepage = "https://dbi.perl.org/"
+ url = "http://search.cpan.org/CPAN/authors/id/T/TI/TIMB/DBI-1.636.tar.gz"
+
+ version('1.636', '60f291e5f015550dde71d1858dfe93ba')
diff --git a/var/spack/repos/builtin/packages/perl-module-build/package.py b/var/spack/repos/builtin/packages/perl-module-build/package.py
new file mode 100644
index 0000000000..cccc5d7b5a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/perl-module-build/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+#
+from spack import *
+
+
+class PerlModuleBuild(PerlPackage):
+ """Module::Build is a system for building, testing, and installing Perl
+ modules. It is meant to be an alternative to ExtUtils::MakeMaker.
+ Developers may alter the behavior of the module through subclassing in a
+ much more straightforward way than with MakeMaker. It also does not
+ require a make on your system - most of the Module::Build code is
+ pure-perl and written in a very cross-platform way.
+ """
+
+ homepage = "http://search.cpan.org/perldoc/Module::Build"
+ url = "http://search.cpan.org/CPAN/authors/id/L/LE/LEONT/Module-Build-0.4220.tar.gz"
+
+ version('0.4220', '9df204e188462a4410d496f316c2c531')
diff --git a/var/spack/repos/builtin/packages/perl-term-readkey/package.py b/var/spack/repos/builtin/packages/perl-term-readkey/package.py
new file mode 100644
index 0000000000..2b1f93cbc0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/perl-term-readkey/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PerlTermReadkey(PerlPackage):
+ """Term::ReadKey is a compiled perl module dedicated to providing simple
+ control over terminal driver modes (cbreak, raw, cooked, etc.,) support
+ for non-blocking reads, if the architecture allows, and some generalized
+ handy functions for working with terminals. One of the main goals is to
+ have the functions as portable as possible, so you can just plug in
+ "use Term::ReadKey" on any architecture and have a good likelihood of it
+ working."""
+
+ homepage = "http://search.cpan.org/perldoc/Term::ReadKey"
+ url = "http://www.cpan.org/authors/id/J/JS/JSTOWE/TermReadKey-2.37.tar.gz"
+ list_url = "http://www.cpan.org/authors/id/J/JS/JSTOWE"
+
+ version('2.37', 'e8ea15c16333ac4f8d146d702e83cc0c')
diff --git a/var/spack/repos/builtin/packages/perl-xml-parser/package.py b/var/spack/repos/builtin/packages/perl-xml-parser/package.py
new file mode 100644
index 0000000000..5ceb380473
--- /dev/null
+++ b/var/spack/repos/builtin/packages/perl-xml-parser/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+#
+from spack import *
+
+
+class PerlXmlParser(PerlPackage):
+ """XML::Parser - A perl module for parsing XML documents"""
+
+ homepage = "http://search.cpan.org/perldoc/XML::Parser"
+ url = "http://search.cpan.org/CPAN/authors/id/T/TO/TODDR/XML-Parser-2.44.tar.gz"
+
+ version('2.44', 'af4813fe3952362451201ced6fbce379')
+
+ depends_on('expat')
diff --git a/var/spack/repos/builtin/packages/perl/package.py b/var/spack/repos/builtin/packages/perl/package.py
index 4bacad427b..4ae140454d 100644
--- a/var/spack/repos/builtin/packages/perl/package.py
+++ b/var/spack/repos/builtin/packages/perl/package.py
@@ -23,6 +23,8 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
#
+# Author: Milton Woods <milton.woods@bom.gov.au>
+# Date: March 22, 2017
# Author: George Hartzell <hartzell@alerce.com>
# Date: July 21, 2016
# Author: Justin Too <justin@doubleotoo.com>
@@ -34,17 +36,27 @@ from spack import *
class Perl(Package): # Perl doesn't use Autotools, it should subclass Package
"""Perl 5 is a highly capable, feature-rich programming language with over
27 years of development."""
+
homepage = "http://www.perl.org"
# URL must remain http:// so Spack can bootstrap curl
- url = "http://www.cpan.org/src/5.0/perl-5.24.1.tar.gz"
+ url = "http://www.cpan.org/src/5.0/perl-5.24.1.tar.gz"
+
+ # Development releases
+ version('5.25.11', '37a398682c36cd85992b34b5c1c25dc1')
- version('5.24.1', '765ef511b5b87a164e2531403ee16b3c')
- version('5.24.0', 'c5bf7f3285439a2d3b6a488e14503701')
+ # Maintenance releases (recommended)
+ version('5.24.1', '765ef511b5b87a164e2531403ee16b3c', preferred=True)
+ version('5.22.3', 'aa4f236dc2fc6f88b871436b8d0fda95')
+
+ # Misc releases that people need
version('5.22.2', '5767e2a10dd62a46d7b57f74a90d952b')
+
+ # End of life releases
version('5.20.3', 'd647d0ea5a7a8194c34759ab9f2610cd')
- # 5.18.4 fails with gcc-5
- # https://rt.perl.org/Public/Bug/Display.html?id=123784
- # version('5.18.4' , '1f9334ff730adc05acd3dd7130d295db')
+ version('5.18.4', '1f9334ff730adc05acd3dd7130d295db')
+ version('5.16.3', 'eb5c40f2575df6c155bc99e3fe0a9d82')
+
+ extendable = True
# Installing cpanm alongside the core makes it safe and simple for
# people/projects to install their own sets of perl modules. Not
@@ -61,18 +73,42 @@ class Perl(Package): # Perl doesn't use Autotools, it should subclass Package
placement="cpanm"
)
- def install(self, spec, prefix):
- configure = Executable('./Configure')
- configure_args = ["-des", "-Dprefix=" + prefix]
+ phases = ['configure', 'build', 'install']
+
+ def configure_args(self):
+ spec = self.spec
+ prefix = self.prefix
+
+ config_args = [
+ '-des',
+ '-Dprefix={0}'.format(prefix)
+ ]
+
# Discussion of -fPIC for Intel at:
# https://github.com/LLNL/spack/pull/3081
if spec.satisfies('%intel'):
- configure_args.append("-Accflags=" + self.compiler.pic_flag)
- configure(*configure_args)
+ config_args.append('-Accflags={0}'.format(self.compiler.pic_flag))
+
+ return config_args
+
+ def configure(self, spec, prefix):
+ configure = Executable('./Configure')
+ configure(*self.configure_args())
+
+ def build(self, spec, prefix):
make()
- if self.run_tests:
- make("test")
- make("install")
+
+ @on_package_attributes(run_tests=True)
+ def test(self):
+ make('test')
+
+ def install(self, spec, prefix):
+ make('install')
+
+ @run_after('install')
+ def install_cpanm(self):
+ spec = self.spec
+ prefix = self.prefix
if '+cpanm' in spec:
with working_dir(join_path('cpanm', 'cpanm')):
@@ -80,3 +116,42 @@ class Perl(Package): # Perl doesn't use Autotools, it should subclass Package
perl('Makefile.PL')
make()
make('install')
+
+ def setup_environment(self, spack_env, run_env):
+ """Set PERL5LIB to support activation of Perl packages"""
+ run_env.set('PERL5LIB', join_path(self.prefix, 'lib', 'perl5'))
+
+ def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
+ """Set PATH and PERL5LIB to include the extension and
+ any other perl extensions it depends on,
+ assuming they were installed with INSTALL_BASE defined."""
+ perl_lib_dirs = []
+ perl_bin_dirs = []
+ for d in dependent_spec.traverse(
+ deptype=('build', 'run'), deptype_query='run'):
+ if d.package.extends(self.spec):
+ perl_lib_dirs.append(join_path(d.prefix, 'lib', 'perl5'))
+ perl_bin_dirs.append(join_path(d.prefix, 'bin'))
+ perl_bin_path = ':'.join(perl_bin_dirs)
+ perl_lib_path = ':'.join(perl_lib_dirs)
+ spack_env.prepend_path('PATH', perl_bin_path)
+ spack_env.prepend_path('PERL5LIB', perl_lib_path)
+ run_env.prepend_path('PATH', perl_bin_path)
+ run_env.prepend_path('PERL5LIB', perl_lib_path)
+
+ def setup_dependent_package(self, module, dependent_spec):
+ """Called before perl modules' install() methods.
+ In most cases, extensions will only need to have one line:
+ perl('Makefile.PL','INSTALL_BASE=%s' % self.prefix)
+ """
+
+ # perl extension builds can have a global perl executable function
+ module.perl = Executable(join_path(self.spec.prefix.bin, 'perl'))
+
+ # Add variables for library directory
+ module.perl_lib_dir = join_path(dependent_spec.prefix, 'lib', 'perl5')
+
+ # Make the site packages directory for extensions,
+ # if it does not exist already.
+ if dependent_spec.package.is_extension:
+ mkdirp(module.perl_lib_dir)
diff --git a/var/spack/repos/builtin/packages/petsc/macos-clang-8.1.0.diff b/var/spack/repos/builtin/packages/petsc/macos-clang-8.1.0.diff
new file mode 100644
index 0000000000..b4384d3ac3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/petsc/macos-clang-8.1.0.diff
@@ -0,0 +1,18 @@
+diff --git a/config/BuildSystem/config/libraries.py b/config/BuildSystem/config/libraries.py
+index 0af92f0..8644b55 100644
+--- a/config/BuildSystem/config/libraries.py
++++ b/config/BuildSystem/config/libraries.py
+@@ -50,12 +50,7 @@ class Configure(config.base.Configure):
+ flagName = self.language[-1]+'SharedLinkerFlag'
+ flagSubst = self.language[-1].upper()+'_LINKER_SLFLAG'
+ dirname = os.path.dirname(library).replace('\\ ',' ').replace(' ', '\\ ').replace('\\(','(').replace('(', '\\(').replace('\\)',')').replace(')', '\\)')
+- if hasattr(self.setCompilers, flagName) and not getattr(self.setCompilers, flagName) is None:
+- return [getattr(self.setCompilers, flagName)+dirname,'-L'+dirname,'-l'+name]
+- if flagSubst in self.argDB:
+- return [self.argDB[flagSubst]+dirname,'-L'+dirname,'-l'+name]
+- else:
+- return ['-L'+dirname,' -l'+name]
++ return ['-L'+dirname,' -l'+name]
+ else:
+ return ['-l'+name]
+ if os.path.splitext(library)[1] == '.so':
diff --git a/var/spack/repos/builtin/packages/petsc/package.py b/var/spack/repos/builtin/packages/petsc/package.py
index 3e670cba63..b63172135e 100644
--- a/var/spack/repos/builtin/packages/petsc/package.py
+++ b/var/spack/repos/builtin/packages/petsc/package.py
@@ -74,6 +74,13 @@ class Petsc(Package):
variant('int64', default=False,
description='Compile with 64bit indices')
+ # temporary workaround Clang 8.1.0 with XCode 8.3 on macOS, see
+ # https://bitbucket.org/petsc/petsc/commits/4f290403fdd060d09d5cb07345cbfd52670e3cbc
+ # the patch is an adaptation of the original commit to 3.7.5
+ if sys.platform == "darwin":
+ patch('macos-clang-8.1.0.diff',
+ when='@3.7.5%clang@8.1.0:')
+
# Virtual dependencies
# Git repository needs sowing to build Fortran interface
depends_on('sowing', when='@develop')
diff --git a/var/spack/repos/builtin/packages/pexsi/make.inc b/var/spack/repos/builtin/packages/pexsi/make.inc
index c97b09b424..ec619a7dba 100644
--- a/var/spack/repos/builtin/packages/pexsi/make.inc
+++ b/var/spack/repos/builtin/packages/pexsi/make.inc
@@ -63,7 +63,7 @@ CXXFLAGS = ${COMPILE_FLAG} ${CPPFLAG} ${PROFILE_FLAG} ${INCLUDES}
CCDEFS = ${COMPILE_DEF}
CPPDEFS = ${COMPILE_DEF}
LOADOPTS = ${PROFILE_FLAG} ${LIBS}
-FLOADOPTS = ${PROFILE_FLAG} ${LIBS} ${CPP_LIB}
+FLOADOPTS = @FLDFLAGS ${PROFILE_FLAG} ${LIBS} ${CPP_LIB}
# Generate auto-dependencies
%.d: %.c
diff --git a/var/spack/repos/builtin/packages/pexsi/package.py b/var/spack/repos/builtin/packages/pexsi/package.py
index 989e2ebf6e..04d22c4da8 100644
--- a/var/spack/repos/builtin/packages/pexsi/package.py
+++ b/var/spack/repos/builtin/packages/pexsi/package.py
@@ -30,7 +30,7 @@ import shutil
from spack import *
-class Pexsi(Package):
+class Pexsi(MakefilePackage):
"""The PEXSI library is written in C++, and uses message passing interface
(MPI) to parallelize the computation on distributed memory computing
systems and achieve scalability on more than 10,000 processors.
@@ -46,14 +46,20 @@ class Pexsi(Package):
homepage = 'https://math.berkeley.edu/~linlin/pexsi/index.html'
url = 'https://math.berkeley.edu/~linlin/pexsi/download/pexsi_v0.9.0.tar.gz'
+ version('0.9.2', '0ce491a3a922d271c4edf9b20aa93076')
version('0.9.0', '0c1a2de891ba1445dfc184b2fa270ed8')
depends_on('parmetis')
depends_on('superlu-dist@3.3', when='@0.9.0')
+ depends_on('superlu-dist@4.3', when='@0.9.2')
+
+ variant(
+ 'fortran', default=False, description='Builds the Fortran interface'
+ )
parallel = False
- def install(self, spec, prefix):
+ def edit(self, spec, prefix):
substitutions = {
'@MPICC': self.spec['mpi'].mpicc,
@@ -70,9 +76,13 @@ class Pexsi(Package):
'@LAPACK_LIBS': self.spec['lapack'].libs.joined(),
'@BLAS_LIBS': self.spec['blas'].libs.joined(),
# FIXME : what to do with compiler provided libraries ?
- '@STDCXX_LIB': ' '.join(self.compiler.stdcxx_libs)
+ '@STDCXX_LIB': ' '.join(self.compiler.stdcxx_libs),
+ '@FLDFLAGS': ''
}
+ if '@0.9.2' in self.spec:
+ substitutions['@FLDFLAGS'] = '-Wl,--allow-multiple-definition'
+
template = join_path(
os.path.dirname(inspect.getmodule(self).__file__),
'make.inc'
@@ -85,20 +95,29 @@ class Pexsi(Package):
for key, value in substitutions.items():
filter_file(key, value, makefile)
- make()
+ def build(self, spec, prefix):
+ super(Pexsi, self).build(spec, prefix)
+ if '+fortran' in self.spec:
+ make('-C', 'fortran')
+
+ def install(self, spec, prefix):
+
# 'make install' does not exist, despite what documentation says
mkdirp(self.prefix.lib)
+
install(
join_path(self.stage.source_path, 'src', 'libpexsi_linux.a'),
join_path(self.prefix.lib, 'libpexsi.a')
)
+
install_tree(
join_path(self.stage.source_path, 'include'),
self.prefix.include
)
+
# fortran "interface"
- make('-C', 'fortran')
- install_tree(
- join_path(self.stage.source_path, 'fortran'),
- join_path(self.prefix, 'fortran')
- )
+ if '+fortran' in self.spec:
+ install_tree(
+ join_path(self.stage.source_path, 'fortran'),
+ join_path(self.prefix, 'fortran')
+ )
diff --git a/var/spack/repos/builtin/packages/pigz/package.py b/var/spack/repos/builtin/packages/pigz/package.py
new file mode 100644
index 0000000000..7ba120417a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/pigz/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Pigz(MakefilePackage):
+ """A parallel implementation of gzip for modern multi-processor,
+ multi-core machines."""
+
+ homepage = "http://zlib.net/pigz/"
+ url = "http://zlib.net/pigz/pigz-2.3.4.tar.gz"
+
+ version('2.3.4', '08e6b2e682bbf65ccf12c8966d633fc6')
+
+ depends_on('zlib')
+
+ def build(self, spec, prefix):
+ make()
+
+ def install(self, spec, prefix):
+ mkdirp(prefix.bin)
+ mkdirp(prefix.man1)
+ install('pigz', "%s/pigz" % prefix.bin)
+ install('pigz.1', "%s/pigz.1" % prefix.man1)
diff --git a/var/spack/repos/builtin/packages/pkg-config/package.py b/var/spack/repos/builtin/packages/pkg-config/package.py
index 4d677172c4..50b517f6d0 100644
--- a/var/spack/repos/builtin/packages/pkg-config/package.py
+++ b/var/spack/repos/builtin/packages/pkg-config/package.py
@@ -25,40 +25,40 @@
from spack import *
-class PkgConfig(Package):
+class PkgConfig(AutotoolsPackage):
"""pkg-config is a helper tool used when compiling applications
and libraries"""
homepage = "http://www.freedesktop.org/wiki/Software/pkg-config/"
- url = "http://pkgconfig.freedesktop.org/releases/pkg-config-0.28.tar.gz"
+ url = "http://pkgconfig.freedesktop.org/releases/pkg-config-0.29.2.tar.gz"
+ version('0.29.2', 'f6e931e319531b736fadc017f470e68a')
version('0.29.1', 'f739a28cae4e0ca291f82d1d41ef107d')
version('0.28', 'aa3c86e67551adc3ac865160e34a2a0d')
- parallel = False
variant('internal_glib', default=True,
description='Builds with internal glib')
# The following patch is needed for gcc-6.1
- patch('g_date_strftime.patch')
+ patch('g_date_strftime.patch', when='@:0.29.1')
+
+ parallel = False
- @when("platform=cray")
- def setup_dependent_environment(self, spack_env, run_env, dep_spec):
+ @when('platform=cray')
+ def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
"""spack built pkg-config on cray's requires adding /usr/local/
and /usr/lib64/ to PKG_CONFIG_PATH in order to access cray '.pc'
files."""
- spack_env.prepend_path("PKG_CONFIG_PATH", "/usr/lib64/pkgconfig")
- spack_env.prepend_path("PKG_CONFIG_PATH", "/usr/local/lib64/pkgconfig")
+ spack_env.prepend_path('PKG_CONFIG_PATH', '/usr/lib64/pkgconfig')
+ spack_env.prepend_path('PKG_CONFIG_PATH', '/usr/local/lib64/pkgconfig')
+
+ def configure_args(self):
+ config_args = ['--enable-shared']
- def install(self, spec, prefix):
- args = ["--prefix={0}".format(prefix),
- "--enable-shared"]
- if "+internal_glib" in spec:
+ if '+internal_glib' in self.spec:
# There's a bootstrapping problem here;
# glib uses pkg-config as well, so break
# the cycle by using the internal glib.
- args.append("--with-internal-glib")
- configure(*args)
+ config_args.append('--with-internal-glib')
- make()
- make("install")
+ return config_args
diff --git a/var/spack/repos/builtin/packages/plumed/package.py b/var/spack/repos/builtin/packages/plumed/package.py
index 80cc1aa66a..60443cbcc6 100644
--- a/var/spack/repos/builtin/packages/plumed/package.py
+++ b/var/spack/repos/builtin/packages/plumed/package.py
@@ -43,17 +43,16 @@ class Plumed(AutotoolsPackage):
homepage = 'http://www.plumed.org/'
url = 'https://github.com/plumed/plumed2/archive/v2.2.3.tar.gz'
+ version('2.3.0', 'a9b5728f115dca8f0519111f1f5a6fa5')
+ version('2.2.4', 'afb00da25a3fbd47acf377e53342059d')
version('2.2.3', 'a6e3863e40aac07eb8cf739cbd14ecf8')
# Variants. PLUMED by default builds a number of optional modules.
# The ones listed here are not built by default for various reasons,
# such as stability, lack of testing, or lack of demand.
- variant('crystallization', default=False,
- description='Build support for optional crystallization module.')
- variant('imd', default=False,
- description='Build support for optional imd module.')
- variant('manyrestraints', default=False,
- description='Build support for optional manyrestraints module.')
+ # FIXME: This needs to be an optional
+ variant('optional_modules', default='all',
+ description='String that is used to build optional modules')
variant('shared', default=True, description='Builds shared libraries')
variant('mpi', default=True, description='Activates MPI support')
variant('gsl', default=True, description='Activates GSL support')
@@ -73,6 +72,28 @@ class Plumed(AutotoolsPackage):
# Dictionary mapping PLUMED versions to the patches it provides
# interactively
plumed_patches = {
+ '2.3.0': {
+ 'amber-14': '1',
+ 'gromacs-2016.1': '2',
+ 'gromacs-4.5.7': '3',
+ 'gromacs-5.0.7': '4',
+ 'gromacs-5.1.4': '5',
+ 'lammps-6Apr13': '6',
+ 'namd-2.8': '7',
+ 'namd-2.9': '8',
+ 'espresso-5.0.2': '9'
+ },
+ '2.2.4': {
+ 'amber-14': '1',
+ 'gromacs-4.5.7': '2',
+ 'gromacs-4.6.7': '3',
+ 'gromacs-5.0.7': '4',
+ 'gromacs-5.1.2': '5',
+ 'lammps-6Apr13': '6',
+ 'namd-2.8': '7',
+ 'namd-2.9': '8',
+ 'espresso-5.0.2': '9'
+ },
'2.2.3': {
'amber-14': '1',
'gromacs-4.5.7': '2',
@@ -132,7 +153,7 @@ class Plumed(AutotoolsPackage):
# If the MPI dependency is provided by the intel-mpi package then
# the following additional argument is required to allow it to
# build.
- if spec.satisfies('^intel-mpi'):
+ if 'intel-mpi' in spec:
configure_opts.extend([
'STATIC_LIBS=-mt_mpi'
])
@@ -144,19 +165,16 @@ class Plumed(AutotoolsPackage):
])
# Construct list of optional modules
- module_opts = []
- module_opts.extend([
- '+crystallization' if (
- '+crystallization' in spec) else '-crystallization',
- '+imd' if '+imd' in spec else '-imd',
- '+manyrestraints' if (
- '+manyrestraints' in spec) else '-manyrestraints'
- ])
# If we have specified any optional modules then add the argument to
# enable or disable them.
- if module_opts:
- configure_opts.extend([
- '--enable-modules={0}'.format("".join(module_opts))])
+ optional_modules = self.spec.variants['optional_modules'].value
+ if optional_modules:
+ # From 'configure --help' @2.3:
+ # all/none/reset or : separated list such as
+ # +crystallization:-bias default: reset
+ configure_opts.append(
+ '--enable-modules={0}'.format(optional_modules)
+ )
return configure_opts
diff --git a/var/spack/repos/builtin/packages/prank/package.py b/var/spack/repos/builtin/packages/prank/package.py
index d627e8a0b6..09b73e795f 100644
--- a/var/spack/repos/builtin/packages/prank/package.py
+++ b/var/spack/repos/builtin/packages/prank/package.py
@@ -29,7 +29,7 @@ class Prank(Package):
"""A powerful multiple sequence alignment browser."""
homepage = "http://wasabiapp.org/software/prank/"
- url = "http://wasabiapp.org/download/prank/prank.source.140603.tgz"
+ url = "http://wasabiapp.org/download/prank/prank.source.150803.tgz"
version('150803', '71ac2659e91c385c96473712c0a23e8a')
diff --git a/var/spack/repos/builtin/packages/protobuf/package.py b/var/spack/repos/builtin/packages/protobuf/package.py
index 34d167b28c..e8010a496b 100644
--- a/var/spack/repos/builtin/packages/protobuf/package.py
+++ b/var/spack/repos/builtin/packages/protobuf/package.py
@@ -29,12 +29,14 @@ class Protobuf(AutotoolsPackage):
"""Google's data interchange format."""
homepage = "https://developers.google.com/protocol-buffers"
- url = "https://github.com/google/protobuf/releases/download/v2.5.0/protobuf-2.5.0.tar.bz2"
+ url = "https://github.com/google/protobuf/archive/v3.2.0.tar.gz"
- version('3.0.2', '845b39e4b7681a2ddfd8c7f528299fbb', url='https://github.com/google/protobuf/archive/v3.0.2.tar.gz')
- version('2.5.0', 'a72001a9067a4c2c4e0e836d0f92ece4')
+ version('3.2.0', '61d899b8369781f6dd1e62370813392d')
+ version('3.1.0', '14a532a7538551d5def317bfca41dace')
+ version('3.0.2', '845b39e4b7681a2ddfd8c7f528299fbb')
+ version('2.5.0', '9c21577a03adc1879aba5b52d06e25cf')
- depends_on('m4', when='@3.0.2:')
- depends_on('autoconf', when='@3.0.2:')
- depends_on('automake', when='@3.0.2:')
- depends_on('libtool', when='@3.0.2:')
+ depends_on('automake', type='build')
+ depends_on('autoconf', type='build')
+ depends_on('libtool', type='build')
+ depends_on('m4', type='build')
diff --git a/var/spack/repos/builtin/packages/pruners-ninja/package.py b/var/spack/repos/builtin/packages/pruners-ninja/package.py
new file mode 100644
index 0000000000..effbd979c3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/pruners-ninja/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PrunersNinja(AutotoolsPackage):
+ """NINJA: Noise Inject agent tool to expose subtle and unintended message
+ races."""
+ homepage = "https://github.com/PRUNERS/NINJA"
+ url = "https://github.com/PRUNERS/NINJA/releases/download/v1.0.0/NINJA-1.0.0.tar.gz"
+
+ version("1.0.0", "fee53c4712ac521ebec3cd8692e5185a")
+
+ depends_on("mpi")
+ depends_on("autoconf", type='build')
+ depends_on("automake", type='build')
+ depends_on("libtool", type='build')
diff --git a/var/spack/repos/builtin/packages/py-abipy/package.py b/var/spack/repos/builtin/packages/py-abipy/package.py
new file mode 100644
index 0000000000..d113512f50
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-abipy/package.py
@@ -0,0 +1,77 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyAbipy(PythonPackage):
+ """Python package to automate ABINIT calculations and analyze
+ the results."""
+
+ homepage = "https://github.com/abinit/abipy"
+ url = "https://pypi.io/packages/source/a/abipy/abipy-0.2.0.tar.gz"
+
+ version('0.2.0', 'af9bc5cf7d5ca1a56ff73e2a65c5bcbd')
+
+ variant('gui', default=False, description='Build the GUI')
+ variant('ipython', default=False, description='Build IPython support')
+
+ extends('python', ignore='bin/(feff_.*|gaussian_analyzer|get_environment|html2text|nc3tonc4|nc4tonc3|ncinfo|pmg|pydii|tabulate|tqdm)')
+
+ depends_on('python@2.7:')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-cython', type='build')
+
+ depends_on('py-six', type=('build', 'run'))
+ depends_on('py-prettytable', type=('build', 'run'))
+ depends_on('py-tabulate', type=('build', 'run'))
+ depends_on('py-apscheduler@2.1.0', type=('build', 'run'))
+ depends_on('py-pydispatcher@2.0.5:', type=('build', 'run'))
+ depends_on('py-tqdm', type=('build', 'run'))
+ depends_on('py-html2text', type=('build', 'run'))
+ depends_on('py-pyyaml@3.11:', type=('build', 'run'))
+ depends_on('py-pandas', type=('build', 'run'))
+ depends_on('py-numpy@1.9:', type=('build', 'run'))
+ depends_on('py-scipy@0.14:', type=('build', 'run'))
+ depends_on('py-spglib', type=('build', 'run'))
+ depends_on('py-pymatgen@4.7.2:', type=('build', 'run'))
+ depends_on('py-netcdf4', type=('build', 'run'))
+ depends_on('py-matplotlib@1.5:', type=('build', 'run'))
+ depends_on('py-seaborn', type=('build', 'run'))
+
+ depends_on('py-wxpython', type=('build', 'run'), when='+gui')
+ depends_on('py-wxmplot', type=('build', 'run'), when='+gui')
+
+ depends_on('py-ipython', type=('build', 'run'), when='+ipython')
+ depends_on('py-jupyter', type=('build', 'run'), when='+ipython')
+ depends_on('py-nbformat', type=('build', 'run'), when='+ipython')
+
+ def build_args(self, spec, prefix):
+ args = []
+
+ if '+ipython' in spec:
+ args.append('--with-ipython')
+
+ return args
diff --git a/var/spack/repos/builtin/packages/py-apscheduler/package.py b/var/spack/repos/builtin/packages/py-apscheduler/package.py
new file mode 100644
index 0000000000..96b3e0d474
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-apscheduler/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyApscheduler(PythonPackage):
+ """In-process task scheduler with Cron-like capabilities."""
+
+ homepage = "https://github.com/agronholm/apscheduler"
+ url = "https://pypi.io/packages/source/A/APScheduler/APScheduler-3.3.1.tar.gz"
+
+ version('3.3.1', '6342b3b78b41920a8aa54fd3cd4a299d')
+ version('2.1.0', 'b837d23822fc46651862dd2186ec361a')
+
+ depends_on('py-setuptools@0.7:', type='build')
+
+ depends_on('py-six@1.4.0:', type=('build', 'run'))
+ depends_on('py-pytz', type=('build', 'run'))
+ depends_on('py-tzlocal@1.2:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-autopep8/package.py b/var/spack/repos/builtin/packages/py-autopep8/package.py
index c892e2979c..6c92def415 100644
--- a/var/spack/repos/builtin/packages/py-autopep8/package.py
+++ b/var/spack/repos/builtin/packages/py-autopep8/package.py
@@ -30,10 +30,10 @@ class PyAutopep8(PythonPackage):
PEP 8 style guide."""
homepage = "https://github.com/hhatto/autopep8"
- url = "https://github.com/hhatto/autopep8/archive/v1.2.4.tar.gz"
+ url = "https://pypi.io/packages/source/a/autopep8/autopep8-1.2.4.tar.gz"
- version('1.2.4', '0458db85159a9e1b45f3e71ce6c158da')
- version('1.2.2', 'def3d023fc9dfd1b7113602e965ad8e1')
+ version('1.2.4', 'fcea19c0c5e505b425e2a78afb771f5c')
+ version('1.2.2', '3d97f9c89d14a0975bffd32a2c61c36c')
extends('python', ignore='bin/pep8')
depends_on('python@2.6:2.7,3.2:')
@@ -41,10 +41,3 @@ class PyAutopep8(PythonPackage):
depends_on('py-pycodestyle@1.5.7:1.7.0', type=('build', 'run'))
depends_on('py-setuptools', type='build')
-
- def url_for_version(self, version):
- url = "https://github.com/hhatto/autopep8/archive/{0}{1}.tar.gz"
- if version >= Version('1.2.3'):
- return url.format('v', version)
- else:
- return url.format('ver', version)
diff --git a/var/spack/repos/builtin/packages/py-bokeh/package.py b/var/spack/repos/builtin/packages/py-bokeh/package.py
new file mode 100644
index 0000000000..2394b376d5
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-bokeh/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyBokeh(PythonPackage):
+ """Statistical and novel interactive HTML plots for Python"""
+
+ homepage = "http://github.com/bokeh/bokeh"
+ url = "https://pypi.io/packages/source/b/bokeh/bokeh-0.12.2.tar.gz"
+
+ version('0.12.2', '2d1621bffe6e2ab9d42efbf733861c4f')
+
+ depends_on('python@2.6:')
+ depends_on('py-six@1.5.2:', type=('build', 'run'))
+ depends_on('py-requests@1.2.3:', type=('build', 'run'))
+ depends_on('py-pyyaml@3.10:', type=('build', 'run'))
+ depends_on('py-dateutil@2.1:', type=('build', 'run'))
+ depends_on('py-jinja2@2.7:', type=('build', 'run'))
+ depends_on('py-numpy@1.7.1:', type=('build', 'run'))
+ depends_on('py-tornado@4.3:', type=('build', 'run'))
+ depends_on('py-futures@3.0.3:', type=('build', 'run'),
+ when='^python@2.7:2.8')
diff --git a/var/spack/repos/builtin/packages/py-brian2/package.py b/var/spack/repos/builtin/packages/py-brian2/package.py
new file mode 100644
index 0000000000..28d1005432
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-brian2/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyBrian2(PythonPackage):
+ """A clock-driven simulator for spiking neural networks"""
+
+ homepage = "http://www.briansimulator.org"
+ url = "https://pypi.io/packages/source/B/Brian2/Brian2-2.0.1.tar.gz"
+
+ version('2.0.1', 'df5990e9a71f7344887bc02f54dfd0f0')
+ version('2.0rc3', '3100c5e4eb9eb83a06ff0413a7d43152')
+
+ variant('docs', default=False)
+
+ # depends on py-setuptools@6: for windows, if spack targets windows,
+ # this will need to be added here
+ depends_on('py-setuptools', type='build')
+ depends_on('py-numpy@1.8.2:', type=('build', 'run'))
+ depends_on('py-sympy@0.7.6:', type=('build', 'run'))
+ depends_on('py-pyparsing', type=('build', 'run'))
+ depends_on('py-jinja2@2.7:', type=('build', 'run'))
+ depends_on('py-cpuinfo@0.1.6:', type=('build', 'run'))
+
+ # depends_on('py-nosetests@1.0:', type=('build', 'run')) # extra test
+ depends_on('py-nosetests@1.0:', type=('build', 'run'), when='+docs')
+ depends_on('py-sphinx@1.4.2:', type=('build', 'run'), when='+docs')
diff --git a/var/spack/repos/builtin/packages/py-cdo/package.py b/var/spack/repos/builtin/packages/py-cdo/package.py
index 5eb8f414a8..2bf4a2623c 100644
--- a/var/spack/repos/builtin/packages/py-cdo/package.py
+++ b/var/spack/repos/builtin/packages/py-cdo/package.py
@@ -30,13 +30,12 @@ class PyCdo(PythonPackage):
Operators from Python."""
homepage = "https://pypi.python.org/pypi/cdo"
- url = "https://pypi.python.org/packages/sources/c/cdo/cdo-1.3.2.tar.gz"
+ url = "https://pypi.io/packages/source/c/cdo/cdo-1.3.2.tar.gz"
- version('1.3.2', '4b3686ec1b9b891f166c1c466c6db745',
- url="https://pypi.python.org/packages/d6/13/908e7c1451e1f5fb68405f341cdcb3196a16952ebfe1f172cb788f864aa9/cdo-1.3.2.tar.gz")
+ version('1.3.2', '4b3686ec1b9b891f166c1c466c6db745')
depends_on('cdo')
depends_on('py-setuptools', type='build')
depends_on('py-scipy', type=('build', 'run'))
- depends_on('py-netcdf', type=('build', 'run'))
+ depends_on('py-netcdf4', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/py-dev/__init__.py.patch b/var/spack/repos/builtin/packages/py-dev/__init__.py.patch
index 1c783b82c8..1c783b82c8 100644
--- a/var/spack/repos/builtin/py-dev/__init__.py.patch
+++ b/var/spack/repos/builtin/packages/py-dev/__init__.py.patch
diff --git a/var/spack/repos/builtin/py-dev/package.py b/var/spack/repos/builtin/packages/py-dev/package.py
index 449ed7dd80..449ed7dd80 100644
--- a/var/spack/repos/builtin/py-dev/package.py
+++ b/var/spack/repos/builtin/packages/py-dev/package.py
diff --git a/var/spack/repos/builtin/packages/py-easybuild-easyblocks/package.py b/var/spack/repos/builtin/packages/py-easybuild-easyblocks/package.py
new file mode 100644
index 0000000000..0f1aa923ec
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-easybuild-easyblocks/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2017, Kenneth Hoste
+#
+# This file is part of Spack.
+# Created by Kenneth Hoste, kenneth.hoste@gmail.com
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyEasybuildEasyblocks(PythonPackage):
+ """Collection of easyblocks for EasyBuild, a software build and
+ installation framework for (scientific) software on HPC systems.
+ """
+
+ homepage = 'http://hpcugent.github.io/easybuild/'
+ url = 'https://pypi.io/packages/source/e/easybuild-easyblocks/easybuild-easyblocks-3.1.2.tar.gz'
+
+ version('3.1.2', 'be08da30c07e67ed3e136e8d38905fbc')
+
+ depends_on('py-easybuild-framework@3.1:', when='@3.1:', type='run')
diff --git a/var/spack/repos/builtin/packages/py-easybuild-easyconfigs/package.py b/var/spack/repos/builtin/packages/py-easybuild-easyconfigs/package.py
new file mode 100644
index 0000000000..1631557731
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-easybuild-easyconfigs/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2017, Kenneth Hoste
+#
+# This file is part of Spack.
+# Created by Kenneth Hoste, kenneth.hoste@gmail.com
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyEasybuildEasyconfigs(PythonPackage):
+ """Collection of easyconfig files for EasyBuild, a software build and
+ installation framework for (scientific) software on HPC systems.
+ """
+
+ homepage = 'http://hpcugent.github.io/easybuild/'
+ url = 'https://pypi.io/packages/source/e/easybuild-easyconfigs/easybuild-easyconfigs-3.1.2.tar.gz'
+
+ version('3.1.2', '13a4a97fe8a5b9a94f885661cf497d13')
+
+ depends_on('py-easybuild-framework@3.1:', when='@3.1:', type='run')
+ depends_on('py-easybuild-easyblocks@3.1.2:', when='@3.1.2', type='run')
diff --git a/var/spack/repos/builtin/packages/py-easybuild-framework/package.py b/var/spack/repos/builtin/packages/py-easybuild-framework/package.py
new file mode 100644
index 0000000000..c8d89992c9
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-easybuild-framework/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2017, Kenneth Hoste
+#
+# This file is part of Spack.
+# Created by Kenneth Hoste, kenneth.hoste@gmail.com
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyEasybuildFramework(PythonPackage):
+ """The core of EasyBuild, a software build and installation framework
+ for (scientific) software on HPC systems.
+ """
+
+ homepage = 'http://hpcugent.github.io/easybuild/'
+ url = 'https://pypi.io/packages/source/e/easybuild-framework/easybuild-framework-3.1.2.tar.gz'
+
+ version('3.1.2', '283bc5f6bdcb90016b32986d52fd04a8')
+
+ depends_on('python@2.6:2.9', type='run')
+ depends_on('py-setuptools', type=('build', 'run'))
+ depends_on('py-vsc-base@2.5.4:', when='@2.9:', type='run')
+ depends_on('py-vsc-install', type='run') # only required for tests (python -O -m test.framework.suite)
diff --git a/var/spack/repos/builtin/packages/py-html2text/package.py b/var/spack/repos/builtin/packages/py-html2text/package.py
new file mode 100644
index 0000000000..32341f328a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-html2text/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyHtml2text(PythonPackage):
+ """Turn HTML into equivalent Markdown-structured text."""
+
+ homepage = "https://github.com/Alir3z4/html2text/"
+ url = "https://pypi.io/packages/source/h/html2text/html2text-2016.9.19.tar.gz"
+
+ version('2016.9.19', 'd6b07e32ed21f186496f012691e02dd5')
+
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-markdown/package.py b/var/spack/repos/builtin/packages/py-markdown/package.py
index 23c8167021..af10f1c5d3 100644
--- a/var/spack/repos/builtin/packages/py-markdown/package.py
+++ b/var/spack/repos/builtin/packages/py-markdown/package.py
@@ -50,7 +50,3 @@ class PyMarkdown(PythonPackage):
depends_on('py-setuptools', type='build')
depends_on('python@2.7:2.8,3.2:3.4')
-
- def url_for_version(self, version):
- base_url = "https://github.com/waylan/Python-Markdown/archive"
- return "{0}/{1}-final.tar.gz".format(base_url, version)
diff --git a/var/spack/repos/builtin/packages/py-mongo/package.py b/var/spack/repos/builtin/packages/py-mongo/package.py
new file mode 100644
index 0000000000..e5f1debbd0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-mongo/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyMongo(PythonPackage):
+ """Python driver for MongoDB <http://www.mongodb.org>"""
+
+ homepage = "http://github.com/mongodb/mongo-python-driver"
+ url = "https://pypi.io/packages/source/p/pymongo/pymongo-3.3.0.tar.gz"
+
+ version('3.3.0', '42cd12a5014fb7d3e1987ca04f5c651f')
+
+ depends_on('python@2.6:2.8,3.3:')
+
+    depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-monty/package.py b/var/spack/repos/builtin/packages/py-monty/package.py
new file mode 100644
index 0000000000..19057d51d3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-monty/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyMonty(PythonPackage):
+ """Monty is the missing complement to Python."""
+
+ homepage = "https://github.com/materialsvirtuallab/monty"
+ url = "https://pypi.io/packages/source/m/monty/monty-0.9.6.tar.gz"
+
+ version('0.9.6', '406ea69fdd112feacfdf208624d56903')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-six', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-netcdf4/package.py b/var/spack/repos/builtin/packages/py-netcdf4/package.py
new file mode 100644
index 0000000000..e49cc5410b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-netcdf4/package.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyNetcdf4(PythonPackage):
+ """Python interface to the netCDF Library."""
+
+ homepage = "https://github.com/Unidata/netcdf4-python"
+ url = "https://pypi.io/packages/source/n/netCDF4/netCDF4-1.2.7.tar.gz"
+
+ version('1.2.7', '77b357d78f9658dd973dee901f6d86f8')
+ version('1.2.3.1', '24fc0101c7c441709c230e76af611d53')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-cython@0.19:', type='build')
+
+ depends_on('py-numpy@1.7:', type=('build', 'run'))
+
+ depends_on('netcdf')
+ depends_on('hdf5@1.8.0:')
diff --git a/var/spack/repos/builtin/packages/py-nose/package.py b/var/spack/repos/builtin/packages/py-nose/package.py
index c78c52647a..f3f08029a4 100644
--- a/var/spack/repos/builtin/packages/py-nose/package.py
+++ b/var/spack/repos/builtin/packages/py-nose/package.py
@@ -31,8 +31,10 @@ class PyNose(PythonPackage):
homepage = "https://pypi.python.org/pypi/nose"
url = "https://pypi.io/packages/source/n/nose/nose-1.3.4.tar.gz"
- list_url = "https://pypi.python.org/pypi/nose/"
- list_depth = 2
+
+ import_modules = [
+ 'nose', 'nose.ext', 'nose.plugins', 'nose.sphinx', 'nose.tools'
+ ]
version('1.3.7', '4d3ad0ff07b61373d2cefc89c5d0b20b')
version('1.3.6', '0ca546d81ca8309080fc80cb389e7a16')
diff --git a/var/spack/repos/builtin/packages/py-numpy/package.py b/var/spack/repos/builtin/packages/py-numpy/package.py
index 3ed0d0bdb5..3b590fbd24 100644
--- a/var/spack/repos/builtin/packages/py-numpy/package.py
+++ b/var/spack/repos/builtin/packages/py-numpy/package.py
@@ -36,6 +36,18 @@ class PyNumpy(PythonPackage):
homepage = "http://www.numpy.org/"
url = "https://pypi.io/packages/source/n/numpy/numpy-1.9.1.tar.gz"
+ install_time_test_callbacks = ['install_test', 'import_module_test']
+
+ import_modules = [
+ 'numpy', 'numpy.compat', 'numpy.core', 'numpy.distutils', 'numpy.doc',
+ 'numpy.f2py', 'numpy.fft', 'numpy.lib', 'numpy.linalg', 'numpy.ma',
+ 'numpy.matrixlib', 'numpy.polynomial', 'numpy.random', 'numpy.testing',
+ 'numpy.distutils.command', 'numpy.distutils.fcompiler'
+ ]
+
+ # FIXME: numpy._build_utils and numpy.core.code_generators failed to import
+ # FIXME: Is this expected?
+
version('1.12.0', '33e5a84579f31829bbbba084fe0a4300',
url="https://pypi.io/packages/source/n/numpy/numpy-1.12.0.zip")
version('1.11.2', '03bd7927c314c43780271bf1ab795ebc')
@@ -53,6 +65,10 @@ class PyNumpy(PythonPackage):
depends_on('blas', when='+blas')
depends_on('lapack', when='+lapack')
+ # Tests require:
+ # TODO: Add a 'test' deptype
+ # depends_on('py-nose@1.0.0:', type='test')
+
def setup_dependent_package(self, module, dependent_spec):
python_version = self.spec['python'].version.up_to(2)
arch = '{0}-{1}'.format(platform.system().lower(), platform.machine())
@@ -132,3 +148,22 @@ class PyNumpy(PythonPackage):
args = ['-j', str(make_jobs)]
return args
+
+ def test(self):
+ # `setup.py test` is not supported. Use one of the following
+ # instead:
+ #
+ # - `python runtests.py` (to build and test)
+ # - `python runtests.py --no-build` (to test installed numpy)
+ # - `>>> numpy.test()` (run tests for installed numpy
+ # from within an interpreter)
+ pass
+
+ def install_test(self):
+ # Change directories due to the following error:
+ #
+ # ImportError: Error importing numpy: you should not try to import
+ # numpy from its source directory; please exit the numpy
+ # source tree, and relaunch your python interpreter from there.
+ with working_dir('..'):
+ python('-c', 'import numpy; numpy.test("full", verbose=2)')
diff --git a/var/spack/repos/builtin/packages/py-palettable/package.py b/var/spack/repos/builtin/packages/py-palettable/package.py
new file mode 100644
index 0000000000..b432d4ee28
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-palettable/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyPalettable(PythonPackage):
+ """Color palettes for Python."""
+
+ homepage = "https://jiffyclub.github.io/palettable/"
+ url = "https://pypi.io/packages/source/p/palettable/palettable-3.0.0.tar.gz"
+
+ version('3.0.0', '6e430319fe01386c81dbbc62534e3cc4')
+
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-proj/package.py b/var/spack/repos/builtin/packages/py-proj/package.py
index 949aab88c3..cf230eb49f 100644
--- a/var/spack/repos/builtin/packages/py-proj/package.py
+++ b/var/spack/repos/builtin/packages/py-proj/package.py
@@ -32,9 +32,8 @@ class PyProj(PythonPackage):
# This is not a tagged release of pyproj.
# The changes in this "version" fix some bugs, especially with Python3 use.
- version('1.9.5.1.1', 'd035e4bc704d136db79b43ab371b27d2',
- url='https://www.github.com/jswhit/pyproj/tarball/0be612cc9f972e38b50a90c946a9b353e2ab140f')
-
+ version('1.9.5.1.1', git='https://www.github.com/jswhit/pyproj.git',
+ commit='0be612cc9f972e38b50a90c946a9b353e2ab140f')
version('1.9.5.1', 'a4b80d7170fc82aee363d7f980279835')
depends_on('py-cython', type='build')
diff --git a/var/spack/repos/builtin/packages/py-py2cairo/package.py b/var/spack/repos/builtin/packages/py-py2cairo/package.py
index bb404c61f0..5626784e34 100644
--- a/var/spack/repos/builtin/packages/py-py2cairo/package.py
+++ b/var/spack/repos/builtin/packages/py-py2cairo/package.py
@@ -35,7 +35,7 @@ class PyPy2cairo(Package):
extends('python')
- depends_on('cairo+X')
+ depends_on('cairo')
depends_on('pixman')
def install(self, spec, prefix):
diff --git a/var/spack/repos/builtin/packages/py-pydispatcher/package.py b/var/spack/repos/builtin/packages/py-pydispatcher/package.py
new file mode 100644
index 0000000000..198f6319ac
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pydispatcher/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyPydispatcher(PythonPackage):
+ """Multi-producer-multi-consumer signal dispatching mechanism."""
+
+ homepage = "http://pydispatcher.sourceforge.net/"
+ url = "https://pypi.io/packages/source/P/PyDispatcher/PyDispatcher-2.0.5.tar.gz"
+
+ version('2.0.5', '1b9c2ca33580c2770577add7130b0b28')
+
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-pymatgen/package.py b/var/spack/repos/builtin/packages/py-pymatgen/package.py
new file mode 100644
index 0000000000..0ea4907b1b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pymatgen/package.py
@@ -0,0 +1,54 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyPymatgen(PythonPackage):
+ """Python Materials Genomics is a robust materials analysis code that
+ defines core object representations for structures and molecules with
+ support for many electronic structure codes. It is currently the core
+ analysis code powering the Materials Project."""
+
+ homepage = "http://www.pymatgen.org/"
+ url = "https://pypi.io/packages/source/p/pymatgen/pymatgen-4.7.2.tar.gz"
+
+ version('4.7.2', '9c3a6e8608671c216e4ef89778646fd6')
+ version('4.6.2', '508f77fdc3e783587348e93e4dfed1b8')
+
+ extends('python', ignore='bin/tabulate')
+
+ depends_on('py-setuptools@18.0:', type='build')
+
+ depends_on('py-numpy@1.9:', type=('build', 'run'))
+ depends_on('py-six', type=('build', 'run'))
+ depends_on('py-requests', type=('build', 'run'))
+ depends_on('py-pyyaml@3.11:', type=('build', 'run'))
+ depends_on('py-monty@0.9.6:', type=('build', 'run'))
+ depends_on('py-scipy@0.14:', type=('build', 'run'))
+ depends_on('py-pydispatcher@2.0.5:', type=('build', 'run'))
+ depends_on('py-tabulate', type=('build', 'run'))
+ depends_on('py-spglib@1.9.8.7:', type=('build', 'run'))
+ depends_on('py-matplotlib@1.5:', type=('build', 'run'))
+ depends_on('py-palettable@2.1.1:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-pympler/package.py b/var/spack/repos/builtin/packages/py-pympler/package.py
new file mode 100644
index 0000000000..51c77f117d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pympler/package.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyPympler(PythonPackage):
+ """Development tool to measure, monitor and analyze the memory behavior
+ of Python objects in a running Python application.
+ """
+
+ homepage = "https://github.com/pympler/pympler"
+ url = "https://pypi.io/packages/source/P/Pympler/Pympler-0.4.3.tar.gz"
+
+ version('0.4.3', 'bbb4239126e9c99e2effc83b02bf8755')
+ version('0.4.2', '6bdfd913ad4c94036e8a2b358e49abd7')
+ version('0.4.1', '2d54032a6da91ff438f48d5f36b719a6')
+ version('0.4', '68e4a8aa4a268996fa6a321b664918af')
+ version('0.3.1', '906ce437f46fb30991007671a59d4319')
+
+ depends_on('python@2.5:')
diff --git a/var/spack/repos/builtin/packages/py-pynn/package.py b/var/spack/repos/builtin/packages/py-pynn/package.py
new file mode 100644
index 0000000000..2e7aae6ec9
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pynn/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyPynn(PythonPackage):
+ """A Python package for simulator-independent specification of neuronal
+ network models
+ """
+
+ homepage = "http://neuralensemble.org/PyNN/"
+ url = "https://pypi.io/packages/source/P/PyNN/PyNN-0.8.3.tar.gz"
+
+ version('0.8.3', '28c63f898093806a57198e9271ed7b82')
+ version('0.8beta', git='https://github.com/NeuralEnsemble/PyNN.git',
+ commit='ffb0cb1661f2b0f2778db8f71865978fe7a7a6a4')
+ version('0.8.1', '7fb165ed5af35a115cb9c60991645ae6')
+ version('0.7.5', 'd8280544e4c9b34b40fd372b16342841')
+
+ depends_on('python@2.6:2.8,3.3:')
+ depends_on('py-lazyarray@0.2.9:', type=('build', 'run'))
+ depends_on('py-neo@:0.3', type=('build', 'run'))
+ depends_on('py-numpy@:1.5', type=('build', 'run'))
+ depends_on('py-jinja2@:2.6', type=('build', 'run'))
+ depends_on('py-quantities@:0.10', type=('build', 'run'))
+ depends_on('py-lazyarray@:0.2.9', type=('build', 'run'))
+ depends_on('py-neo@:0.3', type=('build', 'run'))
+ depends_on('py-numpy@1.5:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-pypar/package.py b/var/spack/repos/builtin/packages/py-pypar/package.py
index 6ba999c063..c95698d83d 100644
--- a/var/spack/repos/builtin/packages/py-pypar/package.py
+++ b/var/spack/repos/builtin/packages/py-pypar/package.py
@@ -38,6 +38,3 @@ class PyPypar(PythonPackage):
depends_on('py-numpy', type=('build', 'run'))
build_directory = 'source'
-
- def url_for_version(self, version):
- return "https://storage.googleapis.com/google-code-archive-downloads/v2/code.google.com/pypar/pypar-%s.tgz" % version
diff --git a/var/spack/repos/builtin/packages/py-pytz/package.py b/var/spack/repos/builtin/packages/py-pytz/package.py
index 7a905f9f98..db97520fba 100644
--- a/var/spack/repos/builtin/packages/py-pytz/package.py
+++ b/var/spack/repos/builtin/packages/py-pytz/package.py
@@ -29,12 +29,12 @@ class PyPytz(PythonPackage):
"""World timezone definitions, modern and historical."""
homepage = "https://pypi.python.org/pypi/pytz"
- url = "https://pypi.python.org/packages/source/p/pytz/pytz-2014.10.tar.gz"
+ url = "https://pypi.io/packages/source/p/pytz/pytz-2016.10.tar.gz"
- version('2016.6.1', 'b6c28a3b968bc1d8badfb61b93874e03',
- url="https://pypi.python.org/packages/5d/8e/6635d8f3f9f48c03bb925fab543383089858271f9cfd1216b83247e8df94/pytz-2016.6.1.tar.gz")
- version('2014.10', 'eb1cb941a20c5b751352c52486aa1dd7')
- version('2015.4', '417a47b1c432d90333e42084a605d3d8')
- version('2016.3', 'abae92c3301b27bd8a9f56b14f52cb29')
+ version('2016.10', 'cc9f16ba436efabdcef3c4d32ae4919c')
+ version('2016.6.1', 'b6c28a3b968bc1d8badfb61b93874e03')
+ version('2014.10', 'eb1cb941a20c5b751352c52486aa1dd7')
+ version('2015.4', '417a47b1c432d90333e42084a605d3d8')
+ version('2016.3', 'abae92c3301b27bd8a9f56b14f52cb29')
depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-rtree/package.py b/var/spack/repos/builtin/packages/py-rtree/package.py
index 55f98ad19e..a3604b467d 100644
--- a/var/spack/repos/builtin/packages/py-rtree/package.py
+++ b/var/spack/repos/builtin/packages/py-rtree/package.py
@@ -28,22 +28,9 @@ from spack import *
class PyRtree(PythonPackage):
"""Python interface to the RTREE.4 Library."""
homepage = "http://toblerity.org/rtree/"
- url = "https://github.com/Toblerity/rtree/tarball/0.8.2"
+ url = "https://pypi.io/packages/source/R/Rtree/Rtree-0.8.3.tar.gz"
- # Not an official release yet. But changes in here are required
- # to work with Spack. As it does with all packages, Spack
- # installs libspatialindex in a non-system location. Without the
- # changes in this fork, py-rtree requires an environment variables
- # to be set *at runtime*, in order to find libspatialindex. That
- # is not feasible within the Spack worldview.
- version('0.8.2.2', 'b1fe96a73153db49ea6ce45a063d82cb',
- url='https://github.com/citibeth/rtree/tarball/95a678cc7350857a1bb631bc41254efcd1fc0a0d')
-
- version('0.8.2.1', '394696ca849dd9f3a5ef24fb02a41ef4',
- url='https://github.com/citibeth/rtree/tarball/3a87d86f66a3955676b2507d3bf424ade938a22b')
-
- # Does not work with Spack
- # version('0.8.2', '593c7ac6babc397b8ba58f1636c1e0a0')
+ version('0.8.3', 'a27cb05a85eed0a3605c45ebccc432f8')
depends_on('py-setuptools', type='build')
depends_on('libspatialindex')
diff --git a/var/spack/repos/builtin/packages/py-scikit-learn/package.py b/var/spack/repos/builtin/packages/py-scikit-learn/package.py
index e61a90f352..844453944a 100644
--- a/var/spack/repos/builtin/packages/py-scikit-learn/package.py
+++ b/var/spack/repos/builtin/packages/py-scikit-learn/package.py
@@ -30,8 +30,6 @@ class PyScikitLearn(PythonPackage):
homepage = "https://pypi.python.org/pypi/scikit-learn"
url = "https://pypi.io/packages/source/s/scikit-learn/scikit-learn-0.18.1.tar.gz"
- list_url = "https://pypi.python.org/pypi/scikit-learn"
- list_depth = 2
version('0.18.1', '6b0ff1eaa5010043895dd63d1e3c60c9')
version('0.15.2', 'd9822ad0238e17b382a3c756ea94fe0d')
diff --git a/var/spack/repos/builtin/packages/py-scipy/package.py b/var/spack/repos/builtin/packages/py-scipy/package.py
index c506d4747d..c3ca24291f 100644
--- a/var/spack/repos/builtin/packages/py-scipy/package.py
+++ b/var/spack/repos/builtin/packages/py-scipy/package.py
@@ -33,6 +33,22 @@ class PyScipy(PythonPackage):
homepage = "http://www.scipy.org/"
url = "https://pypi.io/packages/source/s/scipy/scipy-0.18.1.tar.gz"
+ install_time_test_callbacks = ['install_test', 'import_module_test']
+
+ import_modules = [
+ 'scipy', 'scipy._build_utils', 'scipy._lib', 'scipy.cluster',
+ 'scipy.constants', 'scipy.fftpack', 'scipy.integrate',
+ 'scipy.interpolate', 'scipy.io', 'scipy.linalg', 'scipy.misc',
+ 'scipy.ndimage', 'scipy.odr', 'scipy.optimize', 'scipy.signal',
+ 'scipy.sparse', 'scipy.spatial', 'scipy.special', 'scipy.stats',
+ 'scipy.weave', 'scipy.io.arff', 'scipy.io.harwell_boeing',
+ 'scipy.io.matlab', 'scipy.optimize._lsq', 'scipy.sparse.csgraph',
+ 'scipy.sparse.linalg', 'scipy.sparse.linalg.dsolve',
+ 'scipy.sparse.linalg.eigen', 'scipy.sparse.linalg.isolve',
+ 'scipy.sparse.linalg.eigen.arpack', 'scipy.sparse.linalg.eigen.lobpcg',
+ 'scipy.special._precompute'
+ ]
+
version('0.19.0', '91b8396231eec780222a57703d3ec550',
url="https://pypi.io/packages/source/s/scipy/scipy-0.19.0.zip")
version('0.18.1', '5fb5fb7ccb113ab3a039702b6c2f3327')
@@ -49,6 +65,10 @@ class PyScipy(PythonPackage):
depends_on('blas')
depends_on('lapack')
+ # Tests require:
+ # TODO: Add a 'test' deptype
+ # depends_on('py-nose', type='test')
+
def build_args(self, spec, prefix):
args = []
@@ -59,3 +79,22 @@ class PyScipy(PythonPackage):
args.extend(['-j', str(make_jobs)])
return args
+
+ def test(self):
+ # `setup.py test` is not supported. Use one of the following
+ # instead:
+ #
+ # - `python runtests.py` (to build and test)
+ # - `python runtests.py --no-build` (to test installed scipy)
+ # - `>>> scipy.test()` (run tests for installed scipy
+ # from within an interpreter)
+ pass
+
+ def install_test(self):
+ # Change directories due to the following error:
+ #
+ # ImportError: Error importing scipy: you should not try to import
+ # scipy from its source directory; please exit the scipy
+ # source tree, and relaunch your python interpreter from there.
+ with working_dir('..'):
+ python('-c', 'import scipy; scipy.test("full", verbose=2)')
diff --git a/var/spack/repos/builtin/packages/py-seaborn/package.py b/var/spack/repos/builtin/packages/py-seaborn/package.py
new file mode 100644
index 0000000000..3171ed2e21
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-seaborn/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PySeaborn(PythonPackage):
+ """Seaborn: statistical data visualization.
+
+ Seaborn is a library for making attractive and informative statistical
+ graphics in Python. It is built on top of matplotlib and tightly
+ integrated with the PyData stack, including support for numpy and pandas
+ data structures and statistical routines from scipy and statsmodels."""
+
+ homepage = "http://seaborn.pydata.org/"
+ url = "https://pypi.io/packages/source/s/seaborn/seaborn-0.7.1.tar.gz"
+
+ version('0.7.1', 'ef07e29e0f8a1f2726abe506c1a36e93')
+
+ depends_on('py-setuptools', type='build')
+
+ depends_on('py-numpy', type=('build', 'run'))
+ depends_on('py-scipy', type=('build', 'run'))
+ depends_on('py-matplotlib', type=('build', 'run'))
+ depends_on('py-pandas', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-setuptools/package.py b/var/spack/repos/builtin/packages/py-setuptools/package.py
index af1ea9bf06..94ee8a7fc4 100644
--- a/var/spack/repos/builtin/packages/py-setuptools/package.py
+++ b/var/spack/repos/builtin/packages/py-setuptools/package.py
@@ -32,6 +32,12 @@ class PySetuptools(PythonPackage):
homepage = "https://pypi.python.org/pypi/setuptools"
url = "https://pypi.io/packages/source/s/setuptools/setuptools-25.2.0.tar.gz"
+ import_modules = [
+ 'pkg_resources', 'setuptools', 'pkg_resources.extern',
+ 'pkg_resources._vendor', 'pkg_resources._vendor.packaging',
+ 'setuptools.extern', 'setuptools.command'
+ ]
+
version('34.2.0', '41b630da4ea6cfa5894d9eb3142922be',
url="https://pypi.io/packages/source/s/setuptools/setuptools-34.2.0.zip")
version('25.2.0', 'a0dbb65889c46214c691f6c516cf959c')
@@ -53,3 +59,11 @@ class PySetuptools(PythonPackage):
depends_on('py-packaging@16.8:', when='@34.0.0:', type=('build', 'run'))
depends_on('py-six@1.6.0:', when='@34.0.0:', type=('build', 'run'))
depends_on('py-appdirs@1.4.0:', when='@34.0.0:', type=('build', 'run'))
+
+ # Tests require:
+ # TODO: Add a 'test' deptype
+ # FIXME: All of these depend on setuptools, creating a dependency loop
+ # FIXME: Is there any way around this problem?
+ # depends_on('py-pytest-flake8', type='test')
+ # depends_on('pytest@2.8:', type='test')
+ # depends_on('py-mock', when='^python@:3.2', type='test')
diff --git a/var/spack/repos/builtin/packages/py-netcdf/package.py b/var/spack/repos/builtin/packages/py-spglib/package.py
index 2d35320ca0..19380b745b 100644
--- a/var/spack/repos/builtin/packages/py-netcdf/package.py
+++ b/var/spack/repos/builtin/packages/py-spglib/package.py
@@ -25,14 +25,14 @@
from spack import *
-class PyNetcdf(PythonPackage):
- """Python interface to the netCDF Library."""
- homepage = "http://unidata.github.io/netcdf4-python"
- url = "https://github.com/Unidata/netcdf4-python/tarball/v1.2.3.1rel"
+class PySpglib(PythonPackage):
+ """Python bindings for C library for finding and handling
+ crystal symmetries."""
- version('1.2.3.1', '4fc4320d4f2a77b894ebf8da1c9895af')
+ homepage = "http://atztogo.github.io/spglib/"
+ url = "https://pypi.io/packages/source/s/spglib/spglib-1.9.9.18.tar.gz"
+ version('1.9.9.18', 'b8b46268d3aeada7b9b201b11882548f')
+
+ depends_on('py-setuptools@18.0:', type='build')
depends_on('py-numpy', type=('build', 'run'))
- depends_on('py-cython', type=('build', 'run'))
- depends_on('py-setuptools', type=('build', 'run'))
- depends_on('netcdf')
diff --git a/var/spack/repos/builtin/packages/py-spykeutils/package.py b/var/spack/repos/builtin/packages/py-spykeutils/package.py
new file mode 100644
index 0000000000..2aa0cabe46
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-spykeutils/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PySpykeutils(PythonPackage):
+ """Utilities for analyzing electrophysiological data"""
+
+ homepage = "https://github.com/rproepp/spykeutils"
+ url = "https://pypi.io/packages/source/s/spykeutils/spykeutils-0.4.3.tar.gz"
+
+ version('0.4.3', 'cefe4c48ebfdb9bac7a6cbfaf49dd485')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-scipy', type=('build', 'run'))
+ depends_on('py-quantities', type=('build', 'run'))
+ depends_on('py-neo@0.2.1:0.3.99', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-tzlocal/package.py b/var/spack/repos/builtin/packages/py-tzlocal/package.py
new file mode 100644
index 0000000000..d17fd62a52
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-tzlocal/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyTzlocal(PythonPackage):
+ """tzinfo object for the local timezone."""
+
+ homepage = "https://github.com/regebro/tzlocal"
+ url = "https://pypi.io/packages/source/t/tzlocal/tzlocal-1.3.tar.gz"
+
+ version('1.3', '3cb544b3975b59f91a793850a072d4a8')
+
+ depends_on('py-setuptools', type='build')
+
+ depends_on('py-pytz', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-vsc-base/package.py b/var/spack/repos/builtin/packages/py-vsc-base/package.py
new file mode 100644
index 0000000000..e5e23b0015
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-vsc-base/package.py
@@ -0,0 +1,35 @@
+##############################################################################
+# Copyright (c) 2017, Kenneth Hoste
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyVscBase(PythonPackage):
+ """Common Python libraries tools created by HPC-UGent"""
+
+ homepage = 'https://github.com/hpcugent/vsc-base/'
+ url = 'https://pypi.io/packages/source/v/vsc-base/vsc-base-2.5.8.tar.gz'
+
+ version('2.5.8', '57f3f49eab7aa15a96be76e6c89a72d8')
+
+ depends_on('py-setuptools', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-vsc-install/package.py b/var/spack/repos/builtin/packages/py-vsc-install/package.py
new file mode 100644
index 0000000000..452bf97992
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-vsc-install/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2017, Kenneth Hoste
+#
+# This file is part of Spack.
+# Created by Kenneth Hoste, kenneth.hoste@gmail.com
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyVscInstall(PythonPackage):
+ """Shared setuptools functions and classes
+ for Python libraries developed by HPC-UGent.
+ """
+
+ homepage = 'https://github.com/hpcugent/vsc-install/'
+ url = 'https://pypi.io/packages/source/v/vsc-install/vsc-install-0.10.25.tar.gz'
+
+ version('0.10.25', 'd1b9453a75cb56dba0deb7a878047b51')
+
+ depends_on('py-setuptools', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-wrapt/package.py b/var/spack/repos/builtin/packages/py-wrapt/package.py
new file mode 100644
index 0000000000..65d0f3fc11
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-wrapt/package.py
@@ -0,0 +1,34 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyWrapt(PythonPackage):
+ """Module for decorators, wrappers and monkey patching."""
+
+ homepage = "https://github.com/GrahamDumpleton/wrapt"
+ url = "https://pypi.io/packages/source/w/wrapt/wrapt-1.10.10.tar.gz"
+
+ version('1.10.10', '97365e906afa8b431f266866ec4e2e18')
diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py
index 15f7f4f987..6e0b5b8dc8 100644
--- a/var/spack/repos/builtin/packages/python/package.py
+++ b/var/spack/repos/builtin/packages/python/package.py
@@ -42,7 +42,7 @@ class Python(Package):
homepage = "http://www.python.org"
url = "http://www.python.org/ftp/python/2.7.8/Python-2.7.8.tgz"
list_url = "https://www.python.org/downloads/"
- list_depth = 2
+ list_depth = 1
version('3.6.0', '3f7062ccf8be76491884d0e47ac8b251')
version('3.5.2', '3fe8434643a78630c61c6464fe2e7e72')
@@ -99,11 +99,6 @@ class Python(Package):
r'\1setup.py\2 --no-user-cfg \3\6'
)
- @when('@:2.6,3.0:3.3')
- def patch(self):
- # See https://github.com/LLNL/spack/issues/1490
- pass
-
def install(self, spec, prefix):
# TODO: The '--no-user-cfg' option for Python installation is only in
# Python v2.7 and v3.4+ (see https://bugs.python.org/issue1180) and
@@ -470,19 +465,14 @@ class Python(Package):
else:
with closing(open(main_pth, 'w')) as f:
- f.write("""
-import sys
-sys.__plen = len(sys.path)
-""")
+ f.write("import sys; sys.__plen = len(sys.path)\n")
for path in paths:
f.write("{0}\n".format(path))
- f.write("""
-new = sys.path[sys.__plen:]
-del sys.path[sys.__plen:]
-p = getattr(sys, '__egginsert', 0)
-sys.path[p:p] = new
-sys.__egginsert = p + len(new)
-""")
+ f.write("import sys; new=sys.path[sys.__plen:]; "
+ "del sys.path[sys.__plen:]; "
+ "p=getattr(sys,'__egginsert',0); "
+ "sys.path[p:p]=new; "
+ "sys.__egginsert = p+len(new)\n")
def activate(self, ext_pkg, **args):
ignore = self.python_ignore(ext_pkg, args)
diff --git a/var/spack/repos/builtin/packages/qbank/package.py b/var/spack/repos/builtin/packages/qbank/package.py
new file mode 100644
index 0000000000..976bda8fbd
--- /dev/null
+++ b/var/spack/repos/builtin/packages/qbank/package.py
@@ -0,0 +1,87 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import os
+
+
+class Qbank(Package):
+ """QBank is a unique dynamic reservation-based allocation management system
+ that manages the utilization of computational resources in a multi-project
+ environment. It is used in conjunction with a resource management system
+ allowing an organization to guarantee greater fairness and enforce mission
+ priorities by associating a charge with the use of computational resources
+ and allocating resource credits which limit how much of the resources may
+ be used at what time and by whom. It tracks resource utilization and allows
+ for insightful planning."""
+
+ # QBank is so old that it no longer has (never had?) a homepage
+ # but it was developed at Pacific Northwest National Laboratory
+ # by Scott Jackson <Scott.Jackson@pnl.gov>
+ homepage = "http://www.pnnl.gov/"
+ url = "file://{0}/qbank-2.10.4.tar.gz".format(os.getcwd())
+
+ version('2.10.4', '0820587353e63d32ddb49689dd4289e7')
+
+ variant('doc', default=False, description='Build documentation')
+
+ depends_on('openssl')
+
+ depends_on('perl@5.6:5.16', type=('build', 'run'))
+ depends_on('perl-dbi@1.00:', type=('build', 'run'))
+
+ phases = ['configure', 'build', 'install']
+
+ def configure_args(self):
+ prefix = self.prefix
+
+ config_args = [
+ '--prefix', prefix,
+ '--logdir', join_path(prefix, 'var', 'log', 'qbank')
+ ]
+
+ return config_args
+
+ def configure(self, spec, prefix):
+ perl = which('perl')
+ perl('configure', *self.configure_args())
+
+ def build(self, spec, prefix):
+ make()
+
+ if '+doc' in spec:
+ make('docs')
+
+ def install(self, spec, prefix):
+ make('install')
+
+ if '+doc' in spec:
+ install_tree('doc', join_path(prefix, 'doc'))
+
+ def setup_environment(self, spack_env, run_env):
+ spec = self.spec
+ prefix = self.prefix
+
+ if '+doc' in spec:
+ run_env.prepend_path('MANPATH', join_path(prefix, 'doc'))
diff --git a/var/spack/repos/builtin/packages/qt-creator/package.py b/var/spack/repos/builtin/packages/qt-creator/package.py
index 347cf4d6ee..abd619530f 100644
--- a/var/spack/repos/builtin/packages/qt-creator/package.py
+++ b/var/spack/repos/builtin/packages/qt-creator/package.py
@@ -32,7 +32,7 @@ class QtCreator(Package):
url = 'http://download.qt.io/official_releases/qtcreator/4.1/4.1.0/qt-creator-opensource-src-4.1.0.tar.gz'
list_url = 'http://download.qt.io/official_releases/qtcreator/'
- list_depth = 3
+ list_depth = 2
version('4.1.0', '657727e4209befa4bf5889dff62d9e0a')
diff --git a/var/spack/repos/builtin/packages/qt/package.py b/var/spack/repos/builtin/packages/qt/package.py
index 60c5f15ece..b27bc3fe07 100644
--- a/var/spack/repos/builtin/packages/qt/package.py
+++ b/var/spack/repos/builtin/packages/qt/package.py
@@ -33,7 +33,7 @@ class Qt(Package):
homepage = 'http://qt.io'
url = 'http://download.qt.io/archive/qt/5.7/5.7.0/single/qt-everywhere-opensource-src-5.7.0.tar.gz'
list_url = 'http://download.qt.io/archive/qt/'
- list_depth = 4
+ list_depth = 3
version('5.7.1', '031fb3fd0c3cc0f1082644492683f18d')
version('5.7.0', '9a46cce61fc64c20c3ac0a0e0fa41b42')
@@ -251,7 +251,7 @@ class Qt(Package):
# Don't disable all the database drivers, but should
# really get them into spack at some point.
- @when('@3')
+ @when('@3') # noqa: F811
def configure(self):
# A user reported that this was necessary to link Qt3 on ubuntu.
# However, if LD_LIBRARY_PATH is not set the qt build fails, check
@@ -268,7 +268,7 @@ class Qt(Package):
'-release',
'-fast')
- @when('@4')
+ @when('@4') # noqa: F811
def configure(self):
configure('-fast',
'-{0}gtkstyle'.format('' if '+gtk' in self.spec else 'no-'),
@@ -276,7 +276,7 @@ class Qt(Package):
'-arch', str(self.spec.architecture.target),
*self.common_config_args)
- @when('@5.0:5.6')
+ @when('@5.0:5.6') # noqa: F811
def configure(self):
webkit_args = [] if '+webkit' in self.spec else ['-skip', 'qtwebkit']
configure('-no-eglfs',
@@ -284,7 +284,7 @@ class Qt(Package):
'-{0}gtkstyle'.format('' if '+gtk' in self.spec else 'no-'),
*(webkit_args + self.common_config_args))
- @when('@5.7:')
+ @when('@5.7:') # noqa: F811
def configure(self):
config_args = self.common_config_args
diff --git a/var/spack/repos/builtin/packages/r-lava/package.py b/var/spack/repos/builtin/packages/r-lava/package.py
index c38f9003ea..263e859c48 100644
--- a/var/spack/repos/builtin/packages/r-lava/package.py
+++ b/var/spack/repos/builtin/packages/r-lava/package.py
@@ -29,7 +29,7 @@ class RLava(RPackage):
"""Estimation and simulation of latent variable models."""
homepage = "https://cran.r-project.org/package=lava"
- url = "https://cran.r-project.org/src/contrib/lava_1.4.6.tar.gz"
+ url = "https://cran.r-project.org/src/contrib/lava_1.4.7.tar.gz"
list_url = "https://cran.r-project.org/src/contrib/Archive/lava"
version('1.4.7', '28039248a7039ba9281d172e4dbf9543')
diff --git a/var/spack/repos/builtin/packages/r-packrat/package.py b/var/spack/repos/builtin/packages/r-packrat/package.py
index 726a6640e8..ff66ddaf39 100644
--- a/var/spack/repos/builtin/packages/r-packrat/package.py
+++ b/var/spack/repos/builtin/packages/r-packrat/package.py
@@ -33,4 +33,5 @@ class RPackrat(RPackage):
url = "https://cran.r-project.org/src/contrib/packrat_0.4.7-1.tar.gz"
list_url = "https://cran.r-project.org/src/contrib/Archive/packrat"
+ version('0.4.8-1', '14e82feba55fcda923396282fc490038')
version('0.4.7-1', '80c2413269b292ade163a70ba5053e84')
diff --git a/var/spack/repos/builtin/packages/rempi/package.py b/var/spack/repos/builtin/packages/rempi/package.py
new file mode 100644
index 0000000000..d93dbfa722
--- /dev/null
+++ b/var/spack/repos/builtin/packages/rempi/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Rempi(AutotoolsPackage):
+ """ReMPI is a record-and-replay tool for MPI applications."""
+ homepage = "https://github.com/PRUNERS/ReMPI"
+ url = "https://github.com/PRUNERS/ReMPI/releases/download/v1.0.0/ReMPI-1.0.0.tar.gz"
+
+ version("1.0.0", "32c780a6a74627b5796bea161d4c4733")
+
+ depends_on("mpi")
+ depends_on("zlib")
+ depends_on("autoconf", type='build')
+ depends_on("automake", type='build')
+ depends_on("libtool", type='build')
diff --git a/var/spack/repos/builtin/packages/root/package.py b/var/spack/repos/builtin/packages/root/package.py
index a96d7f6bbc..a5939ace8f 100644
--- a/var/spack/repos/builtin/packages/root/package.py
+++ b/var/spack/repos/builtin/packages/root/package.py
@@ -30,7 +30,7 @@ import sys
class Root(Package):
"""ROOT is a data analysis framework."""
homepage = "https://root.cern.ch"
- url = "https://root.cern.ch/download/root_v6.07.02.source.tar.gz"
+ url = "https://root.cern.ch/download/root_v6.06.06.source.tar.gz"
version('6.06.06', '4308449892210c8d36e36924261fea26')
version('6.06.04', '55a2f98dd4cea79c9c4e32407c2d6d17')
@@ -83,7 +83,3 @@ class Root(Package):
spack_env.set('ROOTSYS', self.prefix)
spack_env.set('ROOT_VERSION', 'v6')
spack_env.prepend_path('PYTHONPATH', self.prefix.lib)
-
- def url_for_version(self, version):
- """Handle ROOT's unusual version string."""
- return "https://root.cern.ch/download/root_v%s.source.tar.gz" % version
diff --git a/var/spack/repos/builtin/packages/rose/package.py b/var/spack/repos/builtin/packages/rose/package.py
index 02b09f0126..5f0d12427c 100644
--- a/var/spack/repos/builtin/packages/rose/package.py
+++ b/var/spack/repos/builtin/packages/rose/package.py
@@ -35,10 +35,11 @@ class Rose(Package):
(Developed at Lawrence Livermore National Lab)"""
homepage = "http://rosecompiler.org/"
- url = "https://github.com/rose-compiler/edg4x-rose"
+ url = "https://github.com/rose-compiler/rose/archive/v0.9.7.tar.gz"
+ version('0.9.7', 'e14ce5250078df4b09f4f40559d46c75')
version('master', branch='master',
- git='https://github.com/rose-compiler/edg4x-rose.git')
+ git='https://github.com/rose-compiler/rose.git')
patch('add_spack_compiler_recognition.patch')
@@ -46,7 +47,7 @@ class Rose(Package):
depends_on("automake@1.14", type='build')
depends_on("libtool@2.4", type='build')
depends_on("boost@1.54.0")
- depends_on("jdk@8u25-linux-x64")
+ depends_on("jdk@8u25")
def install(self, spec, prefix):
# Bootstrap with autotools
diff --git a/var/spack/repos/builtin/packages/rust-bindgen/package.py b/var/spack/repos/builtin/packages/rust-bindgen/package.py
index c411bc15d1..00ccbb71cf 100644
--- a/var/spack/repos/builtin/packages/rust-bindgen/package.py
+++ b/var/spack/repos/builtin/packages/rust-bindgen/package.py
@@ -29,9 +29,9 @@ import os
class RustBindgen(Package):
"""The rust programming language toolchain"""
homepage = "http://www.rust-lang.org"
- url = "https://github.com/crabtw/rust-bindgen"
+ url = "https://github.com/servo/rust-bindgen/archive/v0.20.5.tar.gz"
- version('0.16', tag='0.16', git='https://github.com/crabtw/rust-bindgen')
+ version('0.20.5', '3e4d70a5bec540324fdd95bc9e82bebc')
extends("rust")
depends_on("llvm")
diff --git a/var/spack/repos/builtin/packages/samtools/package.py b/var/spack/repos/builtin/packages/samtools/package.py
index b82a7e55d6..a616725068 100644
--- a/var/spack/repos/builtin/packages/samtools/package.py
+++ b/var/spack/repos/builtin/packages/samtools/package.py
@@ -33,11 +33,14 @@ class Samtools(Package):
homepage = "www.htslib.org"
url = "https://github.com/samtools/samtools/releases/download/1.3.1/samtools-1.3.1.tar.bz2"
+ version('1.4', '8cbd7d2a0ec16d834babcd6c6d85d691')
version('1.3.1', 'a7471aa5a1eb7fc9cc4c6491d73c2d88')
version('1.2', '988ec4c3058a6ceda36503eebecd4122')
depends_on("ncurses")
- depends_on("htslib", when='@1.3.1:') # htslib became standalone
+ # htslib became standalone @1.3.1, must use corresponding version
+ depends_on("htslib@1.4", when='@1.4')
+ depends_on("htslib@1.3.1", when='@1.3.1')
depends_on('zlib', when='@1.2') # needed for builtin htslib
def install(self, spec, prefix):
diff --git a/var/spack/repos/builtin/packages/scorep/package.py b/var/spack/repos/builtin/packages/scorep/package.py
index 6b3345d83b..1a5a591c3f 100644
--- a/var/spack/repos/builtin/packages/scorep/package.py
+++ b/var/spack/repos/builtin/packages/scorep/package.py
@@ -32,14 +32,11 @@ class Scorep(Package):
"""
homepage = "http://www.vi-hps.org/projects/score-p"
- url = "http://www.vi-hps.org/upload/packages/scorep/scorep-1.2.3.tar.gz"
+ url = "http://www.vi-hps.org/upload/packages/scorep/scorep-2.0.2.tar.gz"
- version('2.0.2', '8f00e79e1b5b96e511c5ebecd10b2888',
- url='http://www.vi-hps.org/upload/packages/scorep/scorep-2.0.2.tar.gz')
- version('1.4.2', '3b9a042b13bdd5836452354e6567f71e',
- url='http://www.vi-hps.org/upload/packages/scorep/scorep-1.4.2.tar.gz')
- version('1.3', '9db6f957b7f51fa01377a9537867a55c',
- url='http://www.vi-hps.org/upload/packages/scorep/scorep-1.3.tar.gz')
+ version('2.0.2', '8f00e79e1b5b96e511c5ebecd10b2888')
+ version('1.4.2', '3b9a042b13bdd5836452354e6567f71e')
+ version('1.3', '9db6f957b7f51fa01377a9537867a55c')
##########
# Dependencies for SCORE-P are quite tight. See the homepage for more
diff --git a/var/spack/repos/builtin/packages/scotch/package.py b/var/spack/repos/builtin/packages/scotch/package.py
index b878349485..8efb629487 100644
--- a/var/spack/repos/builtin/packages/scotch/package.py
+++ b/var/spack/repos/builtin/packages/scotch/package.py
@@ -31,8 +31,7 @@ class Scotch(Package):
partitioning, graph clustering, and sparse matrix ordering."""
homepage = "http://www.labri.fr/perso/pelegrin/scotch/"
- url = "http://gforge.inria.fr/frs/download.php/latestfile/298/scotch_6.0.3.tar.gz" # noqa: E501
- base_url = "http://gforge.inria.fr/frs/download.php/latestfile/298"
+ url = "http://gforge.inria.fr/frs/download.php/latestfile/298/scotch_6.0.4.tar.gz"
list_url = "http://gforge.inria.fr/frs/?group_id=248"
version('6.0.4', 'd58b825eb95e1db77efe8c6ff42d329f')
@@ -71,12 +70,10 @@ class Scotch(Package):
# from the Scotch hosting site. These alternative archives include a
# superset of the behavior in their default counterparts, so we choose to
# always grab these versions for older Scotch versions for simplicity.
- def url_for_version(self, version):
- return super(Scotch, self).url_for_version(version)
-
@when('@:6.0.0')
def url_for_version(self, version):
- return '%s/scotch_%s_esmumps.tar.gz' % (Scotch.base_url, version)
+ url = "http://gforge.inria.fr/frs/download.php/latestfile/298/scotch_{0}_esmumps.tar.gz"
+ return url.format(version)
def patch(self):
self.configure()
diff --git a/var/spack/repos/builtin/packages/shiny-server/package.py b/var/spack/repos/builtin/packages/shiny-server/package.py
new file mode 100644
index 0000000000..941921c795
--- /dev/null
+++ b/var/spack/repos/builtin/packages/shiny-server/package.py
@@ -0,0 +1,77 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class ShinyServer(CMakePackage):
+ """Shiny server lets you put shiny web applications and interactive
+ documents online. Take your shiny apps and share them with your
+ organization or the world."""
+
+ #
+ # HEADS UP:
+ # 1. The shiny server installation step will download various node
+ # and npm bits from the net. They seem to have them well
+ # constrained ("npm shrinkwrap"?), but this package is not
+ # "air gappable".
+ # 2. Docs say that it requires 'gcc'. depends_on() won't do the
+ # right thing, it's Up To You.
+ #
+ homepage = "https://www.rstudio.com/products/shiny/shiny-server/"
+ url = "https://github.com/rstudio/shiny-server/archive/v1.5.3.838.tar.gz"
+
+ version('1.5.3.838', '96f20fdcdd94c9e9bb851baccb82b97f')
+
+ depends_on('python@:2.9.99') # docs say: "Really. 3.x will not work"
+ depends_on('cmake@2.8.10:')
+ depends_on('git')
+ depends_on('r+X')
+ depends_on('openssl')
+
+ def cmake_args(self):
+ spec = self.spec
+ options = []
+
+ options.extend([
+ "-DPYTHON=%s" % join_path(spec['python'].prefix.bin, 'python'),
+ ])
+
+ return options
+
+ # Recompile the npm modules included in the project
+ @run_after('build')
+ def build_node(self):
+ bash = which('bash')
+ mkdirp('build')
+ bash('-c', 'bin/npm --python="$PYTHON" install')
+ bash('-c', 'bin/node ./ext/node/lib/node_modules/npm/node_modules/node-gyp/bin/node-gyp.js --python="$PYTHON" rebuild') # noqa: E501
+
+ def setup_environment(self, spack_env, run_env):
+ run_env.prepend_path('PATH',
+ join_path(self.prefix, 'shiny-server', 'bin'))
+ # shiny comes with its own pandoc; hook it up...
+ run_env.prepend_path('PATH',
+ join_path(self.prefix, 'shiny-server',
+ 'ext', 'pandoc', 'static'))
diff --git a/var/spack/repos/builtin/packages/silo/package.py b/var/spack/repos/builtin/packages/silo/package.py
index 6a9326517b..eca5d1a605 100644
--- a/var/spack/repos/builtin/packages/silo/package.py
+++ b/var/spack/repos/builtin/packages/silo/package.py
@@ -30,7 +30,7 @@ class Silo(Package):
data to binary, disk files."""
homepage = "http://wci.llnl.gov/simulation/computer-codes/silo"
- base_url = "https://wci.llnl.gov/content/assets/docs/simulation/computer-codes/silo"
+ url = "https://wci.llnl.gov/content/assets/docs/simulation/computer-codes/silo/silo-4.10.2/silo-4.10.2.tar.gz"
version('4.10.2', '9ceac777a2f2469ac8cef40f4fab49c8')
version('4.9', 'a83eda4f06761a86726e918fc55e782a')
@@ -67,6 +67,3 @@ class Silo(Package):
make()
make('install')
-
- def url_for_version(self, version):
- return '%s/silo-%s/silo-%s.tar.gz' % (Silo.base_url, version, version)
diff --git a/var/spack/repos/builtin/packages/snakemake/package.py b/var/spack/repos/builtin/packages/snakemake/package.py
new file mode 100644
index 0000000000..0970b88f9c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/snakemake/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Snakemake(PythonPackage):
+ """Snakemake is an MIT-licensed workflow management system."""
+
+ homepage = "https://snakemake.readthedocs.io/en/stable/"
+ url = "https://pypi.io/packages/source/s/snakemake/snakemake-3.11.2.tar.gz"
+
+ version('3.11.2', '6bf834526078522b38d271fdf73e6b22')
+
+ depends_on('python@3.3:')
+ depends_on('py-requests', type=('build', 'run'))
+ depends_on('py-setuptools', type=('build', 'run'))
+ depends_on('py-wrapt', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/speex/package.py b/var/spack/repos/builtin/packages/speex/package.py
new file mode 100644
index 0000000000..b8850e801f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/speex/package.py
@@ -0,0 +1,35 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Speex(AutotoolsPackage):
+ """Speex is an Open Source/Free Software patent-free
+ audio compression format designed for speech."""
+
+ homepage = "https://speex.org"
+ url = "http://downloads.us.xiph.org/releases/speex/speex-1.2.0.tar.gz"
+
+ version('1.2.0', '8ab7bb2589110dfaf0ed7fa7757dc49c')
diff --git a/var/spack/repos/builtin/packages/sph2pipe/cmake.patch b/var/spack/repos/builtin/packages/sph2pipe/cmake.patch
new file mode 100644
index 0000000000..e5ff6dfc0c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/sph2pipe/cmake.patch
@@ -0,0 +1,10 @@
+diff -ruN sph2pipe_v2.5.ori/CMakeLists.txt sph2pipe_v2.5/CMakeLists.txt
+--- sph2pipe_v2.5.ori/CMakeLists.txt 1970-01-01 08:00:00.000000000 +0800
++++ sph2pipe_v2.5/CMakeLists.txt 2017-03-27 17:33:27.000000000 +0800
+@@ -0,0 +1,6 @@
++PROJECT (SPH2PIPE)
++SET(SRC_LIST file_headers.c shorten_x.c sph2pipe.c)
++ADD_EXECUTABLE(sph2pipe ${SRC_LIST})
++TARGET_LINK_LIBRARIES(sph2pipe m)
++INSTALL(TARGETS sph2pipe
++ RUNTIME DESTINATION bin)
diff --git a/var/spack/repos/builtin/packages/sph2pipe/package.py b/var/spack/repos/builtin/packages/sph2pipe/package.py
new file mode 100644
index 0000000000..445f284902
--- /dev/null
+++ b/var/spack/repos/builtin/packages/sph2pipe/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Sph2pipe(CMakePackage):
+ """Sph2pipe is a portable tool for
+ converting SPHERE files to other formats."""
+
+ homepage = "https://www.ldc.upenn.edu/language-resources/tools/sphere-conversion-tools"
+ url = "https://www.ldc.upenn.edu/sites/www.ldc.upenn.edu/files/ctools/sph2pipe_v2.5.tar.gz"
+
+ version('2.5', '771d9143e9aec0a22c6a14e138974be2')
+
+ patch('cmake.patch')
diff --git a/var/spack/repos/builtin/packages/sst-dumpi/package.py b/var/spack/repos/builtin/packages/sst-dumpi/package.py
new file mode 100644
index 0000000000..edb1858809
--- /dev/null
+++ b/var/spack/repos/builtin/packages/sst-dumpi/package.py
@@ -0,0 +1,52 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+#
+# Author: Samuel Knight <sknigh@sandia.gov>
+# Date: Feb 3, 2017
+#
+from spack import *
+
+
+class SstDumpi(AutotoolsPackage):
+ """The DUMPI package provides libraries to collect and read traces of MPI
+ applications. Traces are created by linking an application with a library
+ that uses the PMPI interface to intercept MPI calls. DUMPI records
+ signatures of all MPI-1 and MPI-2 subroutine calls, return values, request
+ information, and PAPI counters.
+ """
+
+ homepage = "http://sst.sandia.gov/about_dumpi.html"
+ url = "https://github.com/sstsimulator/sst-dumpi/archive/6.1.0.tar.gz"
+
+ depends_on('autoconf@1.68:', type='build')
+ depends_on('automake@1.11.1:', type='build')
+ depends_on('libtool@1.2.4:', type='build')
+ depends_on('m4', type='build')
+
+ version('master',
+ git='https://github.com/sstsimulator/sst-dumpi.git',
+ branch='master')
+
+ version('6.1.0', '31c3f40a697dc85bf23dd34270982319')
diff --git a/var/spack/repos/builtin/packages/sst-macro/package.py b/var/spack/repos/builtin/packages/sst-macro/package.py
new file mode 100644
index 0000000000..1fb927b599
--- /dev/null
+++ b/var/spack/repos/builtin/packages/sst-macro/package.py
@@ -0,0 +1,64 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+#
+# Author: Samuel Knight <sknigh@sandia.gov>
+# Date: Feb 3, 2017
+#
+from spack import *
+
+
+class SstMacro(AutotoolsPackage):
+ """The SST/macro software package provides a simulator for large-scale
+ parallel computer architectures for the coarse-grained study of
+ distributed-memory applications. The simulator is driven from either a
+ trace file or skeleton application. SST/macro's modular architecture can
+ be extended with additional network models, trace file formats,
+ software services, and processor models.
+ """
+
+ homepage = "http://sst.sandia.gov/about_sstmacro.html"
+ url = "https://github.com/sstsimulator/sst-macro/releases/download/v6.1.0_Final/sstmacro-6.1.0.tar.gz"
+
+ depends_on('boost@1.59:')
+ depends_on('autoconf@1.68:', type='build', when='@master')
+ depends_on('automake@1.11.1:', type='build', when='@master')
+ depends_on('libtool@1.2.4:', type='build', when='@master')
+ depends_on('m4', type='build', when='@master')
+
+ version('master',
+ git='https://github.com/sstsimulator/sst-macro.git',
+ branch='master')
+
+ version('6.1.0', '98b737be6326b8bd711de832ccd94d14',
+ url='https://github.com/sstsimulator/sst-macro/releases/download/v6.1.0_Final/sstmacro-6.1.0.tar.gz')
+
+ @run_before('autoreconf')
+ def bootstrap(self):
+ if '@master' in self.spec:
+ Executable('./bootstrap.sh')()
+
+ def configure_args(self):
+ args = ['--disable-regex']
+ return args
diff --git a/var/spack/repos/builtin/packages/staden-io-lib/package.py b/var/spack/repos/builtin/packages/staden-io-lib/package.py
new file mode 100644
index 0000000000..31f9693e28
--- /dev/null
+++ b/var/spack/repos/builtin/packages/staden-io-lib/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class StadenIoLib(AutotoolsPackage):
+ """Io_lib is a library for reading/writing various bioinformatics
+ file formats."""
+
+ homepage = "http://staden.sourceforge.net/"
+ url = "https://sourceforge.net/projects/staden/files/io_lib/1.14.8/io_lib-1.14.8.tar.gz/download"
+
+ version('1.14.8', 'fe5ee6aaec8111a5bc3ac584a0c0c0c7')
+
+ depends_on('zlib')
diff --git a/var/spack/repos/builtin/packages/star-ccm-plus/package.py b/var/spack/repos/builtin/packages/star-ccm-plus/package.py
index ba1516b62a..4197aec339 100644
--- a/var/spack/repos/builtin/packages/star-ccm-plus/package.py
+++ b/var/spack/repos/builtin/packages/star-ccm-plus/package.py
@@ -31,6 +31,7 @@ class StarCcmPlus(Package):
"""STAR-CCM+ (Computational Continuum Mechanics) CFD solver."""
homepage = "http://mdx.plm.automation.siemens.com/star-ccm-plus"
+ url = "file://{0}/STAR-CCM+11.06.010_02_linux-x86_64.tar.gz".format(os.getcwd())
version('11.06.010_02', 'd349c6ac8293d8e6e7a53533d695588f')
@@ -40,10 +41,6 @@ class StarCcmPlus(Package):
license_required = True
license_vars = ['CDLMD_LICENSE_FILE', 'LM_LICENSE_FILE']
- def url_for_version(self, version):
- return "file://{0}/STAR-CCM+{1}_linux-x86_64.tar.gz".format(
- os.getcwd(), version)
-
def install(self, spec, prefix):
# There is a known issue with the LaunchAnywhere application.
# Specifically, it cannot handle long prompts or prompts
diff --git a/var/spack/repos/builtin/packages/stream/package.py b/var/spack/repos/builtin/packages/stream/package.py
index 8b3f32af8a..7e24071356 100644
--- a/var/spack/repos/builtin/packages/stream/package.py
+++ b/var/spack/repos/builtin/packages/stream/package.py
@@ -25,7 +25,7 @@
from spack import *
-class Stream(Package):
+class Stream(MakefilePackage):
"""The STREAM benchmark is a simple synthetic benchmark program that
measures sustainable memory bandwidth (in MB/s) and the corresponding
computation rate for simple vector kernels."""
@@ -36,7 +36,7 @@ class Stream(Package):
variant('openmp', default=False, description='Build with OpenMP support')
- def patch(self):
+ def edit(self, spec, prefix):
makefile = FileFilter('Makefile')
# Use the Spack compiler wrappers
@@ -54,8 +54,6 @@ class Stream(Package):
makefile.filter('FFLAGS = .*', 'FFLAGS = {0}'.format(fflags))
def install(self, spec, prefix):
- make()
-
# Manual installation
mkdir(prefix.bin)
install('stream_c.exe', prefix.bin)
diff --git a/var/spack/repos/builtin/packages/stress/package.py b/var/spack/repos/builtin/packages/stress/package.py
new file mode 100644
index 0000000000..81bf2bd9a4
--- /dev/null
+++ b/var/spack/repos/builtin/packages/stress/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Stress(AutotoolsPackage):
+ """stress is a deliberately simple workload generator for POSIX systems.
+ It imposes a configurable amount of CPU, memory, I/O, and disk stress on
+ the system. It is written in C, and is free software licensed under the
+ GPLv2."""
+
+ homepage = "https://people.seas.harvard.edu/~apw/stress/"
+ url = "https://people.seas.harvard.edu/~apw/stress/stress-1.0.4.tar.gz"
+
+ version('1.0.4', '890a4236dd1656792f3ef9a190cf99ef')
diff --git a/var/spack/repos/builtin/packages/sublime-text/package.py b/var/spack/repos/builtin/packages/sublime-text/package.py
index 81d8690db8..1cfb117a05 100644
--- a/var/spack/repos/builtin/packages/sublime-text/package.py
+++ b/var/spack/repos/builtin/packages/sublime-text/package.py
@@ -33,8 +33,8 @@ class SublimeText(Package):
homepage = "http://www.sublimetext.com/"
url = "https://download.sublimetext.com/sublime_text_3_build_3126_x64.tar.bz2"
- version('3126', 'acc34252b0ea7dff1f581c5db1564dcb')
- version('2.0.2', '699cd26d7fe0bada29eb1b2cd7b50e4b')
+ version('3_build_3126', 'acc34252b0ea7dff1f581c5db1564dcb')
+ version('2.0.2', '699cd26d7fe0bada29eb1b2cd7b50e4b')
# Sublime text comes as a pre-compiled binary.
# Since we can't link to Spack packages, we'll just have to
diff --git a/var/spack/repos/builtin/packages/subversion/package.py b/var/spack/repos/builtin/packages/subversion/package.py
index 02b7d96378..7752528b43 100644
--- a/var/spack/repos/builtin/packages/subversion/package.py
+++ b/var/spack/repos/builtin/packages/subversion/package.py
@@ -27,18 +27,24 @@ from spack import *
class Subversion(Package):
"""Apache Subversion - an open source version control system."""
- homepage = 'https://subversion.apache.org/'
- url = 'http://archive.apache.org/dist/subversion/subversion-1.8.13.tar.gz'
+ homepage = 'https://subversion.apache.org/'
+ url = 'http://archive.apache.org/dist/subversion/subversion-1.8.13.tar.gz'
version('1.8.13', '8065b3698d799507fb72dd7926ed32b6')
version('1.9.3', 'a92bcfaec4e5038f82c74a7b5bbd2f46')
+ variant('perl', default=False, description='Build with Perl bindings')
+
depends_on('apr')
depends_on('apr-util')
depends_on('zlib')
depends_on('sqlite')
depends_on('serf')
+ extends('perl', when='+perl')
+ depends_on('swig@1.3.24:3.0.0', when='+perl')
+ depends_on('perl-term-readkey', when='+perl')
+
# Optional: We need swig if we want the Perl, Python or Ruby
# bindings.
# depends_on('swig')
@@ -60,12 +66,30 @@ class Subversion(Package):
options.append('--with-zlib=%s' % spec['zlib'].prefix)
options.append('--with-sqlite=%s' % spec['sqlite'].prefix)
options.append('--with-serf=%s' % spec['serf'].prefix)
- # options.append('--with-swig=%s' % spec['swig'].prefix)
+
+ if spec.satisfies('^swig'):
+ options.append('--with-swig=%s' % spec['swig'].prefix)
+ if spec.satisfies('+perl'):
+ options.append(
+ 'PERL=%s' % join_path(spec['perl'].prefix.bin, 'perl'))
configure(*options)
make()
+ if self.run_tests:
+ make('check')
make('install')
+ if spec.satisfies('+perl'):
+ make('swig-pl')
+ if self.run_tests:
+ make('check-swig-pl')
+ make('install-swig-pl-lib')
+ with working_dir(join_path(
+ 'subversion', 'bindings', 'swig', 'perl', 'native')):
+ perl = which('perl')
+ perl('Makefile.PL', 'INSTALL_BASE=%s' % prefix)
+ make('install')
+
# python bindings
# make('swig-py',
# 'swig-pydir=/usr/lib/python2.7/site-packages/libsvn',
@@ -74,10 +98,6 @@ class Subversion(Package):
# 'swig-pydir=/usr/lib/python2.7/site-packages/libsvn',
# 'swig_pydir_extra=/usr/lib/python2.7/site-packages/svn')
- # perl bindings
- # make('swig-pl')
- # make('install-swig-pl')
-
# ruby bindings
# make('swig-rb')
# make('isntall-swig-rb')
diff --git a/var/spack/repos/builtin/packages/symengine/package.py b/var/spack/repos/builtin/packages/symengine/package.py
index e3c00f849a..136575e48c 100644
--- a/var/spack/repos/builtin/packages/symengine/package.py
+++ b/var/spack/repos/builtin/packages/symengine/package.py
@@ -40,16 +40,18 @@ class Symengine(CMakePackage):
description='Compile with Boost multi-precision integer library')
variant('flint', default=False,
description='Compile with Flint integer library')
+ variant('llvm', default=False,
+ description='Compile with LLVM JIT compiler support')
variant('mpc', default=True,
description='Compile with MPC library')
variant('mpfr', default=True,
description='Compile with MPFR library')
+ variant('openmp', default=False,
+ description='Enable OpenMP support')
variant('piranha', default=False,
description='Compile with Piranha integer library')
variant('thread_safe', default=True,
description='Enable thread safety option')
- variant('openmp', default=False,
- description='Enable OpenMP support')
variant('shared', default=True,
description='Enables the build of shared libraries')
@@ -61,6 +63,7 @@ class Symengine(CMakePackage):
# NOTE: [mpc,mpfr,flint,piranha] could also be built against mpir
depends_on('boost', when='+boostmp')
depends_on('gmp', when='~boostmp')
+ depends_on('llvm', when='+llvm')
depends_on('mpc', when='+mpc~boostmp')
depends_on('mpfr', when='+mpfr~boostmp')
depends_on('flint', when='+flint~boostmp')
@@ -84,6 +87,8 @@ class Symengine(CMakePackage):
'-DBUILD_TESTS:BOOL=%s' % (
'ON' if self.run_tests else 'OFF'),
'-DBUILD_BENCHMARKS:BOOL=ON',
+ '-DWITH_LLVM:BOOL=%s' % (
+ 'ON' if '+llvm' in spec else 'OFF'),
'-DWITH_OPENMP:BOOL=%s' % (
'ON' if '+openmp' in spec else 'OFF'),
'-DBUILD_SHARED_LIBS:BOOL=%s' % (
diff --git a/var/spack/repos/builtin/packages/tcl/package.py b/var/spack/repos/builtin/packages/tcl/package.py
index 79d4bc7544..8ddfc903b3 100644
--- a/var/spack/repos/builtin/packages/tcl/package.py
+++ b/var/spack/repos/builtin/packages/tcl/package.py
@@ -34,6 +34,7 @@ class Tcl(AutotoolsPackage):
that is truly cross platform, easily deployed and highly
extensible."""
homepage = "http://www.tcl.tk"
+ url = "http://prdownloads.sourceforge.net/tcl/tcl8.6.5-src.tar.gz"
version('8.6.6', '5193aea8107839a79df8ac709552ecb7')
version('8.6.5', '0e6426a4ca9401825fbc6ecf3d89a326')
@@ -45,10 +46,6 @@ class Tcl(AutotoolsPackage):
configure_directory = 'unix'
- def url_for_version(self, version):
- base_url = 'http://prdownloads.sourceforge.net/tcl'
- return '{0}/tcl{1}-src.tar.gz'.format(base_url, version)
-
def setup_environment(self, spack_env, run_env):
# When using Tkinter from within spack provided python+tk, python
# will not be able to find Tcl/Tk unless TCL_LIBRARY is set.
diff --git a/var/spack/repos/builtin/packages/tetgen/package.py b/var/spack/repos/builtin/packages/tetgen/package.py
index 6e5ed79c36..2ccc9504e2 100644
--- a/var/spack/repos/builtin/packages/tetgen/package.py
+++ b/var/spack/repos/builtin/packages/tetgen/package.py
@@ -34,7 +34,7 @@ class Tetgen(Package):
boundary conforming Delaunay meshes, and Voronoi partitions.
"""
- homepage = "http://www.tetgen.org"
+ homepage = "http://wias-berlin.de/software/tetgen/"
version('1.5.0', '3b9fd9cdec121e52527b0308f7aad5c1', url='http://www.tetgen.org/1.5/src/tetgen1.5.0.tar.gz')
version('1.4.3', 'd6a4bcdde2ac804f7ec66c29dcb63c18', url='http://www.tetgen.org/files/tetgen1.4.3.tar.gz')
diff --git a/var/spack/repos/builtin/packages/tinyxml/package.py b/var/spack/repos/builtin/packages/tinyxml/package.py
index 1789d9022e..45970ca4f8 100644
--- a/var/spack/repos/builtin/packages/tinyxml/package.py
+++ b/var/spack/repos/builtin/packages/tinyxml/package.py
@@ -23,6 +23,7 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
+from shutil import copyfile
import os.path
@@ -30,10 +31,14 @@ class Tinyxml(CMakePackage):
"""Simple, small, efficient, C++ XML parser"""
homepage = "http://grinninglizard.com/tinyxml/"
- url = "https://sourceforge.net/projects/tinyxml/files/tinyxml/2.6.2/tinyxml_2_6_2.tar.gz"
+ url = "https://downloads.sourceforge.net/project/tinyxml/tinyxml/2.6.2/tinyxml_2_6_2.tar.gz"
version('2.6.2', 'cba3f50dd657cb1434674a03b21394df9913d764')
+ def url_for_version(self, version):
+ url = "https://sourceforge.net/projects/tinyxml/files/tinyxml/{0}/tinyxml_{1}.tar.gz"
+ return url.format(version.dotted, version.underscored)
+
def patch(self):
copyfile(join_path(os.path.dirname(__file__),
"CMakeLists.txt"), "CMakeLists.txt")
diff --git a/var/spack/repos/builtin/packages/tk/package.py b/var/spack/repos/builtin/packages/tk/package.py
index 4d9651315a..fdcb29a785 100644
--- a/var/spack/repos/builtin/packages/tk/package.py
+++ b/var/spack/repos/builtin/packages/tk/package.py
@@ -33,6 +33,7 @@ class Tk(AutotoolsPackage):
applications that run unchanged across Windows, Mac OS X, Linux
and more."""
homepage = "http://www.tcl.tk"
+ url = "http://prdownloads.sourceforge.net/tcl/tk8.6.5-src.tar.gz"
version('8.6.6', 'dd7dbb3a6523c42d05f6ab6e86096e99')
version('8.6.5', '11dbbd425c3e0201f20d6a51482ce6c4')
@@ -43,10 +44,6 @@ class Tk(AutotoolsPackage):
configure_directory = 'unix'
- def url_for_version(self, version):
- base_url = "http://prdownloads.sourceforge.net/tcl"
- return "{0}/tk{1}-src.tar.gz".format(base_url, version)
-
def setup_environment(self, spack_env, run_env):
# When using Tkinter from within spack provided python+tk, python
# will not be able to find Tcl/Tk unless TK_LIBRARY is set.
diff --git a/var/spack/repos/builtin/packages/trilinos/package.py b/var/spack/repos/builtin/packages/trilinos/package.py
index 3de72ea6c8..0e0d86fa3c 100644
--- a/var/spack/repos/builtin/packages/trilinos/package.py
+++ b/var/spack/repos/builtin/packages/trilinos/package.py
@@ -44,7 +44,7 @@ class Trilinos(CMakePackage):
A unique design feature of Trilinos is its focus on packages.
"""
homepage = "https://trilinos.org/"
- base_url = "https://github.com/trilinos/Trilinos/archive"
+ url = "https://github.com/trilinos/Trilinos/archive/trilinos-release-12-10-1.tar.gz"
version('develop',
git='https://github.com/trilinos/Trilinos.git', tag='develop')
@@ -63,10 +63,6 @@ class Trilinos(CMakePackage):
version('11.14.2', 'e7c3cdbbfe3279a8a68838b873ad6d51')
version('11.14.1', 'b7760b142eef66c79ed13de7c9560f81')
- def url_for_version(self, version):
- return '%s/trilinos-release-%s.tar.gz' % \
- (Trilinos.base_url, version.dashed)
-
variant('xsdkflags', default=False,
description='Compile using the default xSDK configuration')
variant('metis', default=True,
@@ -125,6 +121,10 @@ class Trilinos(CMakePackage):
patch('umfpack_from_suitesparse.patch', when='@:12.8.1')
+ def url_for_version(self, version):
+ url = "https://github.com/trilinos/Trilinos/archive/trilinos-release-{0}.tar.gz"
+ return url.format(version.dashed)
+
# check that the combination of variants makes sense
def variants_check(self):
if '+superlu-dist' in self.spec and self.spec.satisfies('@:11.4.3'):
diff --git a/var/spack/repos/builtin/packages/unison/package.py b/var/spack/repos/builtin/packages/unison/package.py
index 181e1e6410..aa890ea869 100644
--- a/var/spack/repos/builtin/packages/unison/package.py
+++ b/var/spack/repos/builtin/packages/unison/package.py
@@ -34,7 +34,7 @@ class Unison(Package):
other."""
homepage = "https://www.cis.upenn.edu/~bcpierce/unison/"
- url = "https://www.seas.upenn.edu/~bcpierce/unison//download/releases/stable/unison-2.48.3.tar.gz"
+ url = "https://www.seas.upenn.edu/~bcpierce/unison//download/releases/stable/unison-2.48.4.tar.gz"
version('2.48.4', '5334b78c7e68169df7de95f4c6c4b60f')
diff --git a/var/spack/repos/builtin/packages/util-linux/package.py b/var/spack/repos/builtin/packages/util-linux/package.py
index 5bbaa995f8..2fc413a98e 100644
--- a/var/spack/repos/builtin/packages/util-linux/package.py
+++ b/var/spack/repos/builtin/packages/util-linux/package.py
@@ -31,7 +31,7 @@ class UtilLinux(AutotoolsPackage):
homepage = "http://freecode.com/projects/util-linux"
url = "https://www.kernel.org/pub/linux/utils/util-linux/v2.29/util-linux-2.29.1.tar.gz"
list_url = "https://www.kernel.org/pub/linux/utils/util-linux"
- list_depth = 2
+ list_depth = 1
version('2.29.1', 'c7d5c111ef6bc5df65659e0b523ac9d9')
version('2.25', 'f6d7fc6952ec69c4dc62c8d7c59c1d57')
diff --git a/var/spack/repos/builtin/packages/vim/package.py b/var/spack/repos/builtin/packages/vim/package.py
index 1ac0ce5186..4c5035e3ae 100644
--- a/var/spack/repos/builtin/packages/vim/package.py
+++ b/var/spack/repos/builtin/packages/vim/package.py
@@ -37,6 +37,7 @@ class Vim(AutotoolsPackage):
homepage = "http://www.vim.org"
url = "https://github.com/vim/vim/archive/v8.0.0134.tar.gz"
+ version('8.0.0503', '82b77bd5cb38b70514bed47cfe033b8c')
version('8.0.0454', '4030bf677bdfbd14efb588e4d9a24128')
version('8.0.0134', 'c74668d25c2acc85d655430dd60886cd')
version('7.4.2367', 'a0a7bc394f7ab1d95571fe6ab05da3ea')
diff --git a/var/spack/repos/builtin/packages/voropp/package.py b/var/spack/repos/builtin/packages/voropp/package.py
index 0e39769927..0fc84ef252 100644
--- a/var/spack/repos/builtin/packages/voropp/package.py
+++ b/var/spack/repos/builtin/packages/voropp/package.py
@@ -31,19 +31,10 @@ class Voropp(MakefilePackage):
scientific fields."""
homepage = "http://math.lbl.gov/voro++/about.html"
-
- # This url is wrong but it passes the test the ++ make the url parser fail,
- # the correct url is constructed by url_for_version that has to be used in
- # any case due to the difference between the package name and the url
- url = "http://math.lbl.gov/voropp/download/dir/voropp-0.4.6.tar.gz"
+ url = "http://math.lbl.gov/voro++/download/dir/voro++-0.4.6.tar.gz"
version('0.4.6', '2338b824c3b7b25590e18e8df5d68af9')
- def url_for_version(self, version):
- url = "http://math.lbl.gov/voro++/download/dir/voro++-{0}.tar.gz".format( # noqa: E501
- str(version))
- return url
-
def edit(self, spec, prefix):
filter_file(r'CC=g\+\+',
'CC={0}'.format(self.compiler.cxx),
diff --git a/var/spack/repos/builtin/packages/vtk/package.py b/var/spack/repos/builtin/packages/vtk/package.py
index c577949c3a..dafeae6dbb 100644
--- a/var/spack/repos/builtin/packages/vtk/package.py
+++ b/var/spack/repos/builtin/packages/vtk/package.py
@@ -33,6 +33,7 @@ class Vtk(CMakePackage):
homepage = "http://www.vtk.org"
url = "http://www.vtk.org/files/release/7.1/VTK-7.1.0.tar.gz"
+ list_url = "http://www.vtk.org/download/"
version('7.1.0', 'a7e814c1db503d896af72458c2d0228f')
version('7.0.0', '5fe35312db5fb2341139b8e4955c367d')
diff --git a/var/spack/repos/builtin/packages/wannier90/package.py b/var/spack/repos/builtin/packages/wannier90/package.py
index ad61860d73..25d238dd64 100644
--- a/var/spack/repos/builtin/packages/wannier90/package.py
+++ b/var/spack/repos/builtin/packages/wannier90/package.py
@@ -29,7 +29,7 @@ import shutil
from spack import *
-class Wannier90(Package):
+class Wannier90(MakefilePackage):
"""Wannier90 calculates maximally-localised Wannier functions (MLWFs).
Wannier90 is released under the GNU General Public License.
@@ -37,6 +37,7 @@ class Wannier90(Package):
homepage = 'http://wannier.org'
url = 'http://wannier.org/code/wannier90-2.0.1.tar.gz'
+ version('2.1.0', '07a81c002b41d6d0f97857e55c57d769')
version('2.0.1', '4edd742506eaba93317249d33261fb22')
depends_on('mpi')
@@ -45,7 +46,23 @@ class Wannier90(Package):
parallel = False
- def install(self, spec, prefix):
+ build_targets = [
+ 'wannier', 'post', 'lib', 'w90chk2chk', 'w90vdw', 'w90pov'
+ ]
+
+ @property
+ def makefile_name(self):
+ # Older versions use 'make.sys'
+ filename = 'make.sys'
+
+ # While newer versions search for 'make.inc'
+ if self.spec.satisfies('@2.1.0:'):
+ filename = 'make.inc'
+
+ abspath = join_path(self.stage.source_path, filename)
+ return abspath
+
+ def edit(self, spec, prefix):
lapack = self.spec['lapack'].libs
blas = self.spec['blas'].libs
@@ -54,57 +71,46 @@ class Wannier90(Package):
'@MPIF90': self.spec['mpi'].mpifc,
'@LIBS': (lapack + blas).joined()
}
- #######
- # TODO : this part is replicated in PEXSI
- # TODO : and may be a common pattern for Editable Makefiles
- # TODO : see #1186
+
template = join_path(
os.path.dirname(inspect.getmodule(self).__file__),
'make.sys'
)
- makefile = join_path(
- self.stage.source_path,
- 'make.sys'
- )
- shutil.copy(template, makefile)
+ shutil.copy(template, self.makefile_name)
for key, value in substitutions.items():
- filter_file(key, value, makefile)
- ######
+ filter_file(key, value, self.makefile_name)
+
+ def install(self, spec, prefix):
- make('wannier')
mkdirp(self.prefix.bin)
+ mkdirp(self.prefix.lib)
+
install(
join_path(self.stage.source_path, 'wannier90.x'),
join_path(self.prefix.bin, 'wannier90.x')
)
- make('post')
install(
join_path(self.stage.source_path, 'postw90.x'),
join_path(self.prefix.bin, 'postw90.x')
)
- make('lib')
- mkdirp(self.prefix.lib)
install(
join_path(self.stage.source_path, 'libwannier.a'),
join_path(self.prefix.lib, 'libwannier.a')
)
- make('w90chk2chk')
install(
join_path(self.stage.source_path, 'w90chk2chk.x'),
join_path(self.prefix.bin, 'w90chk2chk.x')
)
- make('w90vdw')
install(
join_path(self.stage.source_path, 'utility', 'w90vdw', 'w90vdw.x'),
join_path(self.prefix.bin, 'w90vdw.x')
)
- make('w90pov')
install(
join_path(self.stage.source_path, 'utility', 'w90pov', 'w90pov'),
join_path(self.prefix.bin, 'w90pov')
diff --git a/var/spack/repos/builtin/packages/xsdktrilinos/package.py b/var/spack/repos/builtin/packages/xsdktrilinos/package.py
index ea49054435..7e88b2f9eb 100644
--- a/var/spack/repos/builtin/packages/xsdktrilinos/package.py
+++ b/var/spack/repos/builtin/packages/xsdktrilinos/package.py
@@ -32,16 +32,12 @@ class Xsdktrilinos(CMakePackage):
Trilinos.
"""
homepage = "https://trilinos.org/"
- base_url = "https://github.com/trilinos/xSDKTrilinos/archive"
+ url = "https://github.com/trilinos/xSDKTrilinos/archive/trilinos-release-12-8-1.tar.gz"
version('develop', git='https://github.com/trilinos/xSDKTrilinos.git', tag='master')
version('12.8.1', '9cc338ded17d1e10ea6c0dc18b22dcd4')
version('12.6.4', '44c4c54ccbac73bb8939f68797b9454a')
- def url_for_version(self, version):
- return '%s/trilinos-release-%s.tar.gz' % \
- (Xsdktrilinos.base_url, version.dashed)
-
variant('hypre', default=True,
description='Compile with Hypre preconditioner')
variant('petsc', default=True,
@@ -59,6 +55,10 @@ class Xsdktrilinos(CMakePackage):
depends_on('trilinos@12.8.1', when='@12.8.1')
depends_on('trilinos@develop', when='@develop')
+ def url_for_version(self, version):
+ url = "https://github.com/trilinos/xSDKTrilinos/archive/trilinos-release-{0}.tar.gz"
+ return url.format(version.dashed)
+
def cmake_args(self):
spec = self.spec
diff --git a/var/spack/repos/builtin/packages/yorick/package.py b/var/spack/repos/builtin/packages/yorick/package.py
index 52a4d8787d..9cbd417e4e 100644
--- a/var/spack/repos/builtin/packages/yorick/package.py
+++ b/var/spack/repos/builtin/packages/yorick/package.py
@@ -31,7 +31,7 @@ import glob
class Yorick(Package):
"""Yorick is an interpreted programming language for scientific simulations
or calculations, postprocessing or steering large simulation codes,
- interactive scientific graphics, and reading, writing, or translating
+ interactive scientific graphics, and reading, writing, or translating
files of numbers. Yorick includes an interactive graphics package, and a
binary file package capable of translating to and from the raw numeric
formats of all modern computers. Yorick is written in ANSI C and runs on
@@ -39,9 +39,9 @@ class Yorick(Package):
"""
homepage = "http://dhmunro.github.io/yorick-doc/"
- url = "https://github.com/dhmunro/yorick/archive/y_2_2_04.tar.gz"
+ url = "https://github.com/dhmunro/yorick/archive/y_2_2_04.tar.gz"
- version('2.2.04', md5='1b5b0da6ad81b2d9dba64d991ec17939')
+ version('2.2.04', '1b5b0da6ad81b2d9dba64d991ec17939')
version('master', branch='master',
git='https://github.com/dhmunro/yorick.git')
version('f90-plugin', branch='f90-plugin',
@@ -51,6 +51,10 @@ class Yorick(Package):
depends_on('libx11', when='+X')
+ def url_for_version(self, version):
+ url = "https://github.com/dhmunro/yorick/archive/y_{0}.tar.gz"
+ return url.format(version.underscored)
+
def install(self, spec, prefix):
os.environ['FORTRAN_LINKAGE'] = '-Df_linkage'
diff --git a/var/spack/repos/builtin/packages/zoltan/package.py b/var/spack/repos/builtin/packages/zoltan/package.py
index 21c90a05e4..b6720b7b1e 100644
--- a/var/spack/repos/builtin/packages/zoltan/package.py
+++ b/var/spack/repos/builtin/packages/zoltan/package.py
@@ -41,7 +41,7 @@ class Zoltan(Package):
"""
homepage = "http://www.cs.sandia.gov/zoltan"
- base_url = "http://www.cs.sandia.gov/~kddevin/Zoltan_Distributions"
+ url = "http://www.cs.sandia.gov/~kddevin/Zoltan_Distributions/zoltan_distrib_v3.83.tar.gz"
version('3.83', '1ff1bc93f91e12f2c533ddb01f2c095f')
version('3.8', '9d8fba8a990896881b85351d4327c4a9')
@@ -56,9 +56,6 @@ class Zoltan(Package):
depends_on('mpi', when='+mpi')
- def url_for_version(self, version):
- return '%s/zoltan_distrib_v%s.tar.gz' % (Zoltan.base_url, version)
-
def install(self, spec, prefix):
# FIXME: The older Zoltan versions fail to compile the F90 MPI wrappers
# because of some complicated generic type problem.