author    | alalazo <massimiliano.culpo@googlemail.com> | 2016-10-11 16:28:08 +0200
committer | alalazo <massimiliano.culpo@googlemail.com> | 2016-10-11 16:28:08 +0200
commit    | ab995df777ca8bd0340f66be5624fa543517c13d (patch)
tree      | 660335e5d00ae68ab61b3cd21faef3e9135aacb1
parent    | 7a26c60dbd73e0d5c5c6ec1fce8087c889fd0f25 (diff)
parent    | 4d939802ae208a79ef685fe59b204bcc86df4b6b (diff)
Merge branch 'develop' of https://github.com/LLNL/spack into features/install_with_phases_rebase
Conflicts:
lib/spack/spack/build_environment.py
lib/spack/spack/cmd/install.py
lib/spack/spack/cmd/setup.py
lib/spack/spack/package.py
var/spack/repos/builtin/packages/gmp/package.py
var/spack/repos/builtin/packages/hdf5/package.py
186 files changed, 5436 insertions, 1635 deletions
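
For readers who want to reproduce or inspect a merge like this locally, the sketch below shows the generic Git workflow behind such a commit. The branch name, upstream URL, and conflicting files are taken from the commit message and conflict list above; the local setup (an existing clone with the feature branch checked out) and the comments are illustrative assumptions, not part of the recorded commit.

    $ git checkout features/install_with_phases_rebase
    $ git pull https://github.com/LLNL/spack develop    # merge upstream develop; Git stops on the conflicts
    $ git status                                        # shows the six conflicting files listed above
    # resolve the conflict markers in each file, then stage them
    $ git add lib/spack/spack/build_environment.py lib/spack/spack/cmd/install.py \
              lib/spack/spack/cmd/setup.py lib/spack/spack/package.py \
              var/spack/repos/builtin/packages/gmp/package.py \
              var/spack/repos/builtin/packages/hdf5/package.py
    $ git commit    # records the merge commit with the message shown above
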
diff --git a/.gitignore b/.gitignore index e6200a0676..f25ac615fa 100644 --- a/.gitignore +++ b/.gitignore @@ -17,3 +17,5 @@ /TAGS /htmlcov .coverage +#* +.#* diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst index fd701789ec..bbedfff3fc 100644 --- a/lib/spack/docs/basic_usage.rst +++ b/lib/spack/docs/basic_usage.rst @@ -1,7 +1,7 @@ .. _basic-usage: =========== -Basic usage +Basic Usage =========== The ``spack`` command has many *subcommands*. You'll only need a @@ -14,7 +14,7 @@ Spack to maintain this colorization. E.g.: $ spack find | less -R -It is recommend that the following be put in your ``.bashrc`` file: +It is recommended that the following be put in your ``.bashrc`` file: .. code-block:: sh @@ -28,7 +28,7 @@ To install software with Spack, you need to know what software is available. You can see a list of available package names at the :ref:`package-list` webpage, or using the ``spack list`` command. -.. _spack-list: +.. _cmd-spack-list: ^^^^^^^^^^^^^^ ``spack list`` @@ -57,13 +57,13 @@ All packages whose names start with a capital M: All packages whose names or descriptions contain Documentation: -.. command-output:: spack list -d Documentation +.. command-output:: spack list --search-description Documentation All packages whose names contain documentation case insensitive: -.. command-output:: spack list -d documentation +.. command-output:: spack list --search-description documentation -.. _spack-info: +.. _cmd-spack-info: ^^^^^^^^^^^^^^ ``spack info`` @@ -82,7 +82,7 @@ viruses. :ref:`Dependencies <sec-specs>` and :ref:`virtual dependencies <sec-virtual-dependencies>` are described in more detail later. -.. _spack-versions: +.. _cmd-spack-versions: ^^^^^^^^^^^^^^^^^^ ``spack versions`` @@ -107,7 +107,7 @@ able to find remote versions. Installing and uninstalling --------------------------- -.. _spack-install: +.. _cmd-spack-install: ^^^^^^^^^^^^^^^^^ ``spack install`` @@ -180,14 +180,14 @@ configuration a **spec**. In the commands above, ``mpileaks`` and ``mpileaks@3.0.4`` are both valid *specs*. We'll talk more about how you can use them to customize an installation in :ref:`sec-specs`. -.. _spack-uninstall: +.. _cmd-spack-uninstall: ^^^^^^^^^^^^^^^^^^^ ``spack uninstall`` ^^^^^^^^^^^^^^^^^^^ To uninstall a package, type ``spack uninstall <package>``. This will ask -the user for confirmation, and in case will completely remove the directory +the user for confirmation before completely removing the directory in which the package was installed. .. code-block:: console @@ -230,6 +230,54 @@ but you risk breaking other installed packages. In general, it is safer to remove dependent packages *before* removing their dependencies or use the ``--dependents`` option. + +.. _nondownloadable: + +^^^^^^^^^^^^^^^^^^^^^^^^^ +Non-Downloadable Tarballs +^^^^^^^^^^^^^^^^^^^^^^^^^ + +The tarballs for some packages cannot be automatically downloaded by +Spack. This could be for a number of reasons: + +#. The author requires users to manually accept a license agreement + before downloading (``jdk`` and ``galahad``). + +#. The software is proprietary and cannot be downloaded on the open + Internet. + +To install these packages, one must create a mirror and manually add +the tarballs in question to it (see :ref:`mirrors`): + +#. Create a directory for the mirror. You can create this directory + anywhere you like, it does not have to be inside ``~/.spack``: + + .. code-block:: console + + $ mkdir ~/.spack/manual_mirror + +#. 
Register the mirror with Spack by creating ``~/.spack/mirrors.yaml``: + + .. code-block:: yaml + + mirrors: + manual: file:///home/me/.spack/manual_mirror + +#. Put your tarballs in it. Tarballs should be named + ``<package>/<package>-<version>.tar.gz``. For example: + + .. code-block:: console + + $ ls -l manual_mirror/galahad + + -rw-------. 1 me me 11657206 Jun 21 19:25 galahad-2.60003.tar.gz + +#. Install as usual: + + .. code-block:: console + + $ spack install galahad + ------------------------- Seeing installed packages ------------------------- @@ -237,7 +285,7 @@ Seeing installed packages We know that ``spack list`` shows you the names of available packages, but how do you figure out which are already installed? -.. _spack-find: +.. _cmd-spack-find: ^^^^^^^^^^^^^^ ``spack find`` @@ -382,175 +430,6 @@ with the 'debug' compile-time option enabled. The full spec syntax is discussed in detail in :ref:`sec-specs`. -.. _compiler-config: - ----------------------- -Compiler configuration ----------------------- - -Spack has the ability to build packages with multiple compilers and -compiler versions. Spack searches for compilers on your machine -automatically the first time it is run. It does this by inspecting -your ``PATH``. - -.. _spack-compilers: - -^^^^^^^^^^^^^^^^^^^ -``spack compilers`` -^^^^^^^^^^^^^^^^^^^ - -You can see which compilers spack has found by running ``spack -compilers`` or ``spack compiler list``: - -.. code-block:: console - - $ spack compilers - ==> Available compilers - -- gcc --------------------------------------------------------- - gcc@4.9.0 gcc@4.8.0 gcc@4.7.0 gcc@4.6.2 gcc@4.4.7 - gcc@4.8.2 gcc@4.7.1 gcc@4.6.3 gcc@4.6.1 gcc@4.1.2 - -- intel ------------------------------------------------------- - intel@15.0.0 intel@14.0.0 intel@13.0.0 intel@12.1.0 intel@10.0 - intel@14.0.3 intel@13.1.1 intel@12.1.5 intel@12.0.4 intel@9.1 - intel@14.0.2 intel@13.1.0 intel@12.1.3 intel@11.1 - intel@14.0.1 intel@13.0.1 intel@12.1.2 intel@10.1 - -- clang ------------------------------------------------------- - clang@3.4 clang@3.3 clang@3.2 clang@3.1 - -- pgi --------------------------------------------------------- - pgi@14.3-0 pgi@13.2-0 pgi@12.1-0 pgi@10.9-0 pgi@8.0-1 - pgi@13.10-0 pgi@13.1-1 pgi@11.10-0 pgi@10.2-0 pgi@7.1-3 - pgi@13.6-0 pgi@12.8-0 pgi@11.1-0 pgi@9.0-4 pgi@7.0-6 - -Any of these compilers can be used to build Spack packages. More on -how this is done is in :ref:`sec-specs`. - -.. _spack-compiler-add: - -^^^^^^^^^^^^^^^^^^^^^^ -``spack compiler add`` -^^^^^^^^^^^^^^^^^^^^^^ - -An alias for ``spack compiler find``. - -.. _spack-compiler-find: - -^^^^^^^^^^^^^^^^^^^^^^^ -``spack compiler find`` -^^^^^^^^^^^^^^^^^^^^^^^ - -If you do not see a compiler in this list, but you want to use it with -Spack, you can simply run ``spack compiler find`` with the path to -where the compiler is installed. For example: - -.. code-block:: console - - $ spack compiler find /usr/local/tools/ic-13.0.079 - ==> Added 1 new compiler to /Users/gamblin2/.spack/compilers.yaml - intel@13.0.079 - -Or you can run ``spack compiler find`` with no arguments to force -auto-detection. This is useful if you do not know where compilers are -installed, but you know that new compilers have been added to your -``PATH``. For example, using dotkit, you might do this: - -.. 
code-block:: console - - $ module load gcc-4.9.0 - $ spack compiler find - ==> Added 1 new compiler to /Users/gamblin2/.spack/compilers.yaml - gcc@4.9.0 - -This loads the environment module for gcc-4.9.0 to add it to -``PATH``, and then it adds the compiler to Spack. - -.. _spack-compiler-info: - -^^^^^^^^^^^^^^^^^^^^^^^ -``spack compiler info`` -^^^^^^^^^^^^^^^^^^^^^^^ - -If you want to see specifics on a particular compiler, you can run -``spack compiler info`` on it: - -.. code-block:: console - - $ spack compiler info intel@15 - intel@15.0.0: - cc = /usr/local/bin/icc-15.0.090 - cxx = /usr/local/bin/icpc-15.0.090 - f77 = /usr/local/bin/ifort-15.0.090 - fc = /usr/local/bin/ifort-15.0.090 - modules = [] - operating system = centos6 - -This shows which C, C++, and Fortran compilers were detected by Spack. -Notice also that we didn't have to be too specific about the -version. We just said ``intel@15``, and information about the only -matching Intel compiler was displayed. - -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Manual compiler configuration -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -If auto-detection fails, you can manually configure a compiler by -editing your ``~/.spack/compilers.yaml`` file. You can do this by running -``spack config edit compilers``, which will open the file in your ``$EDITOR``. - -Each compiler configuration in the file looks like this: - -.. code-block:: yaml - - compilers: - - compiler: - modules = [] - operating_system: centos6 - paths: - cc: /usr/local/bin/icc-15.0.024-beta - cxx: /usr/local/bin/icpc-15.0.024-beta - f77: /usr/local/bin/ifort-15.0.024-beta - fc: /usr/local/bin/ifort-15.0.024-beta - spec: intel@15.0.0: - -For compilers, like ``clang``, that do not support Fortran, put -``None`` for ``f77`` and ``fc``: - -.. code-block:: yaml - - paths: - cc: /usr/bin/clang - cxx: /usr/bin/clang++ - f77: None - fc: None - spec: clang@3.3svn: - -Once you save the file, the configured compilers will show up in the -list displayed by ``spack compilers``. - -You can also add compiler flags to manually configured compilers. The -valid flags are ``cflags``, ``cxxflags``, ``fflags``, ``cppflags``, -``ldflags``, and ``ldlibs``. For example: - -.. code-block:: yaml - - compilers: - - compiler: - modules = [] - operating_system: OS - paths: - cc: /usr/local/bin/icc-15.0.024-beta - cxx: /usr/local/bin/icpc-15.0.024-beta - f77: /usr/local/bin/ifort-15.0.024-beta - fc: /usr/local/bin/ifort-15.0.024-beta - parameters: - cppflags: -O3 -fPIC - spec: intel@15.0.0: - -These flags will be treated by spack as if they were enterred from -the command line each time this compiler is used. The compiler wrappers -then inject those flags into the compiler command. Compiler flags -enterred from the command line will be discussed in more detail in the -following section. - .. _sec-specs: -------------------- @@ -919,7 +798,7 @@ it refers. Otherwise, it will prompt for a more qualified hash. Note that this will not work to reinstall a depencency uninstalled by ``spack uninstall --force``. -.. _spack-providers: +.. _cmd-spack-providers: ^^^^^^^^^^^^^^^^^^^ ``spack providers`` @@ -945,51 +824,17 @@ versions are now filtered out. Integration with module systems ------------------------------- -.. note:: - - Environment module support is currently experimental and should not - be considered a stable feature of Spack. In particular, the - interface and/or generated module names may change in future - versions. 
- -Spack provides some integration with -`Environment Modules <http://modules.sourceforge.net/>`__ -and `Dotkit <https://computing.llnl.gov/?set=jobs&page=dotkit>`_ to make -it easier to use the packages it installed. - -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Installing Environment Modules -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -In order to use Spack's generated environment modules, you must have -installed the *Environment Modules* package. On many Linux -distributions, this can be installed from the vendor's repository: +Spack provides some integration with `Environment Modules +<http://modules.sourceforge.net/>`_ to make it easier to use the +packages it installs. If your system does not already have +Environment Modules, see :ref:`InstallEnvironmentModules`. -.. code-block:: sh - - $ yum install environment-modules # (Fedora/RHEL/CentOS) - $ apt-get install environment-modules # (Ubuntu/Debian) - -If your Linux distribution does not have -Environment Modules, you can get it with Spack: - -.. code-block:: console - - $ spack install environment-modules - -In this case to activate it automatically you need to add the following two -lines to your ``.bashrc`` profile (or similar): - -.. code-block:: sh - - MODULES_HOME=`spack location -i environment-modules` - source ${MODULES_HOME}/Modules/init/bash - -If you use a Unix shell other than ``bash``, modify the commands above -accordingly and source the appropriate file in -``${MODULES_HOME}/Modules/init/``. +.. note:: -.. TODO : Add a similar section on how to install dotkit ? + Spack also supports `Dotkit + <https://computing.llnl.gov/?set=jobs&page=dotkit>`_, which is used + by some systems. If you system does not already have a module + system installed, you should use Environment Modules or LMod. ^^^^^^^^^^^^^^^^^^^^^^^^ Spack and module systems @@ -1196,9 +1041,36 @@ of module files: """Set up the compile and runtime environments for a package.""" pass -""""""""""""""""" +.. code-block:: python + + def setup_dependent_environment(self, spack_env, run_env, dependent_spec): + """Set up the environment of packages that depend on this one""" + pass + +As briefly stated in the comments, the first method lets you customize the +module file content for the package you are currently writing, the second +allows for modifications to your dependees module file. In both cases one +needs to fill ``run_env`` with the desired list of environment modifications. + +"""""""""""""""""""""""""""""""""""""""""""""""" +Example : ``builtin/packages/python/package.py`` +"""""""""""""""""""""""""""""""""""""""""""""""" + +The ``python`` package that comes with the ``builtin`` Spack repository +overrides ``setup_dependent_environment`` in the following way: + +.. code-block:: python + + def setup_dependent_environment(self, spack_env, run_env, extension_spec): + if extension_spec.package.extends(self.spec): + run_env.prepend_path('PYTHONPATH', os.path.join(extension_spec.prefix, self.site_packages_dir)) + +to insert the appropriate ``PYTHONPATH`` modifications in the module +files of python packages. + +^^^^^^^^^^^^^^^^^ Recursive Modules -""""""""""""""""" +^^^^^^^^^^^^^^^^^ In some cases, it is desirable to load not just a module, but also all the modules it depends on. This is not required for most modules @@ -1207,18 +1079,30 @@ packages use RPATH to find their dependencies: this can be true in particular for Python extensions, which are currently *not* built with RPATH. 
-Modules may be loaded recursively with the ``load`` command's -``--dependencies`` or ``-r`` argument: +Scripts to load modules recursively may be made with the command: .. code-block:: console - $ spack load --dependencies <spec> ... + $ spack module loads --dependencies <spec> + +An equivalent alternative is: + +.. code-block :: console + + $ source <( spack module loads --dependencies <spec> ) + +.. warning:: + + The ``spack load`` command does not currently accept the + ``--dependencies`` flag. Use ``spack module loads`` instead, for + now. + +.. See #1662 -More than one spec may be placed on the command line here. -""""""""""""""""""""""""""""""""" +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Module Commands for Shell Scripts -""""""""""""""""""""""""""""""""" +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Although Spack is flexible, the ``module`` command is much faster. This could become an issue when emitting a series of ``spack load`` @@ -1228,75 +1112,64 @@ cut-and-pasted into a shell script. For example: .. code-block:: console - $ spack module find tcl --dependencies --shell py-numpy git - # bzip2@1.0.6%gcc@4.9.3=linux-x86_64 - module load bzip2-1.0.6-gcc-4.9.3-ktnrhkrmbbtlvnagfatrarzjojmkvzsx - # ncurses@6.0%gcc@4.9.3=linux-x86_64 - module load ncurses-6.0-gcc-4.9.3-kaazyneh3bjkfnalunchyqtygoe2mncv - # zlib@1.2.8%gcc@4.9.3=linux-x86_64 - module load zlib-1.2.8-gcc-4.9.3-v3ufwaahjnviyvgjcelo36nywx2ufj7z - # sqlite@3.8.5%gcc@4.9.3=linux-x86_64 - module load sqlite-3.8.5-gcc-4.9.3-a3eediswgd5f3rmto7g3szoew5nhehbr - # readline@6.3%gcc@4.9.3=linux-x86_64 - module load readline-6.3-gcc-4.9.3-se6r3lsycrwxyhreg4lqirp6xixxejh3 - # python@3.5.1%gcc@4.9.3=linux-x86_64 - module load python-3.5.1-gcc-4.9.3-5q5rsrtjld4u6jiicuvtnx52m7tfhegi - # py-setuptools@20.5%gcc@4.9.3=linux-x86_64 - module load py-setuptools-20.5-gcc-4.9.3-4qr2suj6p6glepnedmwhl4f62x64wxw2 - # py-nose@1.3.7%gcc@4.9.3=linux-x86_64 - module load py-nose-1.3.7-gcc-4.9.3-pwhtjw2dvdvfzjwuuztkzr7b4l6zepli - # openblas@0.2.17%gcc@4.9.3+shared=linux-x86_64 - module load openblas-0.2.17-gcc-4.9.3-pw6rmlom7apfsnjtzfttyayzc7nx5e7y - # py-numpy@1.11.0%gcc@4.9.3+blas+lapack=linux-x86_64 - module load py-numpy-1.11.0-gcc-4.9.3-mulodttw5pcyjufva4htsktwty4qd52r - # curl@7.47.1%gcc@4.9.3=linux-x86_64 - module load curl-7.47.1-gcc-4.9.3-ohz3fwsepm3b462p5lnaquv7op7naqbi - # autoconf@2.69%gcc@4.9.3=linux-x86_64 - module load autoconf-2.69-gcc-4.9.3-bkibjqhgqm5e3o423ogfv2y3o6h2uoq4 - # cmake@3.5.0%gcc@4.9.3~doc+ncurses+openssl~qt=linux-x86_64 - module load cmake-3.5.0-gcc-4.9.3-x7xnsklmgwla3ubfgzppamtbqk5rwn7t - # expat@2.1.0%gcc@4.9.3=linux-x86_64 - module load expat-2.1.0-gcc-4.9.3-6pkz2ucnk2e62imwakejjvbv6egncppd - # git@2.8.0-rc2%gcc@4.9.3+curl+expat=linux-x86_64 - module load git-2.8.0-rc2-gcc-4.9.3-3bib4hqtnv5xjjoq5ugt3inblt4xrgkd + $ spack module loads --dependencies py-numpy git + # bzip2@1.0.6%gcc@4.9.3=linux-x86_64 + module load bzip2-1.0.6-gcc-4.9.3-ktnrhkrmbbtlvnagfatrarzjojmkvzsx + # ncurses@6.0%gcc@4.9.3=linux-x86_64 + module load ncurses-6.0-gcc-4.9.3-kaazyneh3bjkfnalunchyqtygoe2mncv + # zlib@1.2.8%gcc@4.9.3=linux-x86_64 + module load zlib-1.2.8-gcc-4.9.3-v3ufwaahjnviyvgjcelo36nywx2ufj7z + # sqlite@3.8.5%gcc@4.9.3=linux-x86_64 + module load sqlite-3.8.5-gcc-4.9.3-a3eediswgd5f3rmto7g3szoew5nhehbr + # readline@6.3%gcc@4.9.3=linux-x86_64 + module load readline-6.3-gcc-4.9.3-se6r3lsycrwxyhreg4lqirp6xixxejh3 + # python@3.5.1%gcc@4.9.3=linux-x86_64 + module load python-3.5.1-gcc-4.9.3-5q5rsrtjld4u6jiicuvtnx52m7tfhegi + # 
py-setuptools@20.5%gcc@4.9.3=linux-x86_64 + module load py-setuptools-20.5-gcc-4.9.3-4qr2suj6p6glepnedmwhl4f62x64wxw2 + # py-nose@1.3.7%gcc@4.9.3=linux-x86_64 + module load py-nose-1.3.7-gcc-4.9.3-pwhtjw2dvdvfzjwuuztkzr7b4l6zepli + # openblas@0.2.17%gcc@4.9.3+shared=linux-x86_64 + module load openblas-0.2.17-gcc-4.9.3-pw6rmlom7apfsnjtzfttyayzc7nx5e7y + # py-numpy@1.11.0%gcc@4.9.3+blas+lapack=linux-x86_64 + module load py-numpy-1.11.0-gcc-4.9.3-mulodttw5pcyjufva4htsktwty4qd52r + # curl@7.47.1%gcc@4.9.3=linux-x86_64 + module load curl-7.47.1-gcc-4.9.3-ohz3fwsepm3b462p5lnaquv7op7naqbi + # autoconf@2.69%gcc@4.9.3=linux-x86_64 + module load autoconf-2.69-gcc-4.9.3-bkibjqhgqm5e3o423ogfv2y3o6h2uoq4 + # cmake@3.5.0%gcc@4.9.3~doc+ncurses+openssl~qt=linux-x86_64 + module load cmake-3.5.0-gcc-4.9.3-x7xnsklmgwla3ubfgzppamtbqk5rwn7t + # expat@2.1.0%gcc@4.9.3=linux-x86_64 + module load expat-2.1.0-gcc-4.9.3-6pkz2ucnk2e62imwakejjvbv6egncppd + # git@2.8.0-rc2%gcc@4.9.3+curl+expat=linux-x86_64 + module load git-2.8.0-rc2-gcc-4.9.3-3bib4hqtnv5xjjoq5ugt3inblt4xrgkd The script may be further edited by removing unnecessary modules. -This script may be directly executed in bash via: - -.. code-block:: sh - - source < (spack module find tcl --dependencies --shell py-numpy git) - -^^^^^^^^^^^^^^^^^^^^^^^^^ -Regenerating Module files -^^^^^^^^^^^^^^^^^^^^^^^^^ -.. code-block:: python - def setup_dependent_environment(self, spack_env, run_env, dependent_spec): - """Set up the environment of packages that depend on this one""" - pass +^^^^^^^^^^^^^^^ +Module Prefixes +^^^^^^^^^^^^^^^ -As briefly stated in the comments, the first method lets you customize the -module file content for the package you are currently writing, the second -allows for modifications to your dependees module file. In both cases one -needs to fill ``run_env`` with the desired list of environment modifications. +On some systems, modules are automatically prefixed with a certain +string; ``spack module loads`` needs to know about that prefix when it +issues ``module load`` commands. Add the ``--prefix`` option to your +``spack module loads`` commands if this is necessary. -"""""""""""""""""""""""""""""""""""""""""""""""" -Example : ``builtin/packages/python/package.py`` -"""""""""""""""""""""""""""""""""""""""""""""""" +For example, consider the following on one system: -The ``python`` package that comes with the ``builtin`` Spack repository -overrides ``setup_dependent_environment`` in the following way: +..code-block:: console -.. code-block:: python + $ module avail + linux-SuSE11-x86_64/antlr-2.7.7-gcc-5.3.0-bdpl46y - def setup_dependent_environment(self, spack_env, run_env, extension_spec): - if extension_spec.package.extends(self.spec): - run_env.prepend_path('PYTHONPATH', os.path.join(extension_spec.prefix, self.site_packages_dir)) + $ spack module loads antlr # WRONG! + # antlr@2.7.7%gcc@5.3.0~csharp+cxx~java~python arch=linux-SuSE11-x86_64 + module load antlr-2.7.7-gcc-5.3.0-bdpl46y -to insert the appropriate ``PYTHONPATH`` modifications in the module -files of python packages. + $ spack module loads --prefix linux-SuSE11-x86_64/ antlr + # antlr@2.7.7%gcc@5.3.0~csharp+cxx~java~python arch=linux-SuSE11-x86_64 + module load linux-SuSE11-x86_64/antlr-2.7.7-gcc-5.3.0-bdpl46y ^^^^^^^^^^^^^^^^^^^ Configuration files @@ -1461,23 +1334,14 @@ load two or more versions of the same software at the same time. 
The ``conflict`` option is ``tcl`` specific ^^^^^^^^^^^^^^^^^^^^^^^^^ -Regenerating module files +Regenerating Module files ^^^^^^^^^^^^^^^^^^^^^^^^^ -Sometimes you may need to regenerate the modules files. For example, -if newer, fancier module support is added to Spack at some later date, -you may want to regenerate all the modules to take advantage of these -new features. - -.. _spack-module: - -"""""""""""""""""""""""" -``spack module refresh`` -"""""""""""""""""""""""" - -Running ``spack module refresh`` will remove the -``share/spack/modules`` and ``share/spack/dotkit`` directories, then -regenerate all module and dotkit files from scratch: +Module and dotkit files are generated when packages are installed, and +are placed in the directory ``share/spack/modules`` under the Spack +root. The command ``spack refresh`` will regenerate them all without +re-building the packages; for example, if module format or options +have changed: .. code-block:: console @@ -1485,117 +1349,6 @@ regenerate all module and dotkit files from scratch: ==> Regenerating tcl module files. ==> Regenerating dotkit module files. ----------------- -Filesystem Views ----------------- - -.. Maybe this is not the right location for this documentation. - -The Spack installation area allows for many package installation trees -to coexist and gives the user choices as to what versions and variants -of packages to use. To use them, the user must rely on a way to -aggregate a subset of those packages. The section on Environment -Modules gives one good way to do that which relies on setting various -environment variables. An alternative way to aggregate is through -**filesystem views**. - -A filesystem view is a single directory tree which is the union of the -directory hierarchies of the individual package installation trees -that have been included. The files of the view's installed packages -are brought into the view by symbolic or hard links back to their -location in the original Spack installation area. As the view is -formed, any clashes due to a file having the exact same path in its -package installation tree are handled in a first-come-first-served -basis and a warning is printed. Packages and their dependencies can -be both added and removed. During removal, empty directories will be -purged. These operations can be limited to pertain to just the -packages listed by the user or to exclude specific dependencies and -they allow for software installed outside of Spack to coexist inside -the filesystem view tree. - -By its nature, a filesystem view represents a particular choice of one -set of packages among all the versions and variants that are available -in the Spack installation area. It is thus equivalent to the -directory hiearchy that might exist under ``/usr/local``. While this -limits a view to including only one version/variant of any package, it -provides the benefits of having a simpler and traditional layout which -may be used without any particular knowledge that its packages were -built by Spack. - -Views can be used for a variety of purposes including: - -* A central installation in a traditional layout, eg ``/usr/local`` maintained over time by the sysadmin. -* A self-contained installation area which may for the basis of a top-level atomic versioning scheme, eg ``/opt/pro`` vs ``/opt/dev``. -* Providing an atomic and monolithic binary distribution, eg for delivery as a single tarball. -* Producing ephemeral testing or developing environments. 
- -^^^^^^^^^^^^^^^^^^^^^^ -Using Filesystem Views -^^^^^^^^^^^^^^^^^^^^^^ - -A filesystem view is created and packages are linked in by the ``spack -view`` command's ``symlink`` and ``hardlink`` sub-commands. The -``spack view remove`` command can be used to unlink some or all of the -filesystem view. - -The following example creates a filesystem view based -on an installed ``cmake`` package and then removes from the view the -files in the ``cmake`` package while retaining its dependencies. - -.. code-block:: console - - $ spack view --verbose symlink myview cmake@3.5.2 - ==> Linking package: "ncurses" - ==> Linking package: "zlib" - ==> Linking package: "openssl" - ==> Linking package: "cmake" - - $ ls myview/ - bin doc etc include lib share - - $ ls myview/bin/ - captoinfo clear cpack ctest infotocap openssl tabs toe tset - ccmake cmake c_rehash infocmp ncurses6-config reset tic tput - - $ spack view --verbose --dependencies false rm myview cmake@3.5.2 - ==> Removing package: "cmake" - - $ ls myview/bin/ - captoinfo c_rehash infotocap openssl tabs toe tset - clear infocmp ncurses6-config reset tic tput - -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Limitations of Filesystem Views -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -This section describes some limitations that should be considered in -using filesystems views. - -Filesystem views are merely organizational. The binary executable -programs, shared libraries and other build products found in a view -are mere links into the "real" Spack installation area. If a view is -built with symbolic links it requires the Spack-installed package to -be kept in place. Building a view with hardlinks removes this -requirement but any internal paths (eg, rpath or ``#!`` interpreter -specifications) will still require the Spack-installed package files -to be in place. - -.. FIXME: reference the relocation work of Hegner and Gartung. - -As described above, when a view is built only a single instance of a -file may exist in the unified filesystem tree. If more than one -package provides a file at the same path (relative to its own root) -then it is the first package added to the view that "wins". A warning -is printed and it is up to the user to determine if the conflict -matters. - -It is up to the user to assure a consistent view is produced. In -particular if the user excludes packages, limits the following of -dependencies or removes packages the view may become inconsistent. In -particular, if two packages require the same sub-tree of dependencies, -removing one package (recursively) will remove its dependencies and -leave the other package broken. - .. _extensions: --------------------------- @@ -1620,7 +1373,7 @@ an *extension*. Suppose you have Python installed like so: -- linux-debian7-x86_64 / gcc@4.4.7 -------------------------------- python@2.7.8 -.. _spack-extensions: +.. _cmd-spack-extensions: ^^^^^^^^^^^^^^^^^^^^ ``spack extensions`` @@ -1714,7 +1467,7 @@ for this case. Instead of requiring users to load particular environment modules, you can *activate* the package within the Python installation: -.. _spack-activate: +.. _cmd-spack-activate: ^^^^^^^^^^^^^^^^^^ ``spack activate`` @@ -1779,19 +1532,19 @@ into the same prefix. Users who want a different version of a package can still get it by using environment modules, but they will have to explicitly load their preferred version. 
-^^^^^^^^^^^^^^^^^^^^^ -``spack activate -f`` -^^^^^^^^^^^^^^^^^^^^^ +^^^^^^^^^^^^^^^^^^^^^^^^^^ +``spack activate --force`` +^^^^^^^^^^^^^^^^^^^^^^^^^^ If, for some reason, you want to activate a package *without* its -dependencies, you can use ``spack activate -f``: +dependencies, you can use ``spack activate --force``: .. code-block:: console - $ spack activate -f py-numpy + $ spack activate --force py-numpy ==> Activated extension py-numpy@1.9.1%gcc@4.4.7 arch=linux-debian7-x86_64-66733244 for python@2.7.8%gcc@4.4.7. -.. _spack-deactivate: +.. _cmd-spack-deactivate: ^^^^^^^^^^^^^^^^^^^^ ``spack deactivate`` @@ -1864,150 +1617,12 @@ This issue typically manifests with the error below: A nicer error message is TBD in future versions of Spack. -.. _cray-support: - -------------- -Spack on Cray -------------- - -Spack differs slightly when used on a Cray system. The architecture spec -can differentiate between the front-end and back-end processor and operating system. -For example, on Edison at NERSC, the back-end target processor -is "Ivy Bridge", so you can specify to use the back-end this way: - -.. code-block:: console - - $ spack install zlib target=ivybridge - -You can also use the operating system to build against the back-end: - -.. code-block:: console - - $ spack install zlib os=CNL10 - -Notice that the name includes both the operating system name and the major -version number concatenated together. - -Alternatively, if you want to build something for the front-end, -you can specify the front-end target processor. The processor for a login node -on Edison is "Sandy bridge" so we specify on the command line like so: - -.. code-block:: console - - $ spack install zlib target=sandybridge - -And the front-end operating system is: - -.. code-block:: console - - $ spack install zlib os=SuSE11 - -^^^^^^^^^^^^^^^^^^^^^^^ -Cray compiler detection -^^^^^^^^^^^^^^^^^^^^^^^ - -Spack can detect compilers using two methods. For the front-end, we treat -everything the same. The difference lies in back-end compiler detection. -Back-end compiler detection is made via the Tcl module avail command. -Once it detects the compiler it writes the appropriate PrgEnv and compiler -module name to compilers.yaml and sets the paths to each compiler with Cray\'s -compiler wrapper names (i.e. cc, CC, ftn). During build time, Spack will load -the correct PrgEnv and compiler module and will call appropriate wrapper. - -The compilers.yaml config file will also differ. There is a -modules section that is filled with the compiler's Programming Environment -and module name. On other systems, this field is empty []: - -.. code-block:: yaml - - - compiler: - modules: - - PrgEnv-intel - - intel/15.0.109 - -As mentioned earlier, the compiler paths will look different on a Cray system. -Since most compilers are invoked using cc, CC and ftn, the paths for each -compiler are replaced with their respective Cray compiler wrapper names: - -.. code-block:: yaml - - paths: - cc: cc - cxx: CC - f77: ftn - fc: ftn - -As opposed to an explicit path to the compiler executable. This allows Spack -to call the Cray compiler wrappers during build time. - -For more on compiler configuration, check out :ref:`compiler-config`. - -Spack sets the default Cray link type to dynamic, to better match other -other platforms. Individual packages can enable static linking (which is the -default outside of Spack on cray systems) using the ``-static`` flag. 
- -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Setting defaults and using Cray modules -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -If you want to use default compilers for each PrgEnv and also be able -to load cray external modules, you will need to set up a ``packages.yaml``. - -Here's an example of an external configuration for cray modules: - -.. code-block:: yaml - - packages: - mpi: - modules: - mpich@7.3.1%gcc@5.2.0 arch=cray_xc-haswell-CNL10: cray-mpich - mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-haswell-CNL10: cray-mpich - -This tells Spack that for whatever package that depends on mpi, load the -cray-mpich module into the environment. You can then be able to use whatever -environment variables, libraries, etc, that are brought into the environment -via module load. - -You can set the default compiler that Spack can use for each compiler type. -If you want to use the Cray defaults, then set them under ``all:`` in packages.yaml. -In the compiler field, set the compiler specs in your order of preference. -Whenever you build with that compiler type, Spack will concretize to that version. - -Here is an example of a full packages.yaml used at NERSC - -.. code-block:: yaml - - packages: - mpi: - modules: - mpich@7.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge: cray-mpich - mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-SuSE11-ivybridge: cray-mpich - buildable: False - netcdf: - modules: - netcdf@4.3.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge: cray-netcdf - netcdf@4.3.3.1%intel@16.0.0.109 arch=cray_xc-CNL10-ivybridge: cray-netcdf - buildable: False - hdf5: - modules: - hdf5@1.8.14%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge: cray-hdf5 - hdf5@1.8.14%intel@16.0.0.109 arch=cray_xc-CNL10-ivybridge: cray-hdf5 - buildable: False - all: - compiler: [gcc@5.2.0, intel@16.0.0.109] - -Here we tell spack that whenever we want to build with gcc use version 5.2.0 or -if we want to build with intel compilers, use version 16.0.0.109. We add a spec -for each compiler type for each cray modules. This ensures that for each -compiler on our system we can use that external module. - -For more on external packages check out the section :ref:`sec-external_packages`. ------------ Getting Help ------------ -.. _spack-help: +.. _cmd-spack-help: ^^^^^^^^^^^^^^ ``spack help`` diff --git a/lib/spack/docs/case_studies.rst b/lib/spack/docs/case_studies.rst deleted file mode 100644 index fcec636c27..0000000000 --- a/lib/spack/docs/case_studies.rst +++ /dev/null @@ -1,181 +0,0 @@ -======================================= -Using Spack for CMake-based Development -======================================= - -These are instructions on how to use Spack to aid in the development -of a CMake-based project. Spack is used to help find the dependencies -for the project, configure it at development time, and then package it -it in a way that others can install. Using Spack for CMake-based -development consists of three parts: - -#. Setting up the CMake build in your software -#. Writing the Spack Package -#. Using it from Spack. - --------------------------- -Setting Up the CMake Build --------------------------- - -You should follow standard CMake conventions in setting up your -software, your CMake build should NOT depend on or require Spack to -build. See here for an example: - -https://github.com/citibeth/icebin - -Note that there's one exception here to the rule I mentioned above. -In ``CMakeLists.txt``, I have the following line: - -.. 
code-block:: none - - include_directories($ENV{CMAKE_TRANSITIVE_INCLUDE_PATH}) - -This is a hook into Spack, and it ensures that all transitive -dependencies are included in the include path. It's not needed if -everything is in one tree, but it is (sometimes) in the Spack world; -when running without Spack, it has no effect. - -Note that this "feature" is controversial, could break with future -versions of GNU ld, and probably not the best to use. The best -practice is that you make sure that anything you #include is listed as -a dependency in your CMakeLists.txt. - -To be more specific: if you #inlcude something from package A and an -installed HEADER FILE in A #includes something from package B, then -you should also list B as a dependency in your CMake build. If you -depend on A but header files exported by A do NOT #include things from -B, then you do NOT need to list B as a dependency --- even if linking -to A links in libB.so as well. - -I also recommend that you set up your CMake build to use RPATHs -correctly. Not only is this a good idea and nice, but it also ensures -that your package will build the same with or without ``spack -install``. - -------------------------- -Writing the Spack Package -------------------------- - -Now that you have a CMake build, you want to tell Spack how to -configure it. This is done by writing a Spack package for your -software. See here for example: - -https://github.com/citibeth/spack/blob/efischer/develop/var/spack/repos/builtin/packages/icebin/package.py - -You need to subclass ``CMakePackage``, as is done in this example. -This enables advanced features of Spack for helping you in configuring -your software (keep reading...). Instead of an ``install()`` method -used when subclassing ``Package``, you write ``configure_args()``. -See here for more info on how this works: - -https://github.com/LLNL/spack/pull/543/files - -NOTE: if your software is not publicly available, you do not need to -set the URL or version. Or you can set up bogus URLs and -versions... whatever causes Spack to not crash. - -------------------- -Using it from Spack -------------------- - -Now that you have a Spack package, you can get Spack to setup your -CMake project for you. Use the following to setup, configure and -build your project: - -.. code-block:: console - - $ cd myproject - $ spack spconfig myproject@local - $ mkdir build; cd build - $ ../spconfig.py .. - $ make - $ make install - -Everything here should look pretty familiar here from a CMake -perspective, except that ``spack spconfig`` creates the file -``spconfig.py``, which calls CMake with arguments appropriate for your -Spack configuration. Think of it as the equivalent to running a bunch -of ``spack location -i`` commands. You will run ``spconfig.py`` -instead of running CMake directly. - -If your project is publicly available (eg on GitHub), then you can -ALSO use this setup to "just install" a release version without going -through the manual configuration/build step. Just do: - -#. Put tag(s) on the version(s) in your GitHub repo you want to be release versions. - -#. Set the ``url`` in your ``package.py`` to download a tarball for - the appropriate version. (GitHub will give you a tarball for any - version in the repo, if you tickle it the right way). For example: - - https://github.com/citibeth/icebin/tarball/v0.1.0 - - Set up versions as appropriate in your ``package.py``. (Manually - download the tarball and run ``md5sum`` to determine the - appropriate checksum for it). - -#. 
Now you should be able to say ``spack install myproject@version`` - and things "just work." - -NOTE... in order to use the features outlined in this post, you -currently need to use the following branch of Spack: - -https://github.com/citibeth/spack/tree/efischer/develop - -There is a pull request open on this branch ( -https://github.com/LLNL/spack/pull/543 ) and we are working to get it -integrated into the main ``develop`` branch. - ------------------------- -Activating your Software ------------------------- - -Once you've built your software, you will want to load it up. You can -use ``spack load mypackage@local`` for that in your ``.bashrc``, but -that is slow. Try stuff like the following instead: - -The following command will load the Spack-installed packages needed -for basic Python use of IceBin: - -.. code-block:: console - - $ module load `spack module find tcl icebin netcdf cmake@3.5.1` - $ module load `spack module find --dependencies tcl py-basemap py-giss` - - -You can speed up shell startup by turning these into ``module load`` commands. - -#. Cut-n-paste the script ``make_spackenv``: - - .. code-block:: sh - - #!/bin/sh - # - # Generate commands to load the Spack environment - - SPACKENV=$HOME/spackenv.sh - - spack module find --shell tcl git icebin@local ibmisc netcdf cmake@3.5.1 > $SPACKENV - spack module find --dependencies --shell tcl py-basemap py-giss >> $SPACKENV - -#. Add the following to your ``.bashrc`` file: - - .. code-block:: sh - - source $HOME/spackenv.sh - # Preferentially use your checked-out Python source - export PYTHONPATH=$HOME/icebin/pylib:$PYTHONPATH - -#. Run ``sh make_spackenv`` whenever your Spack installation changes (including right now). - ------------ -Giving Back ------------ - -If your software is publicly available, you should submit the -``package.py`` for it as a pull request to the main Spack GitHub -project. This will ensure that anyone can install your software -(almost) painlessly with a simple ``spack install`` command. See here -for how that has turned into detailed instructions that have -successfully enabled collaborators to install complex software: - -https://github.com/citibeth/icebin/blob/develop/README.rst diff --git a/lib/spack/docs/command_index.in b/lib/spack/docs/command_index.in index 8c5c97dd9c..6520352b42 100644 --- a/lib/spack/docs/command_index.in +++ b/lib/spack/docs/command_index.in @@ -1,6 +1,6 @@ -================= -Command index -================= +============= +Command Index +============= This is an alphabetical list of commands with links to the places they appear in the documentation. diff --git a/lib/spack/docs/conf.py b/lib/spack/docs/conf.py index 57469964ee..237a062c14 100644 --- a/lib/spack/docs/conf.py +++ b/lib/spack/docs/conf.py @@ -67,28 +67,26 @@ os.environ['COLIFY_SIZE'] = '25x120' # # Generate package list using spack command # -if not os.path.exists('package_list.rst'): - with open('package_list.rst', 'w') as plist_file: - subprocess.Popen( - [spack_root + '/bin/spack', 'package-list'], stdout=plist_file) +with open('package_list.rst', 'w') as plist_file: + subprocess.Popen( + [spack_root + '/bin/spack', 'list', '--format=rst'], stdout=plist_file) # -# Find all the `spack-*` references and add them to a command index +# Find all the `cmd-spack-*` references and add them to a command index # command_names = [] for filename in glob('*rst'): with open(filename) as f: for line in f: - match = re.match(r'.. _(spack-[^:]*)', line) + match = re.match('.. 
_(cmd-spack-.*):', line) if match: command_names.append(match.group(1).strip()) -if not os.path.exists('command_index.rst'): - shutil.copy('command_index.in', 'command_index.rst') - with open('command_index.rst', 'a') as index: - index.write('\n') - for cmd in sorted(command_names): - index.write(' * :ref:`%s`\n' % cmd) +shutil.copy('command_index.in', 'command_index.rst') +with open('command_index.rst', 'a') as index: + index.write('\n') + for cmd in sorted(command_names): + index.write(' * :ref:`%s`\n' % cmd) # Run sphinx-apidoc diff --git a/lib/spack/docs/configuration.rst b/lib/spack/docs/configuration.rst index 6de823c845..f4d3a65653 100644 --- a/lib/spack/docs/configuration.rst +++ b/lib/spack/docs/configuration.rst @@ -132,6 +132,65 @@ The ``buildable`` does not need to be paired with external packages. It could also be used alone to forbid packages that may be buggy or otherwise undesirable. +.. _system-packages: + +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +False Paths for System Packages +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Sometimes, the externally-installed package one wishes to use with +Spack comes with the Operating System and is installed in a standard +place --- ``/usr``, for example. Many other packages are there as +well. If Spack adds it to build paths, then some packages might +pick up dependencies from ``/usr`` than the intended Spack version. + +In order to avoid this problem, it is advisable to specify a fake path +in ``packages.yaml``, thereby preventing Spack from adding the real +path to compiler command lines. This will work becuase compilers +normally search standard system paths, even if they are not on the +command line. For example: + +.. code-block:: yaml + + packages: + # Recommended for security reasons + # Do not install OpenSSL as non-root user. + openssl: + paths: + openssl@system: /false/path + version: [system] + buildable: False + + +^^^^^^^^^^^^^^^^^^^^^^^^^^ +Extracting System Packages +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +In some cases, using false paths for system packages will not work. +Some builds need to run binaries out of their dependencies, not just +access their libraries: the build needs to know the real location of +the system package. + +In this case, one can create a Spack-like single-package tree by +creating symlinks to the files related to just that package. +Depending on the OS, it is possible to obtain a list of the files in a +single OS-installed package. For example, on RedHat / Fedora: + +.. code-block:: console + + $ repoquery --list openssl-devel + ... + /usr/lib/libcrypto.so + /usr/lib/libssl.so + /usr/lib/pkgconfig/libcrypto.pc + /usr/lib/pkgconfig/libssl.pc + /usr/lib/pkgconfig/openssl.pc + ... + +Spack currently does not provide an automated way to create a symlink +tree to these files. + + .. _concretization-preferences: -------------------------- @@ -190,27 +249,3 @@ The syntax for the ``provider`` section differs slightly from other concretization rules. A provider lists a value that packages may ``depend_on`` (e.g, mpi) and a list of rules for fulfilling that dependency. - ---------- -Profiling ---------- - -Spack has some limited built-in support for profiling, and can report -statistics using standard Python timing tools. To use this feature, -supply ``-p`` to Spack on the command line, before any subcommands. - -.. _spack-p: - -^^^^^^^^^^^^^^^^^^^ -``spack --profile`` -^^^^^^^^^^^^^^^^^^^ - -``spack --profile`` output looks like this: - -.. 
command-output:: spack --profile graph --deptype=nobuild dyninst - :ellipsis: 25 - -The bottom of the output shows the top most time consuming functions, -slowest on top. The profiling support is from Python's built-in tool, -`cProfile -<https://docs.python.org/2/library/profile.html#module-cProfile>`_. diff --git a/lib/spack/docs/contribution_guide.rst b/lib/spack/docs/contribution_guide.rst new file mode 100644 index 0000000000..108b23a6d0 --- /dev/null +++ b/lib/spack/docs/contribution_guide.rst @@ -0,0 +1,247 @@ +.. _contribution-guide: + +Contribution Guide +===================== + +This guide is intended for developers or administrators who want to +contribute a new package, feature, or bugfix to Spack. +It assumes that you have at least some familiarity with Git VCS and Github. +The guide will show a few examples of contributing workflow and discuss +the granularity of pull-requests (PRs). + +First, what is a PR? Quoting `Bitbucket's tutorials <https://www.atlassian.com/git/tutorials/making-a-pull-request/>`_: + + Pull requests are a mechanism for a developer to notify team members that they have **completed a feature**. + The pull request is more than just a notification—it’s a dedicated forum for discussing the proposed feature + +Important is completed feature, i.e. the changes one propose in a PR should +correspond to one feature/bugfix/extension/etc. One can create PRs with +changes relevant to different ideas, however reviewing such PRs becomes tedious +and error prone. If possible, try to follow the rule **one-PR-one-package/feature.** + +Spack uses a rough approximation of the `Git Flow <http://nvie.com/posts/a-successful-git-branching-model/>`_ branching +model. The develop branch contains the latest contributions, and master is +always tagged and points to the latest stable release. Thereby when you send +your request, make ``develop`` the destination branch on the +`Spack repository <https://github.com/LLNL/spack>`_. + +Let's assume that the current (patched) state of your fork of Spack is only +relevant to yourself. Now you come across a bug in a package or would like to +extend a package and contribute this fix to Spack. It is important that +whenever you change something that might be of importance upstream, +create a pull-request (PR) as soon as possible. Do not wait for weeks/months to +do this: a) you might forget why did you modified certain files; b) it could get +difficult to isolate this change into a stand-alone clean PR. + +Now let us discuss several approaches one may use to submit a PR while +also keeping your local version of Spack patched. + + +First approach (cherry-picking): +-------------------------------- + +First approach is as follows. +You checkout your local develop branch, which for the purpose of this guide +will be called ``develop_modified``: + +.. code-block:: console + + $ git checkout develop_modified + +Let us assume that lines in files you will be modifying +are the same in `develop_modified` branch and upstream ``develop``. +Next edit files, make sure they work for you and create a commit + +.. code-block:: console + + $ git add <files_to_be_commited> + $ git commit -m <descriptive note about changes> + +Normally we prefer that commits pertaining to a package ``<package-name>``` have +a message ``<package-name>: descriptive message``. It is important to add +descriptive message so that others, who might be looking at your changes later +(in a year or maybe two), would understand the rationale behind. 
+ + +Next we will create a branch off upstream's ``develop`` and copy this commit. +Before doing this, while still on your modified branch, get the hash of the +last commit + +.. code-block:: console + + $ git log -1 + +and copy-paste this ``<hash>`` to the buffer. Now switch to upstream's ``develop``, +make sure it's updated, checkout the new branch, apply the patch and push to +GitHub: + +.. code-block:: console + + $ git checkout develop + $ git pull upstream develop + $ git checkout -b <descriptive_branch_name> + $ git cherry-pick <hash> + $ git push <your_origin> <descriptive_branch_name> -u + +Here we assume that local ``develop`` branch tracks upstream develop branch of +Spack. This is not a requirement and you could also do the same with remote +branches. Yet to some it is more convenient to have a local branch that +tracks upstream. + +Now you can create a PR from web-interface of GitHub. The net result is as +follows: + +#. You patched your local version of Spack and can use it further +#. You "cherry-picked" these changes in a stand-alone branch and submitted it + as a PR upstream. + + +Should you have several commits to contribute, you could follow the same +procedure by getting hashes of all of them and cherry-picking to the PR branch. +This could get tedious and therefore there is another way: + + +Second approach: +---------------- + +In the second approach we start from upstream ``develop`` (again assuming +that your local branch `develop` tracks upstream): + +.. code-block:: console + + $ git checkout develop + $ git pull upstream develop + $ git checkout -b <descriptive_branch_name> + +Next edit a few files and create a few commits by + +.. code-block:: console + + $ git add <files_to_be_part_of_the_commit> + $ git commit -m <descriptive_message_of_this_particular_commit> + +Now you can push it to your fork and create a PR + +.. code-block:: console + + $ git push <your_origin> <descriptive_branch_name> -u + +Most likely you would want to have those changes in your (modified) local +version of Spack. To that end you need to merge this branch + +.. code-block:: console + + $ git checkout develop_modified + $ git merge <descriptive_branch_name> + +The net result is similar to the first approach with a minor difference that +you would also merge upstream develop into you modified version in the last +step. Should this not be desirable, you have to follow the first approach. + + + +How to clean-up a branch by rewriting history: +----------------------------------------------- + +Sometimes you may end up on a branch that has a lot of commits, merges of +upstream branch and alike but it can't be rebased on ``develop`` due to a long +and convoluted history. If the current commits history is more of an experimental +nature and only the net result is important, you may rewrite the history. +To that end you need to first merge upstream `develop` and reset you branch to +it. So on the branch in question do: + +.. code-block:: console + + $ git merge develop + $ git reset develop + +At this point you your branch will point to the same commit as develop and +thereby the two are indistinguishable. However, all the files that were +previously modified will stay as such. In other words, you do not loose the +changes you made. Changes can be reviewed by looking at diffs + +.. code-block:: console + + $ git status + $ git diff + +One can also run GUI to visualize the current changes + +.. 
code-block:: console + + $ git difftool + +Next step is to rewrite the history by adding files and creating commits + +.. code-block:: console + + $ git add <files_to_be_part_of_commit> + $ git commit -m <descriptive_message> + + +Shall you need to split changes within a file into separate commits, use + +.. code-block:: console + + $ git add <file> -p + +After all changed files are committed, you can push the branch to your fork +and create a PR + +.. code-block:: console + + $ git push <you_origin> -u + + + +How to fix a bad rebase by "cherry-picking" commits: +---------------------------------------------------- + +Say you are working on a branch ``feature1``. It has several commits and is +ready to be merged. However, there are a few minor merge conflicts and so +you are asked to rebase onto ``develop`` upstream branch. Occasionally, it +happens so that a contributor rebases not on top of the upstream branch, but +on his/her local outdated copy of it. This would lead to an inclusion of the +whole lot of duplicated history and of course can not be merged as-is. + +One way to get out of troubles is to ``cherry-pick`` important commits. To +do that, first checkout a temporary back-up branch: + +.. code-block:: console + + git checkout -b tmp + +Now look at logs and save hashes of commits you would like to keep + +.. code-block:: console + + git log + +Next, go back to the original branch and reset it to ``develop``. +Before doing so, make sure that you local ``develop`` branch is up-to-date +with the upstream. + +.. code-block:: console + + git checkout feature1 + git reset --hard develop + +Now you can cherry-pick relevant commits + +.. code-block:: console + + git cherry-pick <hash1> + git cherry-pick <hash2> + + +push the modified branch to your fork + +.. code-block:: console + + git push -f + +and if everything looks good, delete the back-up: + +.. code-block:: console + + git branch -D tmp diff --git a/lib/spack/docs/developer_guide.rst b/lib/spack/docs/developer_guide.rst index 04ae8fe1a1..0942fdd9c3 100644 --- a/lib/spack/docs/developer_guide.rst +++ b/lib/spack/docs/developer_guide.rst @@ -324,3 +324,27 @@ Developer commands ^^^^^^^^^^^^^^ ``spack test`` ^^^^^^^^^^^^^^ + +--------- +Profiling +--------- + +Spack has some limited built-in support for profiling, and can report +statistics using standard Python timing tools. To use this feature, +supply ``--profile`` to Spack on the command line, before any subcommands. + +.. _spack-p: + +^^^^^^^^^^^^^^^^^^^ +``spack --profile`` +^^^^^^^^^^^^^^^^^^^ + +``spack --profile`` output looks like this: + +.. command-output:: spack --profile graph dyninst + :ellipsis: 25 + +The bottom of the output shows the top most time consuming functions, +slowest on top. The profiling support is from Python's built-in tool, +`cProfile +<https://docs.python.org/2/library/profile.html#module-cProfile>`_. diff --git a/lib/spack/docs/features.rst b/lib/spack/docs/features.rst index 57629997aa..2df1d182d6 100644 --- a/lib/spack/docs/features.rst +++ b/lib/spack/docs/features.rst @@ -1,5 +1,5 @@ ================ -Feature overview +Feature Overview ================ This is a high-level overview of features that make Spack different diff --git a/lib/spack/docs/getting_started.rst b/lib/spack/docs/getting_started.rst index 676697a549..7510693b18 100644 --- a/lib/spack/docs/getting_started.rst +++ b/lib/spack/docs/getting_started.rst @@ -1,22 +1,46 @@ +.. 
_getting_started: + =============== Getting Started =============== --------- -Download --------- +------------- +Prerequisites +------------- + +Spack has the following minimum requirements, which must be installed +before Spack is run: + +1. Python 2.6 or 2.7 +2. A C/C++ compiler +3. The ``git`` and ``curl`` commands. + +These requirements can be easily installed on most modern Linux systems; +on Macintosh, XCode is required. Spack is designed to run on HPC +platforms like Cray and BlueGene/Q. Not all packages should be expected +to work on all platforms. A build matrix showing which packages are +working on which systems is planned but not yet available. + +------------ +Installation +------------ -Getting spack is easy. You can clone it from the `github repository +Getting Spack is easy. You can clone it from the `github repository <https://github.com/llnl/spack>`_ using this command: .. code-block:: console $ git clone https://github.com/llnl/spack.git -This will create a directory called ``spack``. We'll assume that the -full path to this directory is in the ``SPACK_ROOT`` environment -variable. Add ``$SPACK_ROOT/bin`` to your path and you're ready to -go: +This will create a directory called ``spack``. + +^^^^^^^^^^^^^^^^^^^^^^^^ +Add Spack to the Shell +^^^^^^^^^^^^^^^^^^^^^^^^ + +We'll assume that the full path to your downloaded Spack directory is +in the ``SPACK_ROOT`` environment variable. Add ``$SPACK_ROOT/bin`` +to your path and you're ready to go: .. code-block:: console @@ -38,14 +62,46 @@ For a richer experience, use Spack's `shell support This automatically adds Spack to your ``PATH``. ------------- -Installation ------------- +^^^^^^^^^^^^^^^^^ +Clean Environment +^^^^^^^^^^^^^^^^^ + +Many packages' installs can be broken by changing environment +variables. For example, a package might pick up the wrong build-time +dependencies (most of them not specified) depending on the setting of +``PATH``. ``GCC`` seems to be particularly vulnerable to these issues. + +Therefore, it is recommended that Spack users run with a *clean +environment*, especially for ``PATH``. Only software that comes with +the system, or that you know you wish to use with Spack, should be +included. This procedure will avoid many strange build errors. + + +^^^^^^^^^^^^^^^^^^ +Check Installation +^^^^^^^^^^^^^^^^^^ + +With Spack installed, you should be able to run some basic Spack +commands. For example: + +.. code-block:: console -You don't need to install Spack; it's ready to run as soon as you -clone it from git. + $ spack spec netcdf + ... + netcdf@4.4.1%gcc@5.3.0~hdf4+mpi arch=linux-SuSE11-x86_64 + ^curl@7.50.1%gcc@5.3.0 arch=linux-SuSE11-x86_64 + ^openssl@system%gcc@5.3.0 arch=linux-SuSE11-x86_64 + ^zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64 + ^hdf5@1.10.0-patch1%gcc@5.3.0+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=linux-SuSE11-x86_64 + ^openmpi@1.10.1%gcc@5.3.0~mxm~pmi~psm~psm2~slurm~sqlite3~thread_multiple~tm+verbs+vt arch=linux-SuSE11-x86_64 + ^m4@1.4.17%gcc@5.3.0+sigsegv arch=linux-SuSE11-x86_64 + ^libsigsegv@2.10%gcc@5.3.0 arch=linux-SuSE11-x86_64 -You may want to run it out of a prefix other than the git repository +^^^^^^^^^^^^^^^^^^^^^^^^^^ +Optional: Alternate Prefix +^^^^^^^^^^^^^^^^^^^^^^^^^^ + +You may want to run Spack out of a prefix other than the git repository you cloned. The ``spack bootstrap`` command provides this functionality. 
To install spack in a new directory, simply type: @@ -57,3 +113,998 @@ This will install a new spack script in ``/my/favorite/prefix/bin``, which you can use just like you would the regular spack script. Each copy of spack installs packages into its own ``$PREFIX/opt`` directory. + + +^^^^^^^^^^ +Next Steps +^^^^^^^^^^ + +In theory, Spack doesn't need any additional installation; just +download and run! But in real life, additional steps are usually +required before Spack can work in a practical sense. Read on... + + +.. _compiler-config: + +---------------------- +Compiler configuration +---------------------- + +Spack has the ability to build packages with multiple compilers and +compiler versions. Spack searches for compilers on your machine +automatically the first time it is run. It does this by inspecting +your ``PATH``. + +.. _cmd-spack-compilers: + +^^^^^^^^^^^^^^^^^^^ +``spack compilers`` +^^^^^^^^^^^^^^^^^^^ + +You can see which compilers spack has found by running ``spack +compilers`` or ``spack compiler list``: + +.. code-block:: console + + $ spack compilers + ==> Available compilers + -- gcc --------------------------------------------------------- + gcc@4.9.0 gcc@4.8.0 gcc@4.7.0 gcc@4.6.2 gcc@4.4.7 + gcc@4.8.2 gcc@4.7.1 gcc@4.6.3 gcc@4.6.1 gcc@4.1.2 + -- intel ------------------------------------------------------- + intel@15.0.0 intel@14.0.0 intel@13.0.0 intel@12.1.0 intel@10.0 + intel@14.0.3 intel@13.1.1 intel@12.1.5 intel@12.0.4 intel@9.1 + intel@14.0.2 intel@13.1.0 intel@12.1.3 intel@11.1 + intel@14.0.1 intel@13.0.1 intel@12.1.2 intel@10.1 + -- clang ------------------------------------------------------- + clang@3.4 clang@3.3 clang@3.2 clang@3.1 + -- pgi --------------------------------------------------------- + pgi@14.3-0 pgi@13.2-0 pgi@12.1-0 pgi@10.9-0 pgi@8.0-1 + pgi@13.10-0 pgi@13.1-1 pgi@11.10-0 pgi@10.2-0 pgi@7.1-3 + pgi@13.6-0 pgi@12.8-0 pgi@11.1-0 pgi@9.0-4 pgi@7.0-6 + +Any of these compilers can be used to build Spack packages. More on +how this is done is in :ref:`sec-specs`. + +.. _spack-compiler-add: + +^^^^^^^^^^^^^^^^^^^^^^ +``spack compiler add`` +^^^^^^^^^^^^^^^^^^^^^^ + +An alias for ``spack compiler find``. + +.. _spack-compiler-find: + +^^^^^^^^^^^^^^^^^^^^^^^ +``spack compiler find`` +^^^^^^^^^^^^^^^^^^^^^^^ + +If you do not see a compiler in this list, but you want to use it with +Spack, you can simply run ``spack compiler find`` with the path to +where the compiler is installed. For example: + +.. code-block:: console + + $ spack compiler find /usr/local/tools/ic-13.0.079 + ==> Added 1 new compiler to /Users/gamblin2/.spack/compilers.yaml + intel@13.0.079 + +Or you can run ``spack compiler find`` with no arguments to force +auto-detection. This is useful if you do not know where compilers are +installed, but you know that new compilers have been added to your +``PATH``. For example, you might load a module, like this: + +.. code-block:: console + + $ module load gcc-4.9.0 + $ spack compiler find + ==> Added 1 new compiler to /Users/gamblin2/.spack/compilers.yaml + gcc@4.9.0 + +This loads the environment module for gcc-4.9.0 to add it to +``PATH``, and then it adds the compiler to Spack. + +.. _spack-compiler-info: + +^^^^^^^^^^^^^^^^^^^^^^^ +``spack compiler info`` +^^^^^^^^^^^^^^^^^^^^^^^ + +If you want to see specifics on a particular compiler, you can run +``spack compiler info`` on it: + +.. 
code-block:: console + + $ spack compiler info intel@15 + intel@15.0.0: + cc = /usr/local/bin/icc-15.0.090 + cxx = /usr/local/bin/icpc-15.0.090 + f77 = /usr/local/bin/ifort-15.0.090 + fc = /usr/local/bin/ifort-15.0.090 + modules = [] + operating system = centos6 + +This shows which C, C++, and Fortran compilers were detected by Spack. +Notice also that we didn't have to be too specific about the +version. We just said ``intel@15``, and information about the only +matching Intel compiler was displayed. + +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Manual compiler configuration +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +If auto-detection fails, you can manually configure a compiler by +editing your ``~/.spack/compilers.yaml`` file. You can do this by running +``spack config edit compilers``, which will open the file in your ``$EDITOR``. + +Each compiler configuration in the file looks like this: + +.. code-block:: yaml + + compilers: + - compiler: + modules = [] + operating_system: centos6 + paths: + cc: /usr/local/bin/icc-15.0.024-beta + cxx: /usr/local/bin/icpc-15.0.024-beta + f77: /usr/local/bin/ifort-15.0.024-beta + fc: /usr/local/bin/ifort-15.0.024-beta + spec: intel@15.0.0: + +For compilers, like ``clang``, that do not support Fortran, put +``None`` for ``f77`` and ``fc``: + +.. code-block:: yaml + + paths: + cc: /usr/bin/clang + cxx: /usr/bin/clang++ + f77: None + fc: None + spec: clang@3.3svn: + +Once you save the file, the configured compilers will show up in the +list displayed by ``spack compilers``. + +You can also add compiler flags to manually configured compilers. The +valid flags are ``cflags``, ``cxxflags``, ``fflags``, ``cppflags``, +``ldflags``, and ``ldlibs``. For example: + +.. code-block:: yaml + + compilers: + - compiler: + modules = [] + operating_system: OS + paths: + cc: /usr/local/bin/icc-15.0.024-beta + cxx: /usr/local/bin/icpc-15.0.024-beta + f77: /usr/local/bin/ifort-15.0.024-beta + fc: /usr/local/bin/ifort-15.0.024-beta + parameters: + cppflags: -O3 -fPIC + spec: intel@15.0.0: + +These flags will be treated by spack as if they were entered from +the command line each time this compiler is used. The compiler wrappers +then inject those flags into the compiler command. Compiler flags +entered from the command line will be discussed in more detail in the +following section. + +^^^^^^^^^^^^^^^^^^^^^^^ +Build Your Own Compiler +^^^^^^^^^^^^^^^^^^^^^^^ + +If you are particular about which compiler/version you use, you might +wish to have Spack build it for you. For example: + +.. code-block:: console + + $ spack install gcc@4.9.3 + +Once that has finished, you will need to add it to your +``compilers.yaml`` file. You can then set Spack to use it by default +by adding the following to your ``packages.yaml`` file: + +.. code-block:: yaml + + packages: + all: + compiler: [gcc@4.9.3] + + +.. note:: + + If you are building your own compiler, some users prefer to have a + Spack instance just for that. For example, create a new Spack in + ``~/spack-tools`` and then run ``~/spack-tools/bin/spack install + gcc@4.9.3``. Once the compiler is built, don't build anything + more in that Spack instance; instead, create a new "real" Spack + instance, configure Spack to use the compiler you've just built, + and then build your application software in the new Spack + instance. Following this tip makes it easy to delete all your + Spack packages *except* the compiler. 
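+
+As a minimal sketch (reusing the ``gcc@4.9.3`` example above; the
+output shown is illustrative), one way to register the freshly built
+compiler is to point ``spack compiler find`` at its install prefix:
+
+.. code-block:: console
+
+   $ spack compiler find $(spack location --install-dir gcc@4.9.3)
+   ==> Added 1 new compiler to ~/.spack/compilers.yaml
+       gcc@4.9.3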
+ + +^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Compilers Requiring Modules +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Many installed compilers will work regardless of the environment they +are called with. However, some installed compilers require +``$LD_LIBRARY_PATH`` or other environment variables to be set in order +to run; this is typical for Intel and other proprietary compilers. + +In such a case, you should tell Spack which module(s) to load in order +to run the chosen compiler (If the compiler does not come with a +module file, you might consider making one by hand). Spack will load +this module into the environment ONLY when the compiler is run, and +NOT in general for a package's ``install()`` method. See, for +example, this ``compilers.yaml`` file: + +.. code-block:: yaml + + compilers: + - compiler: + modules: [other/comp/gcc-5.3-sp3] + operating_system: SuSE11 + paths: + cc: /usr/local/other/SLES11.3/gcc/5.3.0/bin/gcc + cxx: /usr/local/other/SLES11.3/gcc/5.3.0/bin/g++ + f77: /usr/local/other/SLES11.3/gcc/5.3.0/bin/gfortran + fc: /usr/local/other/SLES11.3/gcc/5.3.0/bin/gfortran + spec: gcc@5.3.0 + +Some compilers require special environment settings to be loaded not just +to run, but also to execute the code they build, breaking packages that +need to execute code they just compiled. If it's not possible or +practical to use a better compiler, you'll need to ensure that +environment settings are preserved for compilers like this (i.e., you'll +need to load the module or source the compiler's shell script). + +By default, Spack tries to ensure that builds are reproducible by +cleaning the environment before building. If this interferes with your +compiler settings, you CAN use ``spack install --dirty`` as a workaround. +Note that this MAY interfere with package builds. + +.. _licensed-compilers: + +^^^^^^^^^^^^^^^^^^ +Licensed Compilers +^^^^^^^^^^^^^^^^^^ + +Some proprietary compilers require licensing to use. If you need to +use a licensed compiler (eg, PGI), the process is similar to a mix of +build your own, plus modules: + +#. Create a Spack package (if it doesn't exist already) to install + your compiler. Follow instructions on installing :ref:`license`. + +#. Once the compiler is installed, you should be able to test it by + using Spack to load the module it just created, and running simple + builds (eg: ``cc helloWorld.c; ./a.out``) + +#. Add the newly-installed compiler to ``compilers.yaml`` as shown + above. + +^^^^^^^^^^^^^^^^ +Mixed Toolchains +^^^^^^^^^^^^^^^^ + +Modern compilers typically come with related compilers for C, C++ and +Fortran bundled together. When possible, results are best if the same +compiler is used for all languages. + +In some cases, this is not possible. For example, starting with macOS El +Capitan (10.11), many packages no longer build with GCC, but XCode +provides no Fortran compilers. The user is therefore forced to use a +mixed toolchain: XCode-provided Clang for C/C++ and GNU ``gfortran`` for +Fortran. + +In the simplest case, you can just edit ``compilers.yaml``: + + .. code-block:: yaml + + compilers: + darwin-x86_64: + clang@7.3.0-apple: + cc: /usr/bin/clang + cxx: /usr/bin/clang++ + f77: /path/to/bin/gfortran + fc: /path/to/bin/gfortran + +.. note:: + + If you are building packages that are sensitive to the compiler's + name, you may also need to slightly modify a few more files so that + Spack uses compiler names the build system will recognize. + + Following are instructions on how to hack together + ``clang`` and ``gfortran`` on Macintosh OS X. 
A similar approach + should work for other mixed toolchain needs. + + Better support for mixed compiler toolchains is planned in forthcoming + Spack versions. + + #. Create a symlink inside ``clang`` environment: + + .. code-block:: console + + $ cd $SPACK_ROOT/lib/spack/env/clang + $ ln -s ../cc gfortran + + + #. Patch ``clang`` compiler file: + + .. code-block:: diff + + $ diff --git a/lib/spack/spack/compilers/clang.py b/lib/spack/spack/compilers/clang.py + index e406d86..cf8fd01 100644 + --- a/lib/spack/spack/compilers/clang.py + +++ b/lib/spack/spack/compilers/clang.py + @@ -35,17 +35,17 @@ class Clang(Compiler): + cxx_names = ['clang++'] + + # Subclasses use possible names of Fortran 77 compiler + - f77_names = [] + + f77_names = ['gfortran'] + + # Subclasses use possible names of Fortran 90 compiler + - fc_names = [] + + fc_names = ['gfortran'] + + # Named wrapper links within spack.build_env_path + link_paths = { 'cc' : 'clang/clang', + 'cxx' : 'clang/clang++', + # Use default wrappers for fortran, in case provided in compilers.yaml + - 'f77' : 'f77', + - 'fc' : 'f90' } + + 'f77' : 'clang/gfortran', + + 'fc' : 'clang/gfortran' } + + @classmethod + def default_version(self, comp): + +^^^^^^^^^^^^^^^^^^^^^ +Compiler Verification +^^^^^^^^^^^^^^^^^^^^^ + +You can verify that your compilers are configured properly by installing a +simple package. For example: + +.. code-block:: console + + $ spack install zlib%gcc@5.3.0 + +-------------------------------------- +Vendor-Specific Compiler Configuration +-------------------------------------- + +With Spack, things usually "just work" with GCC. Not so for other +compilers. This section provides details on how to get specific +compilers working. + +^^^^^^^^^^^^^^^ +Intel Compilers +^^^^^^^^^^^^^^^ + +Intel compilers are unusual because a single Intel compiler version +can emulate multiple GCC versions. In order to provide this +functionality, the Intel compiler needs GCC to be installed. +Therefore, the following steps are necessary to successfully use Intel +compilers: + +#. Install a version of GCC that implements the desired language + features (``spack install gcc``). + +#. Tell the Intel compiler how to find that desired GCC. This may be + done in one of two ways: (text taken from `Intel Reference Guide + <https://software.intel.com/en-us/node/522750>`_): + + > By default, the compiler determines which version of ``gcc`` or ``g++`` + > you have installed from the ``PATH`` environment variable. + > + > If you want use a version of ``gcc`` or ``g++`` other than the default + > version on your system, you need to use either the ``-gcc-name`` + > or ``-gxx-name`` compiler option to specify the path to the version of + > ``gcc`` or ``g++`` that you want to use. + +Intel compilers may therefore be configured in one of two ways with +Spack: using modules, or using compiler flags. + +"""""""""""""""""""""""""" +Configuration with Modules +"""""""""""""""""""""""""" + +One can control which GCC is seen by the Intel compiler with modules. +A module must be loaded both for the Intel Compiler (so it will run) +and GCC (so the compiler can find the intended GCC). The following +configuration in ``compilers.yaml`` illustrates this technique: + +.. 
code-block:: yaml
+
+   compilers:
+   - compiler:
+       modules: [gcc-4.9.3, intel-15.0.24]
+       operating_system: centos7
+       paths:
+         cc: /opt/intel-15.0.24/bin/icc-15.0.24-beta
+         cxx: /opt/intel-15.0.24/bin/icpc-15.0.24-beta
+         f77: /opt/intel-15.0.24/bin/ifort-15.0.24-beta
+         fc: /opt/intel-15.0.24/bin/ifort-15.0.24-beta
+       spec: intel@15.0.24.4.9.3
+
+
+.. note::
+
+   The version number on the Intel compiler is a combination of
+   the "native" Intel version number and the GNU compiler it is
+   targeting.
+
+""""""""""""""""""""""""""
+Command Line Configuration
+""""""""""""""""""""""""""
+
+.. warning::
+
+   As of the writing of this manual, added compiler flags are broken;
+   see `GitHub Issue <https://github.com/LLNL/spack/pull/1532>`_.
+
+One can also control which GCC is seen by the Intel compiler by adding
+flags to the ``icc`` command:
+
+#. Identify the location of the compiler you just installed:
+
+   .. code-block:: console
+
+      $ spack location --install-dir gcc
+      /home2/rpfische/spack2/opt/spack/linux-centos7-x86_64/gcc-4.9.3-iy4rw...
+
+#. Set up ``compilers.yaml``, for example:
+
+   .. code-block:: yaml
+
+      compilers:
+      - compiler:
+          modules: [intel-15.0.24]
+          operating_system: centos7
+          paths:
+            cc: /opt/intel-15.0.24/bin/icc-15.0.24-beta
+            cflags: -gcc-name /home2/rpfische/spack2/opt/spack/linux-centos7-x86_64/gcc-4.9.3-iy4rw.../bin/gcc
+            cxx: /opt/intel-15.0.24/bin/icpc-15.0.24-beta
+            cxxflags: -gxx-name /home2/rpfische/spack2/opt/spack/linux-centos7-x86_64/gcc-4.9.3-iy4rw.../bin/g++
+            f77: /opt/intel-15.0.24/bin/ifort-15.0.24-beta
+            fc: /opt/intel-15.0.24/bin/ifort-15.0.24-beta
+            fflags: -gcc-name /home2/rpfische/spack2/opt/spack/linux-centos7-x86_64/gcc-4.9.3-iy4rw.../bin/gcc
+          spec: intel@15.0.24.4.9.3
+
+
+^^^
+PGI
+^^^
+
+PGI comes with two sets of compilers for C++ and Fortran,
+distinguishable by their names. "Old" compilers:
+
+.. code-block:: yaml
+
+   cc: /soft/pgi/15.10/linux86-64/15.10/bin/pgcc
+   cxx: /soft/pgi/15.10/linux86-64/15.10/bin/pgCC
+   f77: /soft/pgi/15.10/linux86-64/15.10/bin/pgf77
+   fc: /soft/pgi/15.10/linux86-64/15.10/bin/pgf90
+
+"New" compilers:
+
+.. code-block:: yaml
+
+   cc: /soft/pgi/15.10/linux86-64/15.10/bin/pgcc
+   cxx: /soft/pgi/15.10/linux86-64/15.10/bin/pgc++
+   f77: /soft/pgi/15.10/linux86-64/15.10/bin/pgfortran
+   fc: /soft/pgi/15.10/linux86-64/15.10/bin/pgfortran
+
+Older installations of PGI contain just the old compilers, whereas
+newer installations contain both the old and the new. The new
+compilers are considered preferable, as some packages (e.g. ``hdf4``)
+will not build with the old ones.
+
+When auto-detecting a PGI compiler, there are cases where Spack will
+find the old compilers when you really want it to find the new ones.
+It is best to check the generated ``compilers.yaml``, and if the old
+compilers are being used, change ``pgf77`` and ``pgf90`` to
+``pgfortran``.
+
+Other issues:
+
+* There are reports that some packages will not build with PGI,
+  including ``libpciaccess`` and ``openssl``. A workaround is to
+  build these packages with another compiler and then use them as
+  dependencies for PGI-built packages. For example:
+
+  .. code-block:: console
+
+     $ spack install openmpi%pgi ^libpciaccess%gcc
+
+
+* PGI requires a license to use; see :ref:`licensed-compilers` for more
+  information on installation.
+
+.. note::
+
+   It is believed the problem with ``hdf4`` is that everything is
+   compiled with the ``F77`` compiler, but at some point some Fortran
+   90 code slipped in there. So compilers that can handle both FORTRAN
+   77 and Fortran 90 (``gfortran``, ``pgfortran``, etc.) are fine. But
+   compilers specific to one or the other (``pgf77``, ``pgf90``) won't
+   work.
+
+
+^^^
+NAG
+^^^
+
+At this point, the NAG compiler is `known not to work
+<https://github.com/LLNL/spack/issues/590>`_.
+
+
+---------------
+System Packages
+---------------
+
+Once compilers are configured, one needs to determine which
+pre-installed system packages, if any, to use in builds. This is
+configured in the file ``~/.spack/packages.yaml``. For example, to use
+an OpenMPI installed in ``/opt/local``, one would use:
+
+.. code-block:: yaml
+
+   packages:
+     openmpi:
+       paths:
+         openmpi@1.10.1: /opt/local
+       buildable: False
+
+In general, Spack is easier to use and more reliable if it builds all of
+its own dependencies. However, there are two packages for which one
+commonly needs to use system versions:
+
+^^^
+MPI
+^^^
+
+On supercomputers, sysadmins have already built MPI versions that take
+into account the specifics of that computer's hardware. Unless you
+know how they were built and can choose the correct Spack variants,
+you are unlikely to get a working MPI from Spack. Instead, use an
+appropriate pre-installed MPI.
+
+If you choose a pre-installed MPI, you should consider using the
+pre-installed compiler used to build that MPI; see above on
+``compilers.yaml``.
+
+^^^^^^^
+OpenSSL
+^^^^^^^
+
+The ``openssl`` package underlies much of the security in a modern
+OS; an attacker can easily "pwn" any computer on which they can modify SSL.
+Therefore, any ``openssl`` used on a system should be created in a
+"trusted environment" --- for example, that of the OS vendor.
+
+OpenSSL is also updated by the OS vendor from time to time, in
+response to security problems discovered in the wider community. It
+is in everyone's best interest to use any newly updated versions as
+soon as they come out. Modern Linux installations have standard
+procedures for security updates without user involvement.
+
+Spack running at user-level is not a trusted environment, nor do Spack
+users generally keep up-to-date on the latest security holes in SSL. For
+these reasons, a Spack-installed OpenSSL should likely not be trusted.
+
+As long as the system-provided SSL works, you can use it instead. One
+can check if it works by trying to download an ``https://`` URL. For
+example:
+
+.. code-block:: console
+
+   $ curl -O https://github.com/ImageMagick/ImageMagick/archive/7.0.2-7.tar.gz
+
+The recommended way to tell Spack to use the system-supplied OpenSSL is
+to add the following to ``packages.yaml``. Note that the ``@system``
+"version" means "I don't care what version it is, just use what is
+there." This is reasonable for OpenSSL, which has a stable API.
+
+
+.. code-block:: yaml
+
+   packages:
+     openssl:
+       paths:
+         openssl@system: /false/path
+       version: [system]
+       buildable: False
+
+.. note::
+
+   Even though OpenSSL is located in ``/usr``, we have told Spack to
+   look for it in ``/false/path``. This prevents ``/usr`` from being
+   added to compilation paths and RPATHs, where it could cause
+   unrelated system libraries to be used instead of their Spack
+   equivalents.
+
+   Adding ``/usr`` to ``RPATH`` in this situation is a known issue
+   and will be fixed in a future release.
+
+
+^^^
+Git
+^^^
+
+Some Spack packages use ``git`` to download, which might not work on
+some computers. For example, the following error was
+encountered on a Macintosh during ``spack install julia-master``:
+
+.. code-block:: console
+
+   ==> Trying to clone git repository:
+     https://github.com/JuliaLang/julia.git
+     on branch master
+   Cloning into 'julia'...
+   fatal: unable to access 'https://github.com/JuliaLang/julia.git/':
+     SSL certificate problem: unable to get local issuer certificate
+
+This problem is related to OpenSSL, and in some cases might be solved
+by installing a new version of ``git`` and ``openssl``:
+
+#. Run ``spack install git``.
+#. Add the output of ``spack module loads git`` to your ``.bashrc``.
+
+If this doesn't work, it is also possible to disable checking of SSL
+certificates by using:
+
+.. code-block:: console
+
+   $ spack --insecure install
+
+Using ``--insecure`` makes Spack disable SSL checking when fetching
+from websites and from git.
+
+.. warning::
+
+   This workaround should be used ONLY as a last resort! Without SSL
+   certificate verification, spack and git will download from sites you
+   wouldn't normally trust. The code you download and run may then be
+   compromised! While this is not a major issue for archives that will
+   be checksummed, it is especially problematic when downloading from
+   named Git branches or tags, which rely entirely on trusting a
+   certificate for security (no verification).
+
+-----------------------
+Utilities Configuration
+-----------------------
+
+Although Spack does not need installation *per se*, it does rely on
+other packages to be available on its host system. If those packages
+are out of date or missing, then Spack will not work. Sometimes, an
+appeal to the system's package manager can fix such problems. If not,
+the solution is to have Spack install the required packages, and then
+have Spack use them.
+
+For example, if ``curl`` doesn't work, one could use the following steps
+to provide Spack with a working ``curl``:
+
+.. code-block:: console
+
+   $ spack install curl
+   $ spack load curl
+
+or alternately:
+
+.. code-block:: console
+
+   $ spack module loads curl >> ~/.bashrc
+
+or if environment modules don't work:
+
+.. code-block:: console
+
+   $ export PATH=`spack location --install-dir curl`/bin:$PATH
+
+
+External commands are used by Spack in two places: within core Spack,
+and in the package recipes. The bootstrapping procedure for these two
+cases is somewhat different, and is treated separately below.
+
+^^^^^^^^^^^^^^^^^^^^
+Core Spack Utilities
+^^^^^^^^^^^^^^^^^^^^
+
+Core Spack uses the following packages, mainly to download and unpack
+source code, and to load generated environment modules: ``curl``,
+``env``, ``git``, ``go``, ``hg``, ``svn``, ``tar``, ``unzip``,
+``patch``, ``environment-modules``.
+
+As long as the user's environment is set up to successfully run these
+programs from outside of Spack, they should work inside of Spack as
+well. They can generally be activated as in the ``curl`` example above;
+or some systems might already have an appropriate hand-built
+environment module that may be loaded. Either way works.
+
+A few notes on specific programs in this list:
+
+""""""""""""""""""""""""""
+cURL, git, Mercurial, etc.
+""""""""""""""""""""""""""
+
+Spack depends on cURL to download tarballs, the format that most
+Spack-installed packages come in. Your system's cURL should always be
+able to download unencrypted ``http://``. However, the cURL on some
+systems has problems with SSL-enabled ``https://`` URLs, due to
+outdated / insecure versions of OpenSSL on those systems. This will
+prevent Spack from installing any software requiring ``https://``
+until a new cURL has been installed, using the technique above.
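+
+As a quick sanity check (the install path shown is illustrative), you
+can confirm that the newly built ``curl`` is the one your shell now
+finds, and that it can fetch an ``https://`` URL:
+
+.. code-block:: console
+
+   $ spack load curl
+   $ which curl
+   .../opt/spack/linux-SuSE11-x86_64/gcc-5.3.0/curl-7.50.1-.../bin/curl
+   $ curl -O https://github.com/ImageMagick/ImageMagick/archive/7.0.2-7.tar.gz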
+ +.. warning:: + + remember that if you install ``curl`` via Spack that it may rely on a + user-space OpenSSL that is not upgraded regularly. It may fall out of + date faster than your system OpenSSL. + +Some packages use source code control systems as their download method: +``git``, ``hg``, ``svn`` and occasionally ``go``. If you had to install +a new ``curl``, then chances are the system-supplied version of these +other programs will also not work, because they also rely on OpenSSL. +Once ``curl`` has been installed, you can similarly install the others. + + +.. _InstallEnvironmentModules: + +""""""""""""""""""" +Environment Modules +""""""""""""""""""" + +In order to use Spack's generated environment modules, you must have +installed one of *Environment Modules* or *Lmod*. On many Linux +distributions, this can be installed from the vendor's repository. For +example: ``yum install environment-modules`` (Fedora/RHEL/CentOS). If +your Linux distribution does not have Environment Modules, you can get it +with Spack: + +#. Consider using system tcl (as long as your system has Tcl version 8.0 or later): + #) Identify its location using ``which tclsh`` + #) Identify its version using ``echo 'puts $tcl_version;exit 0' | tclsh`` + #) Add to ``~/.spack/packages.yaml`` and modify as appropriate: + + .. code-block:: yaml + + packages: + tcl: + paths: + tcl@8.5: /usr + version: [8.5] + buildable: False + +#. Install with: + + .. code-block:: console + + $ spack install environment-modules + +#. Activate with the following script (or apply the updates to your + ``.bashrc`` file manually): + + .. code-block:: sh + + TMP=`tempfile` + echo >$TMP + MODULE_HOME=`spack location --install-dir environment-modules` + MODULE_VERSION=`ls -1 $MODULE_HOME/Modules | head -1` + ${MODULE_HOME}/Modules/${MODULE_VERSION}/bin/add.modules <$TMP + cp .bashrc $TMP + echo "MODULE_VERSION=${MODULE_VERSION}" > .bashrc + cat $TMP >>.bashrc + +This adds to your ``.bashrc`` (or similar) files, enabling Environment +Modules when you log in. Re-load your .bashrc (or log out and in +again), and then test that the ``module`` command is found with: + +.. code-block:: console + + $ module avail + + +^^^^^^^^^^^^^^^^^ +Package Utilities +^^^^^^^^^^^^^^^^^ + +Spack may also encounter bootstrapping problems inside a package's +``install()`` method. In this case, Spack will normally be running +inside a *sanitized build environment*. This includes all of the +package's dependencies, but none of the environment Spack inherited +from the user: if you load a module or modify ``$PATH`` before +launching Spack, it will have no effect. + +In this case, you will likely need to use the ``--dirty`` flag when +running ``spack install``, causing Spack to **not** sanitize the build +environment. You are now responsible for making sure that environment +does not do strange things to Spack or its installs. + +Another way to get Spack to use its own version of something is to add +that something to a package that needs it. For example: + +.. code-block:: python + + depends_on('binutils', type='build') + +This is considered best practice for some common build dependencies, +such as ``autotools`` (if the ``autoreconf`` command is needed) and +``cmake`` --- ``cmake`` especially, because different packages require +a different version of CMake. + +"""""""" +binutils +"""""""" + +.. https://groups.google.com/forum/#!topic/spack/i_7l_kEEveI + +Sometimes, strange error messages can happen while building a package. +For example, ``ld`` might crash. 
Or one receives a message like:
+
+.. code-block:: console
+
+   ld: final link failed: Nonrepresentable section on output
+
+
+or:
+
+.. code-block:: console
+
+   ld: .../_fftpackmodule.o: unrecognized relocation (0x2a) in section `.text'
+
+These problems are often caused by an outdated ``binutils`` on your
+system. Unlike CMake or Autotools, adding ``depends_on('binutils')`` to
+every package is not considered a best practice because every package
+written in C/C++/Fortran would need it. A potential workaround is to
+load a recent ``binutils`` into your environment and use the ``--dirty``
+flag.
+
+
+.. _cray-support:
+
+-------------
+Spack on Cray
+-------------
+
+Spack differs slightly when used on a Cray system. The architecture spec
+can differentiate between the front-end and back-end processor and operating system.
+For example, on Edison at NERSC, the back-end target processor
+is "Ivy Bridge", so you can specify the back-end target this way:
+
+.. code-block:: console
+
+   $ spack install zlib target=ivybridge
+
+You can also use the operating system to build against the back-end:
+
+.. code-block:: console
+
+   $ spack install zlib os=CNL10
+
+Notice that the name includes both the operating system name and the major
+version number concatenated together.
+
+Alternatively, if you want to build something for the front-end,
+you can specify the front-end target processor. The processor for a login node
+on Edison is "Sandy Bridge", so we specify it on the command line like so:
+
+.. code-block:: console
+
+   $ spack install zlib target=sandybridge
+
+And the front-end operating system is:
+
+.. code-block:: console
+
+   $ spack install zlib os=SuSE11
+
+^^^^^^^^^^^^^^^^^^^^^^^
+Cray compiler detection
+^^^^^^^^^^^^^^^^^^^^^^^
+
+Spack can detect compilers using two methods. For the front-end, we treat
+everything the same. The difference lies in back-end compiler detection.
+Back-end compiler detection is done via the Tcl ``module avail`` command.
+Once it detects the compiler, it writes the appropriate PrgEnv and compiler
+module name to ``compilers.yaml`` and sets the paths to each compiler with Cray's
+compiler wrapper names (i.e. cc, CC, ftn). During build time, Spack will load
+the correct PrgEnv and compiler module and will call the appropriate wrapper.
+
+The ``compilers.yaml`` config file will also differ. There is a
+modules section that is filled with the compiler's Programming Environment
+and module name. On other systems, this field is empty []:
+
+.. code-block:: yaml
+
+   - compiler:
+       modules:
+         - PrgEnv-intel
+         - intel/15.0.109
+
+As mentioned earlier, the compiler paths will look different on a Cray system.
+Since most compilers are invoked using cc, CC and ftn, the paths for each
+compiler are replaced with their respective Cray compiler wrapper names:
+
+.. code-block:: yaml
+
+   paths:
+     cc: cc
+     cxx: CC
+     f77: ftn
+     fc: ftn
+
+Using these wrapper names, as opposed to explicit paths to the compiler
+executables, allows Spack to call the Cray compiler wrappers during
+build time.
+
+For more on compiler configuration, check out :ref:`compiler-config`.
+
+Spack sets the default Cray link type to dynamic, to better match
+other platforms. Individual packages can enable static linking (which is the
+default outside of Spack on Cray systems) using the ``-static`` flag.
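+
+Putting the two ``compilers.yaml`` fragments above together, a complete
+back-end compiler entry might look roughly like the following (the
+``operating_system`` and ``spec`` values here are illustrative):
+
+.. code-block:: yaml
+
+   - compiler:
+       modules:
+         - PrgEnv-intel
+         - intel/15.0.109
+       operating_system: CNL10
+       paths:
+         cc: cc
+         cxx: CC
+         f77: ftn
+         fc: ftn
+       spec: intel@15.0.109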
+ +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Setting defaults and using Cray modules +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +If you want to use default compilers for each PrgEnv and also be able +to load cray external modules, you will need to set up a ``packages.yaml``. + +Here's an example of an external configuration for cray modules: + +.. code-block:: yaml + + packages: + mpi: + modules: + mpich@7.3.1%gcc@5.2.0 arch=cray_xc-haswell-CNL10: cray-mpich + mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-haswell-CNL10: cray-mpich + +This tells Spack that for whatever package that depends on mpi, load the +cray-mpich module into the environment. You can then be able to use whatever +environment variables, libraries, etc, that are brought into the environment +via module load. + +You can set the default compiler that Spack can use for each compiler type. +If you want to use the Cray defaults, then set them under ``all:`` in packages.yaml. +In the compiler field, set the compiler specs in your order of preference. +Whenever you build with that compiler type, Spack will concretize to that version. + +Here is an example of a full packages.yaml used at NERSC + +.. code-block:: yaml + + packages: + mpi: + modules: + mpich@7.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge: cray-mpich + mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-SuSE11-ivybridge: cray-mpich + buildable: False + netcdf: + modules: + netcdf@4.3.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge: cray-netcdf + netcdf@4.3.3.1%intel@16.0.0.109 arch=cray_xc-CNL10-ivybridge: cray-netcdf + buildable: False + hdf5: + modules: + hdf5@1.8.14%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge: cray-hdf5 + hdf5@1.8.14%intel@16.0.0.109 arch=cray_xc-CNL10-ivybridge: cray-hdf5 + buildable: False + all: + compiler: [gcc@5.2.0, intel@16.0.0.109] + +Here we tell spack that whenever we want to build with gcc use version 5.2.0 or +if we want to build with intel compilers, use version 16.0.0.109. We add a spec +for each compiler type for each cray modules. This ensures that for each +compiler on our system we can use that external module. + +For more on external packages check out the section :ref:`sec-external_packages`. diff --git a/lib/spack/docs/index.rst b/lib/spack/docs/index.rst index 45efcf131f..2463a208ab 100644 --- a/lib/spack/docs/index.rst +++ b/lib/spack/docs/index.rst @@ -37,25 +37,35 @@ package: If you're new to spack and want to start using it, see :doc:`getting_started`, or refer to the full manual below. ------------------ -Table of Contents ------------------ .. toctree:: :maxdepth: 2 + :caption: Tutorials features getting_started basic_usage - packaging_guide - mirrors + workflows + +.. toctree:: + :maxdepth: 2 + :caption: Reference Manual + configuration - developer_guide - case_studies - command_index + mirrors package_list + command_index + +.. toctree:: + :maxdepth: 2 + :caption: Contributing to Spack + + contribution_guide + packaging_guide + developer_guide API Docs <spack> + ================== Indices and tables ================== diff --git a/lib/spack/docs/mirrors.rst b/lib/spack/docs/mirrors.rst index c4d275a8fc..87ce3d96a2 100644 --- a/lib/spack/docs/mirrors.rst +++ b/lib/spack/docs/mirrors.rst @@ -52,7 +52,7 @@ contains tarballs for each package, named after each package. not standardize on a particular compression algorithm, because this would potentially require expanding and re-compressing each archive. -.. _spack-mirror: +.. 
_cmd-spack-mirror: ---------------- ``spack mirror`` @@ -148,7 +148,7 @@ can supply a file with specs in it, one per line: boost@1.44: boost@1.39.0 ... - $ spack mirror create -f specs.txt + $ spack mirror create --file specs.txt ... This is useful if there is a specific suite of software managed by @@ -237,7 +237,7 @@ as other Spack mirrors (so it can be copied anywhere and referenced with a URL like other mirrors). The mirror is maintained locally (within the Spack installation directory) at :file:`var/spack/cache/`. It is always enabled (and is always searched first when attempting to retrieve files for an installation) -but can be cleared with :ref:`purge <spack-purge>`; the cache directory can also +but can be cleared with :ref:`purge <cmd-spack-purge>`; the cache directory can also be deleted manually without issue. Caching includes retrieved tarball archives and source control repositories, but diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst index 70cd58f6c1..155bbcdb08 100644 --- a/lib/spack/docs/packaging_guide.rst +++ b/lib/spack/docs/packaging_guide.rst @@ -33,7 +33,7 @@ easy. Creating & editing packages --------------------------- -.. _spack-create: +.. _cmd-spack-create: ^^^^^^^^^^^^^^^^ ``spack create`` @@ -81,7 +81,7 @@ Spack will automatically download the number of tarballs you specify You do not *have* to download all of the versions up front. You can always choose to download just one tarball initially, and run -:ref:`spack checksum <spack-checksum>` later if you need more. +:ref:`cmd-spack-checksum` later if you need more. .. note:: @@ -93,14 +93,14 @@ always choose to download just one tarball initially, and run $ spack create --name cmake http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz If it fails entirely, you can get minimal boilerplate by using - :ref:`spack-edit-f`, or you can manually create a directory and + :ref:`spack edit --force <spack-edit-f>`, or you can manually create a directory and ``package.py`` file for the package in ``var/spack/repos/builtin/packages``. .. note:: Spack can fetch packages from source code repositories, but, ``spack create`` will *not* currently create a boilerplate package - from a repository URL. You will need to use :ref:`spack-edit-f` + from a repository URL. You will need to use :ref:`spack edit --force <spack-edit-f>` and manually edit the ``version()`` directives to fetch from a repo. See :ref:`vcs-fetch` for details. @@ -198,7 +198,7 @@ The rest of the tasks you need to do are as follows: Before going into details, we'll cover a few more basics. -.. _spack-edit: +.. _cmd-spack-edit: ^^^^^^^^^^^^^^ ``spack edit`` @@ -238,7 +238,7 @@ package: .. code-block:: console - $ spack edit -f foo + $ spack edit --force foo Unlike ``spack create``, which infers names and versions, and which actually downloads the tarball and checksums it for you, ``spack edit @@ -271,8 +271,8 @@ Naming & directory structure ---------------------------- This section describes how packages need to be named, and where they -live in Spack's directory structure. In general, :ref:`spack-create` and -:ref:`spack-edit` handle creating package files for you, so you can skip +live in Spack's directory structure. In general, :ref:`cmd-spack-create` and +:ref:`cmd-spack-edit` handle creating package files for you, so you can skip most of the details here. 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -371,7 +371,134 @@ some examples: ================= ================= In general, you won't have to remember this naming convention because -:ref:`spack-create` and :ref:`spack-edit` handle the details for you. +:ref:`cmd-spack-create` and :ref:`cmd-spack-edit` handle the details for you. + +----------------- +Trusted Downloads +----------------- + +Spack verifies that the source code it downloads is not corrupted or +compromised; or at least, that it is the same version the author of +the Spack package saw when the package was created. If Spack uses a +download method it can verify, we say the download method is +*trusted*. Trust is important for *all downloads*: Spack +has no control over the security of the various sites from which it +downloads source code, and can never assume that any particular site +hasn't been compromised. + +Trust is established in different ways for different download methods. +For the most common download method --- a single-file tarball --- the +tarball is checksummed. Git downloads using ``commit=`` are trusted +implicitly, as long as a hash is specified. + +Spack also provides untrusted download methods: tarball URLs may be +supplied without a checksum, or Git downloads may specify a branch or +tag instead of a hash. If the user does not control or trust the +source of an untrusted download, it is a security risk. Unless otherwise +specified by the user for special cases, Spack should by default use +*only* trusted download methods. + +Unfortunately, Spack does not currently provide that guarantee. It +does provide the following mechanisms for safety: + +#. By default, Spack will only install a tarball package if it has a + checksum and that checksum matches. You can override this with + ``spack install --no-checksum``. + +#. Numeric versions are almost always tarball downloads, whereas + non-numeric versions not named ``develop`` frequently download + untrusted branches or tags from a version control system. As long + as a package has at least one numeric version, and no non-numeric + version named ``develop``, Spack will prefer it over any + non-numeric versions. + +^^^^^^^^^ +Checksums +^^^^^^^^^ + +For tarball downloads, Spack can currently support checksums using the +MD5, SHA-1, SHA-224, SHA-256, SHA-384, and SHA-512 algorithms. It +determines the algorithm to use based on the hash length. + +----------------------- +Package Version Numbers +----------------------- + +Most Spack versions are numeric, a tuple of integers; for example, +``apex@0.1``, ``ferret@6.96`` or ``py-netcdf@1.2.3.1``. Spack knows +how to compare and sort numeric versions. + +Some Spack versions involve slight extensions of numeric syntax; for +example, ``py-sphinx-rtd-theme@0.1.10a0``. In this case, numbers are +always considered to be "newer" than letters. This is for consistency +with `RPM <https://bugzilla.redhat.com/show_bug.cgi?id=50977>`_. + +Spack versions may also be arbitrary non-numeric strings; any string +here will suffice; for example, ``@develop``, ``@master``, ``@local``. +The following rules determine the sort order of numeric +vs. non-numeric versions: + +#. The non-numeric versions ``@develop`` is considered greatest (newest). + +#. Numeric versions are all less than ``@develop`` version, and are + sorted numerically. + +#. All other non-numeric versions are less than numeric versions, and + are sorted alphabetically. + +The logic behind this sort order is two-fold: + +#. 
Non-numeric versions are usually used for special cases while + developing or debugging a piece of software. Keeping most of them + less than numeric versions ensures that Spack choose numeric + versions by default whenever possible. + +#. The most-recent development version of a package will usually be + newer than any released numeric versions. This allows the + ``develop`` version to satisfy dependencies like ``depends_on(abc, + when="@x.y.z:")`` + +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Concretization Version Selection +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +When concretizing, many versions might match a user-supplied spec. +For example, the spec ``python`` matches all available versions of the +package ``python``. Similarly, ``python@3:`` matches all versions of +Python3. Given a set of versions that match a spec, Spack +concretization uses the following priorities to decide which one to +use: + +#. If the user provided a list of versions in ``packages.yaml``, the + first matching version in that list will be used. + +#. If one or more versions is specified as ``preferred=True``, in + either ``packages.yaml`` or ``package.py``, the largest matching + version will be used. ("Latest" is defined by the sort order + above). + +#. If no preferences in particular are specified in the package or in + ``packages.yaml``, then the largest matching non-develop version + will be used. By avoiding ``@develop``, this prevents users from + accidentally installing a ``@develop`` version. + +#. If all else fails and ``@develop`` is the only matching version, it + will be used. + +^^^^^^^^^^^^^ +Date Versions +^^^^^^^^^^^^^ + +If you wish to use dates as versions, it is best to use the format +``@date-yyyy-mm-dd``. This will ensure they sort in the correct +order. If you want your date versions to be numeric (assuming they +don't conflict with other numeric versions), you can use just +``yyyy.mm.dd``. + +Alternately, you might use a hybrid release-version / date scheme. +For example, ``@1.3.2016.08.31`` would mean the version from the +``1.3`` branch, as of August 31, 2016. + ------------------- Adding new versions @@ -459,19 +586,6 @@ it executable, then runs it with some arguments. installer = Executable(self.stage.archive_file) installer('--prefix=%s' % prefix, 'arg1', 'arg2', 'etc.') -^^^^^^^^^ -Checksums -^^^^^^^^^ - -Spack uses a checksum to ensure that the downloaded package version is -not corrupted or compromised. This is especially important when -fetching from insecure sources, like unencrypted http. By default, a -package will *not* be installed if it doesn't pass a checksum test -(though you can override this with ``spack install --no-checksum``). - -Spack can currently support checksums using the MD5, SHA-1, SHA-224, -SHA-256, SHA-384, and SHA-512 algorithms. It determines the algorithm -to use based on the hash length. ^^^^^^^^^^^^^ ``spack md5`` @@ -502,7 +616,7 @@ Doing this for lots of files, or whenever a new package version is released, is tedious. See ``spack checksum`` below for an automated version of this process. -.. _spack-checksum: +.. _cmd-spack-checksum: ^^^^^^^^^^^^^^^^^^ ``spack checksum`` @@ -584,39 +698,6 @@ call to your package with parameters indicating the repository URL and any branch, tag, or revision to fetch. See below for the parameters you'll need for each VCS system. -^^^^^^^^^^^^^^^^^^^^^^^^^ -Repositories and versions -^^^^^^^^^^^^^^^^^^^^^^^^^ - -The package author is responsible for coming up with a sensible name -for each version to be fetched from a repository. 
For example, if -you're fetching from a tag like ``v1.0``, you might call that ``1.0``. -If you're fetching a nameless git commit or an older subversion -revision, you might give the commit an intuitive name, like ``develop`` -for a development version, or ``some-fancy-new-feature`` if you want -to be more specific. - -In general, it's recommended to fetch tags or particular -commits/revisions, NOT branches or the repository mainline, as -branches move forward over time and you aren't guaranteed to get the -same thing every time you fetch a particular version. Life isn't -always simple, though, so this is not strictly enforced. - -When fetching from from the branch corresponding to the development version -(often called ``master``, ``trunk``, or ``dev``), it is recommended to -call this version ``develop``. Spack has special treatment for this version so -that ``@develop`` will satisfy dependencies like -``depends_on(abc, when="@x.y.z:")``. In other words, ``@develop`` is -greater than any other version. The rationale is that certain features or -options first appear in the development branch. Therefore if a package author -wants to keep the package on the bleeding edge and provide support for new -features, it is advised to use ``develop`` for such a version which will -greatly simplify writing dependencies and version-related conditionals. - -In some future release, Spack may support extrapolating repository -versions as it does for tarball URLs, but currently this is not -supported. - .. _git-fetch: ^^^ @@ -642,8 +723,7 @@ Default branch ... version('develop', git='https://github.com/example-project/example.git') - This is not recommended, as the contents of the default branch - change over time. + This download method is untrusted, and is not recommended. Tags To fetch from a particular tag, use the ``tag`` parameter along with @@ -654,6 +734,8 @@ Tags version('1.0.1', git='https://github.com/example-project/example.git', tag='v1.0.1') + This download method is untrusted, and is not recommended. + Branches To fetch a particular branch, use ``branch`` instead: @@ -662,8 +744,7 @@ Branches version('experimental', git='https://github.com/example-project/example.git', branch='experimental') - This is not recommended, as the contents of branches change over - time. + This download method is untrusted, and is not recommended. Commits Finally, to fetch a particular commit, use ``commit``: @@ -681,6 +762,9 @@ Commits version('2014-10-08', git='https://github.com/example-project/example.git', commit='9d38cd') + This download method *is trusted*. It is the recommended way to + securely download from a Git repository. + It may be useful to provide a saner version for commits like this, e.g. you might use the date as the version, as done above. Or you could just use the abbreviated commit hash. It's up to the package @@ -696,19 +780,24 @@ Submodules version('1.0.1', git='https://github.com/example-project/example.git', tag='v1.0.1', submdoules=True) -^^^^^^^^^^ -Installing -^^^^^^^^^^ -You can fetch and install any of the versions above as you'd expect, -by using ``@<version>`` in a spec: +.. _github-fetch: -.. code-block:: console +"""""" +GitHub +"""""" - $ spack install example@2014-10-08 +If a project is hosted on GitHub, *any* valid Git branch, tag or hash +may be downloaded as a tarball. This is accomplished simply by +constructing an appropriate URL. Spack can checksum any package +downloaded this way, thereby producing a trusted download. 
For +example, the following downloads a particular hash, and then applies a +checksum. -Git and other VCS versions will show up in the list of versions when -a user runs ``spack info <package name>``. +.. code-block:: python + + version('1.9.5.1.1', 'd035e4bc704d136db79b43ab371b27d2', + url='https://www.github.com/jswhit/pyproj/tarball/0be612cc9f972e38b50a90c946a9b353e2ab140f') .. _hg-fetch: @@ -726,8 +815,7 @@ Default version('develop', hg='https://jay.grs.rwth-aachen.de/hg/example') - Note that this is not recommended; try to fetch a particular - revision instead. + This download method is untrusted, and is not recommended. Revisions Add ``hg`` and ``revision`` parameters: @@ -737,6 +825,8 @@ Revisions version('1.0', hg='https://jay.grs.rwth-aachen.de/hg/example', revision='v1.0') + This download method is untrusted, and is not recommended. + Unlike ``git``, which has special parameters for different types of revisions, you can use ``revision`` for branches, tags, and commits when you fetch with Mercurial. @@ -759,7 +849,7 @@ Fetching the head version('develop', svn='https://outreach.scidac.gov/svn/libmonitor/trunk') - This is not recommended, as the head will move forward over time. + This download method is untrusted, and is not recommended. Fetching a revision To fetch a particular revision, add a ``revision`` to the @@ -770,9 +860,41 @@ Fetching a revision version('develop', svn='https://outreach.scidac.gov/svn/libmonitor/trunk', revision=128) + This download method is untrusted, and is not recommended. + Subversion branches are handled as part of the directory structure, so you can check out a branch or tag by changing the ``url``. +----------------------------------------- +Standard repositories for python packages +----------------------------------------- + +In addition to their developer websites, many python packages are hosted at the +`Python Package Index (PyPi) <https://pypi.python.org/pypi>`_. Although links to +these individual files are typically `generated using a hash +<https://bitbucket.org/pypa/pypi/issues/438>`_ it is often possible to find a +reliable link of the format + +.. code-block:: sh + + https://pypi.python.org/packages/source/<first letter of package>/<package>/<package>-<version>.<extension> + +Packages hosted on GitHub and the like are often developer versions that do not +contain all of the files (e.g. configuration scripts) necessary to support +compilation. For this reason it is ideal to link to a repository such as PyPi +if possible. + +More recently, sources are being indexed at `pypi.io <https://pypi.io>`_ as +well. Links obtained from this site follow a similar pattern, namely + +.. code-block:: sh + + https://pypi.io/packages/source/<first letter of package>/<package>/<package>-<version>.<extension> + +These links currently redirect back to `pypi.python.org +<https://pypi.python.org>`_, but this `may change in the future +<https://bitbucket.org/pypa/pypi/issues/438#comment-27243225>`_. + ------------------------------------------------- Expanding additional resources in the source tree ------------------------------------------------- @@ -1054,7 +1176,7 @@ function gives you some benefits. First, spack ensures that the ``patch()`` function is run once per code checkout. That means that if you run install, hit ctrl-C, and run install again, the code in the patch function is only run once. Also, you can tell Spack to run only -the patching part of the build using the :ref:`spack-patch` command. 
+the patching part of the build using the :ref:`cmd-spack-patch` command. --------------- Handling RPATHs @@ -1068,7 +1190,7 @@ dynamic loader where to find its dependencies at runtime. You may be familiar with `LD_LIBRARY_PATH <http://tldp.org/HOWTO/Program-Library-HOWTO/shared-libraries.html>`_ on Linux or `DYLD_LIBRARY_PATH -<https://developer.apple.com/library/mac/documentation/Darwin/Reference/ManPages/man1/dyld.1.html>` +<https://developer.apple.com/library/mac/documentation/Darwin/Reference/ManPages/man1/dyld.1.html>`_ on Mac OS X. RPATH is similar to these paths, in that it tells the loader where to find libraries. Unlike them, it is embedded in the binary and not set in each user's environment. @@ -1125,7 +1247,7 @@ information about the package, and to determine where to download its source code. Spack uses the tarball URL to extrapolate where to find other tarballs -of the same package (e.g. in `spack checksum <spack-checksum_>`_, but +of the same package (e.g. in :ref:`cmd-spack-checksum`, but this does not always work. This section covers ways you can tell Spack to find tarballs elsewhere. @@ -1136,7 +1258,7 @@ Spack to find tarballs elsewhere. ^^^^^^^^^^^^ When spack tries to find available versions of packages (e.g. with -`spack checksum <spack-checksum_>`_), it spiders the parent directory +:ref:`cmd-spack-checksum`), it spiders the parent directory of the tarball in the ``url`` attribute. For example, for libelf, the url is: @@ -1345,31 +1467,34 @@ Additionally, dependencies may be specified for specific use cases: The dependency types are: - * **"build"**: The dependency package is made available during the - package's build. While the package is built, the dependency - package's install directory will be added to ``PATH``, the - compiler include and library paths, as well as ``PYTHONPATH``. - This only applies during this package's build; other packages - which depend on this one will not know about the dependency - package. In other words, building another project Y doesn't know - about this project X's build dependencies. - * **"link"**: The dependency package is linked against by this - package, presumably via shared libraries. The dependency package - will be added to this package's run-time library search path - ``rpath``. - * **"run"**: The dependency package is used by this package at run - time. The dependency package will be added to both ``PATH`` and - ``PYTHONPATH`` at run time, but not during build time. **"link"** - and **"run"** are similar in that they both describe a dependency - that exists when the package is used, but they differ in the - mechanism: **"link"** is via shared libraries, and **"run"** via - an explicit search. - -If not specified, ``type`` is assumed to be ``("build", "link")``. -This is the common case for compiled language usage. Also available -are the aliases ``"alldeps"`` for all dependency types combined, and -``"nolink"`` (``("build", "run")``) for use by dependencies which are -not expressed via a linker (e.g., Python or Lua module loading). + * **"build"**: made available during the project's build. The package will + be added to ``PATH``, the compiler include paths, and ``PYTHONPATH``. + Other projects which depend on this one will not have these modified + (building project X doesn't need project Y's build dependencies). + * **"link"**: the project is linked to by the project. The package will be + added to the current package's ``rpath``. + * **"run"**: the project is used by the project at runtime. 
The package will + be added to ``PATH`` and ``PYTHONPATH``. + +Additional hybrid dependency types are (note the lack of quotes): + + * **<not specified>**: ``type`` assumed to be ``("build", + "link")``. This is the common case for compiled language usage. + * **alldeps**: All dependency types. **Note:** No quotes here + * **nolink**: Equal to ``("build", "run")``, for use by dependencies + that are not expressed via a linker (e.g., Python or Lua module + loading). **Note:** No quotes here + +""""""""""""""""""" +Dependency Formulas +""""""""""""""""""" + +This section shows how to write appropriate ``depends_on()`` +declarations for some common cases. + +* Python 2 only: ``depends_on('python@:2.8')`` +* Python 2.7 only: ``depends_on('python@2.7:2.8')`` +* Python 3 only: ``depends_on('python@3:')`` .. _setup-dependent-environment: @@ -1458,6 +1583,17 @@ Now, the ``py-numpy`` package can be used as an argument to ``spack activate``. When it is activated, all the files in its prefix will be symbolically linked into the prefix of the python package. +Some packages produce a Python extension, but are only compatible with +Python 3, or with Python 2. In those cases, a ``depends_on()`` +declaration should be made in addition to the ``extends()`` +declaration: + +.. code-block:: python + + class Icebin(Package): + extends('python', when='+python') + depends_on('python@3:', when='+python') + Many packages produce Python extensions for *some* variants, but not others: they should extend ``python`` only if the appropriate variant(s) are selected. This may be accomplished with conditional @@ -1779,7 +1915,7 @@ explicitly. Concretization policies are discussed in more detail in :ref:`configuration`. Sites using Spack can customize them to match the preferences of their own users. -.. _spack-spec: +.. _cmd-spack-spec: ^^^^^^^^^^^^^^ ``spack spec`` @@ -1817,6 +1953,46 @@ See the :ref:`concretization-preferences` section for more details. .. _install-method: +------------------ +Inconsistent Specs +------------------ + +Suppose a user needs to install package C, which depends on packages A +and B. Package A builds a library with a Python2 extension, and +package B builds a library with a Python3 extension. Packages A and B +cannot be loaded together in the same Python runtime: + +.. code-block:: python + + class A(Package): + variant('python', default=True, 'enable python bindings') + depends_on('python@2.7', when='+python') + def install(self, spec, prefix): + # do whatever is necessary to enable/disable python + # bindings according to variant + + class B(Package): + variant('python', default=True, 'enable python bindings') + depends_on('python@3.2:', when='+python') + def install(self, spec, prefix): + # do whatever is necessary to enable/disable python + # bindings according to variant + +Package C needs to use the libraries from packages A and B, but does +not need either of the Python extensions. In this case, package C +should simply depend on the ``~python`` variant of A and B: + +.. code-block:: python + + class C(Package): + depends_on('A~python') + depends_on('B~python') + +This may require that A or B be built twice, if the user wishes to use +the Python extensions provided by them: once for ``+python`` and once +for ``~python``. Other than using a little extra disk space, that +solution has no serious problems. + ----------------------------------- Implementing the ``install`` method ----------------------------------- @@ -1972,7 +2148,7 @@ discover its dependencies. 
If you want to see the environment that a package will build with, or if you want to run commands in that environment to test them out, you -can use the :ref:`spack env <spack-env>` command, documented +can use the :ref:`cmd-spack-env` command, documented below. .. _compiler-wrappers: @@ -2751,7 +2927,7 @@ A typical package workflow might look like this: Below are some commands that will allow you some finer-grained control over the install process. -.. _spack-fetch: +.. _cmd-spack-fetch: ^^^^^^^^^^^^^^^ ``spack fetch`` @@ -2766,7 +2942,7 @@ directory will be located under ``$SPACK_HOME/var/spack``. When run after the archive has already been downloaded, ``spack fetch`` is idempotent and will not download the archive again. -.. _spack-stage: +.. _cmd-spack-stage: ^^^^^^^^^^^^^^^ ``spack stage`` @@ -2777,7 +2953,7 @@ the downloaded archive in its temporary directory, where it will be built by ``spack install``. Similar to ``fetch``, if the archive has already been expanded, ``stage`` is idempotent. -.. _spack-patch: +.. _cmd-spack-patch: ^^^^^^^^^^^^^^^ ``spack patch`` @@ -2791,7 +2967,7 @@ this step if they have been. If Spack discovers that patches didn't apply cleanly on some previous run, then it will restage the entire package before patching. -.. _spack-restage: +.. _cmd-spack-restage: ^^^^^^^^^^^^^^^^^ ``spack restage`` @@ -2807,7 +2983,7 @@ Does this in one of two ways: #. If the source was checked out from a repository, this deletes the build directory and checks it out again. -.. _spack-clean: +.. _cmd-spack-clean: ^^^^^^^^^^^^^^^ ``spack clean`` @@ -2818,7 +2994,7 @@ expanded/checked out source code *and* any downloaded archive. If ``fetch``, ``stage``, or ``install`` are run again after this, Spack's build process will start from scratch. -.. _spack-purge: +.. _cmd-spack-purge: ^^^^^^^^^^^^^^^ ``spack purge`` @@ -2876,7 +3052,7 @@ to get rid of the install prefix before you build again: Graphing dependencies --------------------- -.. _spack-graph: +.. _cmd-spack-graph: ^^^^^^^^^^^^^^^ ``spack graph`` @@ -2936,7 +3112,7 @@ For ``csh`` and ``tcsh`` run: ``spack cd`` will then be available. -.. _spack-cd: +.. _cmd-spack-cd: ^^^^^^^^^^^^ ``spack cd`` @@ -2961,14 +3137,14 @@ build it: directory containing the expanded ``libelf`` source code. There are a number of other places you can cd to in the spack directory hierarchy: -.. command-output:: spack cd -h +.. command-output:: spack cd --help Some of these change directory into package-specific locations (stage directory, install directory, package directory) and others change to -core spack locations. For example, ``spack cd -m`` will take you to +core spack locations. For example, ``spack cd --module-dir`` will take you to the main python source directory of your spack install. -.. _spack-env: +.. _cmd-spack-env: ^^^^^^^^^^^^^ ``spack env`` @@ -2997,7 +3173,7 @@ provide them after the spec argument to ``spack env``: This will cd to the build directory and then run ``configure`` in the package's build environment. -.. _spack-location: +.. _cmd-spack-location: ^^^^^^^^^^^^^^^^^^ ``spack location`` @@ -3009,13 +3185,13 @@ cd'ing to it. In bash, this: .. code-block:: console - $ cd $(spack location -b <spec>) + $ cd $(spack location --build-dir <spec>) is the same as: .. code-block:: console - $ spack cd -b <spec> + $ spack cd --build-dir <spec> ``spack location`` is intended for use in scripts or makefiles that need to know where packages are installed. e.g., in a makefile you @@ -3023,19 +3199,19 @@ might write: .. 
code-block:: makefile - DWARF_PREFIX = $(spack location -i libdwarf) + DWARF_PREFIX = $(spack location --install-dir libdwarf) CXXFLAGS += -I$DWARF_PREFIX/include CXXFLAGS += -L$DWARF_PREFIX/lib ----------------------------------- +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Build System Configuration Support ----------------------------------- +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -Imagine a developer creating a CMake or Autotools-based project in a local -directory, which depends on libraries A-Z. Once Spack has installed -those dependencies, one would like to run ``cmake`` with appropriate -command line and environment so CMake can find them. The ``spack -setup`` command does this conveniently, producing a CMake +Imagine a developer creating a CMake or Autotools-based project in a +local directory, which depends on libraries A-Z. Once Spack has +installed those dependencies, one would like to run ``cmake`` with +appropriate command line and environment so CMake can find them. The +``spack setup`` command does this conveniently, producing a CMake configuration that is essentially the same as how Spack *would have* configured the project. This can be demonstrated with a usage example: diff --git a/lib/spack/docs/workflows.rst b/lib/spack/docs/workflows.rst new file mode 100644 index 0000000000..314f57c8b5 --- /dev/null +++ b/lib/spack/docs/workflows.rst @@ -0,0 +1,1198 @@ +========= +Workflows +========= + +The process of using Spack involves building packages, running +binaries from those packages, and developing software that depends on +those packages. For example, one might use Spack to build the +``netcdf`` package, use ``spack load`` to run the ``ncdump`` binary, and +finally, write a small C program to read/write a particular NetCDF file. + +Spack supports a variety of workflows to suit a variety of situations +and user preferences, there is no single way to do all these things. +This chapter demonstrates different workflows that have been +developed, pointing out the pros and cons of them. + +----------- +Definitions +----------- + +First some basic definitions. + +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Package, Concrete Spec, Installed Package +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +In Spack, a package is an abstract recipe to build one piece of software. +Spack packages may be used to build, in principle, any version of that +software with any set of variants. Examples of packages include +``curl`` and ``zlib``. + +A package may be *instantiated* to produce a concrete spec; one +possible realization of a particular package, out of combinatorially +many other realizations. For example, here is a concrete spec +instantiated from ``curl``: + +.. code-block:: console + + curl@7.50.1%gcc@5.3.0 arch=linux-SuSE11-x86_64 + ^openssl@system%gcc@5.3.0 arch=linux-SuSE11-x86_64 + ^zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64 + +Spack's core concretization algorithm generates concrete specs by +instantiating packages from its repo, based on a set of "hints", +including user input and the ``packages.yaml`` file. This algorithm +may be accessed at any time with the ``spack spec`` command. For +example: + +.. code-block:: console + + $ spack spec curl + curl@7.50.1%gcc@5.3.0 arch=linux-SuSE11-x86_64 + ^openssl@system%gcc@5.3.0 arch=linux-SuSE11-x86_64 + ^zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64 + +Every time Spack installs a package, that installation corresponds to +a concrete spec. Only a vanishingly small fraction of possible +concrete specs will be installed at any one Spack site. 
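+
+The same package can yield different concrete specs when the abstract
+spec is constrained further. As a rough illustration (the exact output
+depends on your site's compilers and ``packages.yaml``; the compiler and
+versions shown here are only examples), asking for a particular compiler
+changes the result:
+
+.. code-block:: console
+
+   $ spack spec curl %gcc@4.9.3
+   curl@7.50.1%gcc@4.9.3 arch=linux-SuSE11-x86_64
+       ^openssl@system%gcc@4.9.3 arch=linux-SuSE11-x86_64
+       ^zlib@1.2.8%gcc@4.9.3 arch=linux-SuSE11-x86_64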
+ +^^^^^^^^^^^^^^^ +Consistent Sets +^^^^^^^^^^^^^^^ + +A set of Spack specs is said to be *consistent* if each package is +only instantiated one way within it --- that is, if two specs in the +set have the same package, then they must also have the same version, +variant, compiler, etc. For example, the following set is consistent: + +.. code-block:: console + + curl@7.50.1%gcc@5.3.0 arch=linux-SuSE11-x86_64 + ^openssl@system%gcc@5.3.0 arch=linux-SuSE11-x86_64 + ^zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64 + zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64 + +The following set is not consistent: + +.. code-block:: console + + curl@7.50.1%gcc@5.3.0 arch=linux-SuSE11-x86_64 + ^openssl@system%gcc@5.3.0 arch=linux-SuSE11-x86_64 + ^zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64 + zlib@1.2.7%gcc@5.3.0 arch=linux-SuSE11-x86_64 + +The compatibility of a set of installed packages determines what may +be done with it. It is always possible to ``spack load`` any set of +installed packages, whether or not they are consistent, and run their +binaries from the command line. However, a set of installed packages +can only be linked together in one binary if it is consistent. + +If the user produces a series of ``spack spec`` or ``spack load`` +commands, in general there is no guarantee of consistency between +them. Spack's concretization procedure guarantees that the results of +any *single* ``spack spec`` call will be consistent. Therefore, the +best way to ensure a consistent set of specs is to create a Spack +package with dependencies, and then instantiate that package. We will +use this technique below. + +----------------- +Building Packages +----------------- + +Suppose you are tasked with installing a set of software packages on a +system in order to support one application -- both a core application +program, plus software to prepare input and analyze output. The +required software might be summed up as a series of ``spack install`` +commands placed in a script. If needed, this script can always be run +again in the future. For example: + +.. code-block:: sh + + #!/bin/sh + spack install modele-utils + spack install emacs + spack install ncview + spack install nco + spack install modele-control + spack install py-numpy + +In most cases, this script will not correctly install software +according to your specific needs: choices need to be made for +variants, versions and virtual dependency choices may be needed. It +*is* possible to specify these choices by extending specs on the +command line; however, the same choices must be specified repeatedly. +For example, if you wish to use ``openmpi`` to satisfy the ``mpi`` +dependency, then ``^openmpi`` will have to appear on *every* ``spack +install`` line that uses MPI. It can get repetitive fast. + +Customizing Spack installation options is easier to do in the +``~/.spack/packages.yaml`` file. In this file, you can specify +preferred versions and variants to use for packages. For example: + +.. code-block:: yaml + + packages: + python: + version: [3.5.1] + modele-utils: + version: [cmake] + + everytrace: + version: [develop] + eigen: + variants: ~suitesparse + netcdf: + variants: +mpi + + all: + compiler: [gcc@5.3.0] + providers: + mpi: [openmpi] + blas: [openblas] + lapack: [openblas] + + +This approach will work as long as you are building packages for just +one application. + +^^^^^^^^^^^^^^^^^^^^^ +Multiple Applications +^^^^^^^^^^^^^^^^^^^^^ + +Suppose instead you're building multiple inconsistent applications. 
+For example, users want package A to be built with ``openmpi`` and +package B with ``mpich`` --- but still share many other lower-level +dependencies. In this case, a single ``packages.yaml`` file will not +work. Plans are to implement *per-project* ``packages.yaml`` files. +In the meantime, one could write shell scripts to switch +``packages.yaml`` between multiple versions as needed, using symlinks. + +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Combinatorial Sets of Installs +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Suppose that you are now tasked with systematically building many +incompatible versions of packages. For example, you need to build +``petsc`` 9 times for 3 different MPI implementations on 3 different +compilers, in order to support user needs. In this case, you will +need to either create 9 different ``packages.yaml`` files; or more +likely, create 9 different ``spack install`` command lines with the +correct options in the spec. Here is a real-life example of this kind +of usage: + +.. code-block:: sh + + #!/bin/sh + + compilers=( + %gcc + %intel + %pgi + ) + + mpis=( + openmpi+psm~verbs + openmpi~psm+verbs + mvapich2+psm~mrail + mvapich2~psm+mrail + mpich+verbs + ) + + for compiler in "${compilers[@]}" + do + # Serial installs + spack install szip $compiler + spack install hdf $compiler + spack install hdf5 $compiler + spack install netcdf $compiler + spack install netcdf-fortran $compiler + spack install ncview $compiler + + # Parallel installs + for mpi in "${mpis[@]}" + do + spack install $mpi $compiler + spack install hdf5~cxx+mpi $compiler ^$mpi + spack install parallel-netcdf $compiler ^$mpi + done + done + +------------------------------ +Running Binaries from Packages +------------------------------ + +Once Spack packages have been built, the next step is to use them. As +with building packages, there are many ways to use them, depending on +the use case. + +^^^^^^^^^^^^ +Find and Run +^^^^^^^^^^^^ + +The simplest way to run a Spack binary is to find it and run it! +In many cases, nothing more is needed because Spack builds binaries +with RPATHs. Spack installation directories may be found with ``spack +location --install-dir`` commands. For example: + +.. code-block:: console + + $ spack location --install-dir cmake + /home/me/spack2/opt/spack/linux-SuSE11-x86_64/gcc-5.3.0/cmake-3.6.0-7cxrynb6esss6jognj23ak55fgxkwtx7 + +This gives the root of the Spack package; relevant binaries may be +found within it. For example: + +.. code-block:: console + + $ CMAKE=`spack location --install-dir cmake`/bin/cmake + + +Standard UNIX tools can find binaries as well. For example: + +.. code-block:: console + + $ find ~/spack2/opt -name cmake | grep bin + /home/me/spack2/opt/spack/linux-SuSE11-x86_64/gcc-5.3.0/cmake-3.6.0-7cxrynb6esss6jognj23ak55fgxkwtx7/bin/cmake + +These methods are suitable, for example, for setting up build +processes or GUIs that need to know the location of particular tools. +However, other more powerful methods are generally preferred for user +environments. + + +^^^^^^^^^^^^^^^^^^^^^^^ +Spack-Generated Modules +^^^^^^^^^^^^^^^^^^^^^^^ + +Suppose that Spack has been used to install a set of command-line +programs, which users now wish to use. One can in principle put a +number of ``spack load`` commands into ``.bashrc``, for example, to +load a set of Spack-generated modules: + +.. 
code-block:: sh + + spack load modele-utils + spack load emacs + spack load ncview + spack load nco + spack load modele-control + +Although simple load scripts like this are useful in many cases, they +have some drawbacks: + +1. The set of modules loaded by them will in general not be + consistent. They are a decent way to load commands to be called + from command shells. See below for better ways to assemble a + consistent set of packages for building application programs. + +2. The ``spack spec`` and ``spack install`` commands use a + sophisticated concretization algorithm that chooses the "best" + among several options, taking into account ``packages.yaml`` file. + The ``spack load`` and ``spack module loads`` commands, on the + other hand, are not very smart: if the user-supplied spec matches + more than one installed package, then ``spack module loads`` will + fail. This may change in the future. For now, the workaround is to + be more specific on any ``spack module loads`` lines that fail. + + +"""""""""""""""""""""" +Generated Load Scripts +"""""""""""""""""""""" + +Another problem with using `spack load` is, it is slow; a typical user +environment could take several seconds to load, and would not be +appropriate to put into ``.bashrc`` directly. It is preferable to use +a series of ``spack module loads`` commands to pre-compute which +modules to load. These can be put in a script that is run whenever +installed Spack packages change. For example: + +.. code-block:: sh + + #!/bin/sh + # + # Generate module load commands in ~/env/spackenv + + cat <<EOF | /bin/sh >$HOME/env/spackenv + FIND='spack module loads --prefix linux-SuSE11-x86_64/' + + \$FIND modele-utils + \$FIND emacs + \$FIND ncview + \$FIND nco + \$FIND modele-control + EOF + +The output of this file is written in ``~/env/spackenv``: + +.. code-block:: sh + + # binutils@2.25%gcc@5.3.0+gold~krellpatch~libiberty arch=linux-SuSE11-x86_64 + module load linux-SuSE11-x86_64/binutils-2.25-gcc-5.3.0-6w5d2t4 + # python@2.7.12%gcc@5.3.0~tk~ucs4 arch=linux-SuSE11-x86_64 + module load linux-SuSE11-x86_64/python-2.7.12-gcc-5.3.0-2azoju2 + # ncview@2.1.7%gcc@5.3.0 arch=linux-SuSE11-x86_64 + module load linux-SuSE11-x86_64/ncview-2.1.7-gcc-5.3.0-uw3knq2 + # nco@4.5.5%gcc@5.3.0 arch=linux-SuSE11-x86_64 + module load linux-SuSE11-x86_64/nco-4.5.5-gcc-5.3.0-7aqmimu + # modele-control@develop%gcc@5.3.0 arch=linux-SuSE11-x86_64 + module load linux-SuSE11-x86_64/modele-control-develop-gcc-5.3.0-7rddsij + # zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64 + module load linux-SuSE11-x86_64/zlib-1.2.8-gcc-5.3.0-fe5onbi + # curl@7.50.1%gcc@5.3.0 arch=linux-SuSE11-x86_64 + module load linux-SuSE11-x86_64/curl-7.50.1-gcc-5.3.0-4vlev55 + # hdf5@1.10.0-patch1%gcc@5.3.0+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=linux-SuSE11-x86_64 + module load linux-SuSE11-x86_64/hdf5-1.10.0-patch1-gcc-5.3.0-pwnsr4w + # netcdf@4.4.1%gcc@5.3.0~hdf4+mpi arch=linux-SuSE11-x86_64 + module load linux-SuSE11-x86_64/netcdf-4.4.1-gcc-5.3.0-rl5canv + # netcdf-fortran@4.4.4%gcc@5.3.0 arch=linux-SuSE11-x86_64 + module load linux-SuSE11-x86_64/netcdf-fortran-4.4.4-gcc-5.3.0-stdk2xq + # modele-utils@cmake%gcc@5.3.0+aux+diags+ic arch=linux-SuSE11-x86_64 + module load linux-SuSE11-x86_64/modele-utils-cmake-gcc-5.3.0-idyjul5 + # everytrace@develop%gcc@5.3.0+fortran+mpi arch=linux-SuSE11-x86_64 + module load linux-SuSE11-x86_64/everytrace-develop-gcc-5.3.0-p5wmb25 + +Users may now put ``source ~/env/spackenv`` into ``.bashrc``. + +.. 
note :: + + Some module systems put a prefix on the names of modules created + by Spack. For example, that prefix is ``linux-SuSE11-x86_64/`` in + the above case. If a prefix is not needed, you may omit the + ``--prefix`` flag from ``spack module loads``. + + +""""""""""""""""""""""" +Transitive Dependencies +""""""""""""""""""""""" + +In the script above, each ``spack module loads`` command generates a +*single* ``module load`` line. Transitive dependencies do not usually +need to be loaded, only modules the user needs in ``$PATH``. This is +because Spack builds binaries with RPATH. Spack's RPATH policy has +some nice features: + +#. Modules for multiple inconsistent applications may be loaded + simultaneously. In the above example (Multiple Applications), + package A and package B can coexist in the user's $PATH, + even though they use different MPIs. + +#. RPATH eliminates a whole class of strange errors that can happen + in non-RPATH binaries when the wrong ``LD_LIBRARY_PATH`` is + loaded. + +#. Recursive module systems such as LMod are not necessary. + +#. Modules are not needed at all to execute binaries. If a path to a + binary is known, it may be executed. For example, the path for a + Spack-built compiler can be given to an IDE without requiring the + IDE to load that compiler's module. + +Unfortunately, Spack's RPATH support does not work in all cases. For example: + +#. Software comes in many forms --- not just compiled ELF binaries, + but also as interpreted code in Python, R, JVM bytecode, etc. + Those systems almost universally use an environment variable + analogous to ``LD_LIBRARY_PATH`` to dynamically load libraries. + +#. Although Spack generally builds binaries with RPATH, it does not + currently do so for compiled Python extensions (for example, + ``py-numpy``). Any libraries that these extensions depend on + (``blas`` in this case, for example) must be specified in the + ``LD_LIBRARY_PATH``. + +#. In some cases, Spack-generated binaries end up without a + functional RPATH for no discernible reason. + +In cases where RPATH support doesn't make things "just work," it can +be necessary to load a module's dependencies as well as the module +itself. This is done by adding the ``--dependencies`` flag to the +``spack module loads`` command. For example, the following line, +added to the script above, would be used to load SciPy, along with +Numpy, core Python, BLAS/LAPACK and anything else needed: + +.. code-block:: sh + + spack module loads --dependencies py-scipy + +^^^^^^^^^^^^^^^^^^ +Extension Packages +^^^^^^^^^^^^^^^^^^ + +:ref:`packaging_extensions` may be used as an alternative to loading +Python (and similar systems) packages directly. If extensions are +activated, then ``spack load python`` will also load all the +extensions activated for the given ``python``. This reduces the need +for users to load a large number of modules. + +However, Spack extensions have two potential drawbacks: + +#. Activated packages that involve compiled C extensions may still + need their dependencies to be loaded manually. For example, + ``spack load openblas`` might be required to make ``py-numpy`` + work. + +#. Extensions "break" a core feature of Spack, which is that multiple + versions of a package can co-exist side-by-side. For example, + suppose you wish to run a Python package in two different + environments but the same basic Python --- one with + ``py-numpy@1.7`` and one with ``py-numpy@1.8``. Spack extensions + will not support this potential debugging use case.
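+
+For reference, a minimal session using activated extensions might look
+like the following (a sketch, assuming ``py-numpy`` has already been
+installed and activated as described in the packaging guide):
+
+.. code-block:: console
+
+   $ spack activate py-numpy
+   $ spack load python
+   $ python -c 'import numpy'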
+ + +^^^^^^^^^^^^^^ +Dummy Packages +^^^^^^^^^^^^^^ + +As an alternative to a series of ``module load`` commands, one might +consider dummy packages as a way to create a *consistent* set of +packages that may be loaded as one unit. The idea here is pretty +simple: + +#. Create a package (say, ``mydummy``) with no URL and no + ``install()`` method, just dependencies. + +#. Run ``spack install mydummy`` to install. + +An advantage of this method is the set of packages produced will be +consistent. This means that you can reliably build software against +it. A disadvantage is the set of packages will be consistent; this +means you cannot load up two applications this way if they are not +consistent with each other. + +^^^^^^^^^^^^^^^^ +Filesystem Views +^^^^^^^^^^^^^^^^ + +Filesystem views offer an alternative to environment modules, another +way to assemble packages in a useful way and load them into a user's +environment. + +A filesystem view is a single directory tree that is the union of the +directory hierarchies of a number of installed packages; it is similar +to the directory hiearchy that might exist under ``/usr/local``. The +files of the view's installed packages are brought into the view by +symbolic or hard links, referencing the original Spack installation. + +When software is built and installed, absolute paths are frequently +"baked into" the software, making it non-relocatable. This happens +not just in RPATHs, but also in shebangs, configuration files, and +assorted other locations. + +Therefore, programs run out of a Spack view will typically still look +in the original Spack-installed location for shared libraries and +other resources. This behavior is not easily changed; in general, +there is no way to know where absolute paths might be written into an +installed package, and how to relocate it. Therefore, the original +Spack tree must be kept in place for a filesystem view to work, even +if the view is built with hardlinks. + +.. FIXME: reference the relocation work of Hegner and Gartung (PR #1013) + + +"""""""""""""""""""""" +Using Filesystem Views +"""""""""""""""""""""" + +A filesystem view is created, and packages are linked in, by the ``spack +view`` command's ``symlink`` and ``hardlink`` sub-commands. The +``spack view remove`` command can be used to unlink some or all of the +filesystem view. + +The following example creates a filesystem view based +on an installed ``cmake`` package and then removes from the view the +files in the ``cmake`` package while retaining its dependencies. + +.. code-block:: console + + $ spack view --verbose symlink myview cmake@3.5.2 + ==> Linking package: "ncurses" + ==> Linking package: "zlib" + ==> Linking package: "openssl" + ==> Linking package: "cmake" + + $ ls myview/ + bin doc etc include lib share + + $ ls myview/bin/ + captoinfo clear cpack ctest infotocap openssl tabs toe tset + ccmake cmake c_rehash infocmp ncurses6-config reset tic tput + + $ spack view --verbose --dependencies false rm myview cmake@3.5.2 + ==> Removing package: "cmake" + + $ ls myview/bin/ + captoinfo c_rehash infotocap openssl tabs toe tset + clear infocmp ncurses6-config reset tic tput + +.. note:: + + If the set of packages being included in a view is inconsistent, + then it is possible that two packages will provide the same file. Any + conflicts of this type are handled on a first-come-first-served basis, + and a warning is printed. + +.. note:: + + When packages are removed from a view, empty directories are + purged. 
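+
+Returning to the dummy-package idea above, a rough sketch of such a
+package might look like the following. The ``mydummy`` name and its
+dependency list are hypothetical, and depending on your version of
+Spack you may also need to declare a placeholder ``version()``:
+
+.. code-block:: python
+
+   from spack import *
+
+   class Mydummy(Package):
+       """Dummy package: pulls in a consistent set of dependencies."""
+
+       homepage = "http://www.example.com"
+
+       # No url and no install() method -- just dependencies.
+       depends_on('netcdf-fortran')
+       depends_on('nco')
+       depends_on('py-numpy')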
+ +"""""""""""""""""" +Fine-Grain Control +"""""""""""""""""" + +The ``--exclude`` and ``--dependencies`` option flags allow for +fine-grained control over which packages and dependencies do or not +get included in a view. For example, suppose you are developing the +``appsy`` package. You wish to build against a view of all ``appsy`` +dependencies, but not ``appsy`` itself: + +.. code-block:: console + + $ spack view symlink --dependencies yes --exclude appsy appsy + +Alternately, you wish to create a view whose purpose is to provide +binary executables to end users. You only need to include +applications they might want, and not those applications' +dependencies. In this case, you might use: + +.. code-block:: console + + $ spack view symlink --dependencies no cmake + + +""""""""""""""""""""""" +Hybrid Filesystem Views +""""""""""""""""""""""" + +Although filesystem views are usually created by Spack, users are free +to add to them by other means. For example, imagine a filesystem +view, created by Spack, that looks something like: + +.. code-block:: console + + /path/to/MYVIEW/bin/programA -> /path/to/spack/.../bin/programA + /path/to/MYVIEW/lib/libA.so -> /path/to/spack/.../lib/libA.so + +Now, the user may add to this view by non-Spack means; for example, by +running a classic install script. For example: + +.. code-block:: console + + $ tar -xf B.tar.gz + $ cd B/ + $ ./configure --prefix=/path/to/MYVIEW \ + --with-A=/path/to/MYVIEW + $ make && make install + +The result is a hybrid view: + +.. code-block:: console + + /path/to/MYVIEW/bin/programA -> /path/to/spack/.../bin/programA + /path/to/MYVIEW/bin/programB + /path/to/MYVIEW/lib/libA.so -> /path/to/spack/.../lib/libA.so + /path/to/MYVIEW/lib/libB.so + +In this case, real files coexist, interleaved with the "view" +symlinks. At any time one can delete ``/path/to/MYVIEW`` or use +``spack view`` to manage it surgically. None of this will affect the +real Spack install area. + + +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +Discussion: Running Binaries +^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Modules, extension packages and filesystem views are all ways to +assemble sets of Spack packages into a useful environment. They are +all semantically similar, in that conflicting installed packages +cannot simultaneously be loaded, activated or included in a view. + +With all of these approaches, there is no guarantee that the +environment created will be consistent. It is possible, for example, +to simultaneously load application A that uses OpenMPI and application +B that uses MPICH. Both applications will run just fine in this +inconsistent environment because they rely on RPATHs, not the +environment, to find their dependencies. + +In general, environments set up using modules vs. views will work +similarly. Both can be used to set up ephemeral or long-lived +testing/development environments. Operational differences between the +two approaches can make one or the other preferable in certain +environments: + +* Filesystem views do not require environment module infrastructure. + Although Spack can install ``environment-modules``, users might be + hostile to its use. Filesystem views offer a good solution for + sysadmins serving users who just "want all the stuff I need in one + place" and don't want to hear about Spack. + +* Although modern build systems will find dependencies wherever they + might be, some applications with hand-built make files expect their + dependencies to be in one place. 
One common problem is makefiles + that assume that ``netcdf`` and ``netcdf-fortran`` are installed in + the same tree. Or, one might use an IDE that requires tedious + configuration of dependency paths; and it's easier to automate that + administration in a view-building script than in the IDE itself. + For all these cases, a view will be preferable to other ways to + assemble an environment. + +* On systems with I-node quotas, modules might be preferable to views + and extension packages. + +* Views and activated extensions maintain state that is semantically + equivalent to the information in a ``spack module loads`` script. + Administrators might find things easier to maintain without the + added "heavyweight" state of a view. + +------------------------------ +Developing Software with Spack +------------------------------ + +For any project, one needs to assemble an +environment of that application's dependencies. You might consider +loading a series of modules or creating a filesystem view. This +approach, while obvious, has some serious drawbacks: + +1. There is no guarantee that an environment created this way will be + consistent. Your application could end up with dependency A + expecting one version of MPI, and dependency B expecting another. + The linker will not be happy... + +2. Suppose you need to debug a package deep within your software DAG. + If you build that package with a manual environment, then it + becomes difficult to have Spack auto-build things that depend on + it. That could be a serious problem, depending on how deep the + package in question is in your dependency DAG. + +3. At its core, Spack is a sophisticated concretization algorithm that + matches up packages with appropriate dependencies and creates a + *consistent* environment for the package it's building. Writing a + list of ``spack load`` commands for your dependencies is at least + as hard as writing the same list of ``depends_on()`` declarations + in a Spack package. But it makes no use of Spack concretization + and is more error-prone. + +4. Spack provides an automated, systematic way not just to find a + packages's dependencies --- but also to build other packages on + top. Any Spack package can become a dependency for another Spack + package, offering a powerful vision of software re-use. If you + build your package A outside of Spack, then your ability to use it + as a building block for other packages in an automated way is + diminished: other packages depending on package A will not + be able to use Spack to fulfill that dependency. + +5. If you are reading this manual, you probably love Spack. You're + probably going to write a Spack package for your software so + prospective users can install it with the least amount of pain. + Why should you go to additional work to find dependencies in your + development environment? Shouldn't Spack be able to help you build + your software based on the package you've already written? + +In this section, we show how Spack can be used in the software +development process to greatest effect, and how development packages +can be seamlessly integrated into the Spack ecosystem. We will show +how this process works by example, assuming the software you are +creating is called ``mylib``. + +^^^^^^^^^^^^^^^^^^^^^ +Write the CMake Build +^^^^^^^^^^^^^^^^^^^^^ + +For now, the techniques in this section only work for CMake-based +projects, although they could be easily extended to other build +systems in the future. 
We will therefore assume you are using CMake +to build your project. + +The ``CMakeLists.txt`` file should be written as normal. A few caveats: + +1. Your project should produce binaries with RPATHs. This will ensure + that they work the same whether built manually or automatically by + Spack. For example: + +.. code-block:: cmake + + # enable @rpath in the install name for any shared library being built + # note: it is planned that a future version of CMake will enable this by default + set(CMAKE_MACOSX_RPATH 1) + + # Always use full RPATH + # http://www.cmake.org/Wiki/CMake_RPATH_handling + # http://www.kitware.com/blog/home/post/510 + + # use, i.e. don't skip the full RPATH for the build tree + SET(CMAKE_SKIP_BUILD_RPATH FALSE) + + # when building, don't use the install RPATH already + # (but later on when installing) + SET(CMAKE_BUILD_WITH_INSTALL_RPATH FALSE) + + # add the automatically determined parts of the RPATH + # which point to directories outside the build tree to the install RPATH + SET(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE) + + # the RPATH to be used when installing, but only if it's not a system directory + LIST(FIND CMAKE_PLATFORM_IMPLICIT_LINK_DIRECTORIES "${CMAKE_INSTALL_PREFIX}/lib" isSystemDir) + IF("${isSystemDir}" STREQUAL "-1") + SET(CMAKE_INSTALL_RPATH "${CMAKE_INSTALL_PREFIX}/lib") + ENDIF("${isSystemDir}" STREQUAL "-1") + + +2. Spack provides a CMake variable called + ``SPACK_TRANSITIVE_INCLUDE_PATH``, which contains the ``include/`` + directory for all of your project's transitive dependencies. It + can be useful if your project ``#include``s files from package B, + which ``#include`` files from package C, but your project only + lists project B as a dependency. This works in traditional + single-tree build environments, in which B and C's include files + live in the same place. In order to make it work with Spack as + well, you must add the following to ``CMakeLists.txt``. It will + have no effect when building without Spack: + + .. code-block:: cmake + + # Include all the transitive dependencies determined by Spack. + # If we're not running with Spack, this does nothing... + include_directories($ENV{SPACK_TRANSITIVE_INCLUDE_PATH}) + + .. note:: + + Note that this feature is controversial and could break with + future versions of GNU ld. The best practice is to make sure + anything you ``#include`` is listed as a dependency in your + CMakeLists.txt (and Spack package). + +.. _write-the-spack-package: + +^^^^^^^^^^^^^^^^^^^^^^^ +Write the Spack Package +^^^^^^^^^^^^^^^^^^^^^^^ + +The Spack package also needs to be written, in tandem with setting up +the build (for example, CMake). The most important part of this task +is declaring dependencies. Here is an example of the Spack package +for the ``mylib`` package (ellipses for brevity): + +.. code-block:: python + + class Mylib(CMakePackage): + """Misc. reusable utilities used by Myapp.""" + + homepage = "https://github.com/citibeth/mylib" + url = "https://github.com/citibeth/mylib/tarball/123" + + version('0.1.2', '3a6acd70085e25f81b63a7e96c504ef9') + version('develop', git='https://github.com/citibeth/mylib.git', + branch='develop') + + variant('everytrace', default=False, + description='Report errors through Everytrace') + ... + + extends('python') + + depends_on('eigen') + depends_on('everytrace', when='+everytrace') + depends_on('proj', when='+proj') + ... 
+ depends_on('cmake', type='build') + depends_on('doxygen', type='build') + + def configure_args(self): + spec = self.spec + return [ + '-DUSE_EVERYTRACE=%s' % ('YES' if '+everytrace' in spec else 'NO'), + '-DUSE_PROJ4=%s' % ('YES' if '+proj' in spec else 'NO'), + ... + '-DUSE_UDUNITS2=%s' % ('YES' if '+udunits2' in spec else 'NO'), + '-DUSE_GTEST=%s' % ('YES' if '+googletest' in spec else 'NO')] + +This is a standard Spack package that can be used to install +``mylib`` in a production environment. The list of dependencies in +the Spack package will generally be a repeat of the list of CMake +dependencies. This package also has some features that allow it to be +used for development: + +1. It subclasses ``CMakePackage`` instead of ``Package``. This + eliminates the need to write an ``install()`` method, which is + defined in the superclass. Instead, one just needs to write the + ``configure_args()`` method. That method should return the + arguments needed for the ``cmake`` command (beyond the standard + CMake arguments, which Spack will include already). These + arguments are typically used to turn features on/off in the build. + +2. It specifies a non-checksummed version ``develop``. Running + ``spack install mylib@develop`` the ``@develop`` version will + install the latest version off the develop branch. This method of + download is useful for the developer of a project while it is in + active development; however, it should only be used by developers + who control and trust the repository in question! + +3. The ``url``, ``url_for_version()`` and ``homepage`` attributes are + not used in development. Don't worry if you don't have any, or if + they are behind a firewall. + +^^^^^^^^^^^^^^^^ +Build with Spack +^^^^^^^^^^^^^^^^ + +Now that you have a Spack package, you can use Spack to find its +dependencies automatically. For example: + +.. code-block:: console + + $ cd mylib + $ spack setup mylib@local + +The result will be a file ``spconfig.py`` in the top-level +``mylib/`` directory. It is a short script that calls CMake with the +dependencies and options determined by Spack --- similar to what +happens in ``spack install``, but now written out in script form. +From a developer's point of view, you can think of ``spconfig.py`` as +a stand-in for the ``cmake`` command. + +.. note:: + + You can invent any "version" you like for the ``spack setup`` + command. + +.. note:: + + Although ``spack setup`` does not build your package, it does + create and install a module file, and mark in the database that + your package has been installed. This can lead to errors, of + course, if you don't subsequently install your package. + Also... you will need to ``spack uninstall`` before you run + ``spack setup`` again. + + +You can now build your project as usual with CMake: + +.. code-block:: console + + $ mkdir build; cd build + $ ../spconfig.py .. # Instead of cmake .. + $ make + $ make install + +Once your ``make install`` command is complete, your package will be +installed, just as if you'd run ``spack install``. Except you can now +edit, re-build and re-install as often as needed, without checking +into Git or downloading tarballs. + +.. note:: + + The build you get this way will be *almost* the same as the build + from ``spack install``. The only difference is, you will not be + using Spack's compiler wrappers. This difference has not caused + problems in our experience, as long as your project sets + RPATHs as shown above. You DO use RPATHs, right? 
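+
+Putting these steps together, a complete edit-build-install cycle with
+``spack setup`` might look roughly like this (a sketch; ``@local`` is
+the invented version label used above):
+
+.. code-block:: console
+
+   $ cd mylib
+   $ spack setup mylib@local          # writes ./spconfig.py
+   $ mkdir build && cd build
+   $ ../spconfig.py ..                # instead of cmake ..
+   $ make && make install             # edit sources and repeat as needed
+
+   $ cd .. && spack uninstall mylib@local   # needed before running spack setup again
+   $ spack setup mylib@local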
+ +^^^^^^^^^^^^^^^^^^^^ +Build Other Software +^^^^^^^^^^^^^^^^^^^^ + +Now that you've built ``mylib`` with Spack, you might want to build +another package that depends on it --- for example, ``myapp``. This +is accomplished easily enough: + +.. code-block:: console + + $ spack install myapp ^mylib@local + +Note that auto-built software has now been installed *on top of* +manually-built software, without breaking Spack's "web." This +property is useful if you need to debug a package deep in the +dependency hierarchy of your application. It is a *big* advantage of +using ``spack setup`` to build your package's environment. + +If you feel your software is stable, you might wish to install it with +``spack install`` and skip the source directory. You can just use, +for example: + +.. code-block:: console + + $ spack install mylib@develop + +.. _release-your-software: + +^^^^^^^^^^^^^^^^^^^^^ +Release Your Software +^^^^^^^^^^^^^^^^^^^^^ + +You are now ready to release your software as a tarball with a +numbered version, and a Spack package that can build it. If you're +hosted on GitHub, this process will be a bit easier. + +#. Put tag(s) on the version(s) in your GitHub repo you want to be + release versions. For example, a tag ``v0.1.0`` for version 0.1.0. + +#. Set the ``url`` in your ``package.py`` to download a tarball for + the appropriate version. GitHub will give you a tarball for any + commit in the repo, if you tickle it the right way. For example: + + .. code-block:: python + + url = 'https://github.com/citibeth/mylib/tarball/v0.1.2' + +#. Use Spack to determine your version's hash, and cut'n'paste it into + your ``package.py``: + + .. code-block:: console + + $ spack checksum mylib 0.1.2 + ==> Found 1 versions of mylib + 0.1.2 https://github.com/citibeth/mylib/tarball/v0.1.2 + + How many would you like to checksum? (default is 5, q to abort) + ==> Downloading... + ==> Trying to fetch from https://github.com/citibeth/mylib/tarball/v0.1.2 + ######################################################################## 100.0% + ==> Checksummed new versions of mylib: + version('0.1.2', '3a6acd70085e25f81b63a7e96c504ef9') + +#. You should now be able to install released version 0.1.2 of your package with: + + .. code-block:: console + + $ spack install mylib@0.1.2 + +#. There is no need to remove the ``develop`` version from your package. + Spack concretization will always prefer numbered versions to + non-numeric versions. Users will only get it if they ask for it. + +^^^^^^^^^^^^^^^^^^^^^^^^ +Distribute Your Software +^^^^^^^^^^^^^^^^^^^^^^^^ + +Once you've released your software, other people will want to build +it; and you will need to tell them how. In the past, that has meant a +few paragraphs of prose explaining which dependencies to install. But +now you use Spack, and those instructions are written in executable +Python code. But your software has many dependencies, and you know +Spack is the best way to install it: + +#. First, you will want to fork Spack's ``develop`` branch. Your aim + is to provide a stable version of Spack that you KNOW will install + your software. If you make changes to Spack in the process, you + will want to submit pull requests to Spack core. + +#. Add your software's ``package.py`` to that fork. You should submit + a pull request for this as well, unless you don't want the public + to know about your software. + +#. Prepare instructions that read approximately as follows: + + #. Download Spack from your forked repo. + + #.
Install Spack; see :ref:`getting_started`. + + #. Set up an appropriate ``packages.yaml`` file. You should tell + your users to include in this file whatever versions/variants + are needed to make your software work correctly (assuming those + are not already in your ``packages.yaml``). + + #. Run ``spack install mylib``. + + #. Run this script to generate the ``module load`` commands or + filesystem view needed to use this software. + +#. Be aware that your users might encounter unexpected bootstrapping + issues on their machines, especially if they are running on older + systems. The :ref:`getting_started` section should cover this, but + there could always be issues. + +^^^^^^^^^^^^^^^^^^^ +Other Build Systems +^^^^^^^^^^^^^^^^^^^ + +``spack setup`` currently only supports CMake-based builds, in +packages that subclass ``CMakePackage``. The intent is that this +mechanism should support a wider range of build systems; for example, +GNU Autotools. Someone well-versed in Autotools is needed to develop +this patch and test it out. + +Python Distutils is another popular build system that should get +``spack setup`` support. For non-compiled languages like Python, +``spack diy`` may be used. Even better is to put the source directory +directly in the user's ``PYTHONPATH``. Then, edits in source files +are immediately available to run without any install process at all! + +^^^^^^^^^^ +Conclusion +^^^^^^^^^^ + +The ``spack setup`` development workflow provides better automation, +flexibility and safety than workflows relying on environment modules +or filesystem views. However, it has some drawbacks: + +#. It currently works only with projects that use the CMake build + system. Support for other build systems is not hard to build, but + will require a small amount of effort for each build system to be + supported. It might not work well with some IDEs. + +#. It only works with packages that sub-class ``StagedPackage``. + Currently, most Spack packages do not. Converting them is not + hard; but must be done on a package-by-package basis. + +#. It requires that users are comfortable with Spack, as they + integrate Spack explicitly in their workflow. Not all users are + willing to do this. + +------------------ +Upstream Bug Fixes +------------------ + +It is not uncommon to discover a bug in an upstream project while +trying to build with Spack. Typically, the bug is in a package that +serves a dependency to something else. This section describes +procedure to work around and ultimately resolve these bugs, while not +delaying the Spack user's main goal. + +^^^^^^^^^^^^^^^^^ +Buggy New Version +^^^^^^^^^^^^^^^^^ + +Sometimes, the old version of a package works fine, but a new version +is buggy. For example, it was once found that `Adios did not build +with hdf5@1.10 <https://github.com/LLNL/spack/issues/1683>`_. If the +old version of ``hdf5`` will work with ``adios``, the suggested +procedure is: + +#. Revert ``adios`` to the old version of ``hdf5``. Put in its + ``adios/package.py``: + + .. code-block:: python + + # Adios does not build with HDF5 1.10 + # See: https://github.com/LLNL/spack/issues/1683 + depends_on('hdf5@:1.9') + +#. Determine whether the problem is with ``hdf5`` or ``adios``, and + report the problem to the appropriate upstream project. In this + case, the problem was with ``adios``. + +#. Once a new version of ``adios`` comes out with the bugfix, modify + ``adios/package.py`` to reflect it: + + .. 
code-block:: python + + # Adios up to v1.10.0 does not build with HDF5 1.10 + # See: https://github.com/LLNL/spack/issues/1683 + depends_on('hdf5@:1.9', when='@:1.10.0') + depends_on('hdf5', when='@1.10.1:') + +^^^^^^^^^^^^^^^^ +No Version Works +^^^^^^^^^^^^^^^^ + +Sometimes, *no* existing versions of a dependency work for a build. +This typically happens when developing a new project: only then does +the developer notice that existing versions of a dependency are all +buggy, or the non-buggy versions are all missing a critical feature. + +In the long run, the upstream project will hopefully fix the bug and +release a new version. But that could take a while, even if a bugfix +has already been pushed to the project's repository. In the meantime, +the Spack user needs things to work. + +The solution is to create an unofficial Spack release of the project, +as soon as the bug is fixed in *some* repository. A study of the `Git +history <https://github.com/citibeth/spack/commits/efischer/develop/var/spack/repos/builtin/packages/py-proj/package.py>`_ +of ``py-proj/package.py`` is instructive here: + +#. On `April 1 <https://github.com/citibeth/spack/commit/44a1d6a96706affe6ef0a11c3a780b91d21d105a>`_, an initial bugfix was identified for the PyProj project + and a pull request submitted to PyProj. Because the upstream + authors had not yet fixed the bug, the ``py-proj`` Spack package + downloads from a forked repository, set up by the package's author. + A non-numeric version number is used to make it easy to upgrade the + package without recomputing checksums; however, this is an + untrusted download method and should not be distributed. The + package author has now become, temporarily, a maintainer of the + upstream project: + + .. code-block:: python + + # We need the benefits of this PR + # https://github.com/jswhit/pyproj/pull/54 + version('citibeth-latlong2', + git='https://github.com/citibeth/pyproj.git', + branch='latlong2') + + +#. By May 14, the upstream project had accepted a pull request with + the required bugfix. At this point, the forked repository was + deleted. However, the upstream project still had not released a + new version with a bugfix. Therefore, a Spack-only release was + created by specifying the desired hash in the main project + repository. The version number ``@1.9.5.1.1`` was chosen for this + "release" because it's a descendent of the officially released + version ``@1.9.5.1``. This is a trusted download method, and can + be released to the Spack community: + + .. code-block:: python + + # This is not a tagged release of pyproj. + # The changes in this "version" fix some bugs, especially with Python3 use. + version('1.9.5.1.1', 'd035e4bc704d136db79b43ab371b27d2', + url='https://www.github.com/jswhit/pyproj/tarball/0be612cc9f972e38b50a90c946a9b353e2ab140f') + + .. note:: + + It would have been simpler to use Spack's Git download method, + which is also a trusted download in this case: + + .. code-block:: python + + # This is not a tagged release of pyproj. + # The changes in this "version" fix some bugs, especially with Python3 use. + version('1.9.5.1.1', + git='https://github.com/jswhit/pyproj.git', + commit='0be612cc9f972e38b50a90c946a9b353e2ab140f') + + .. note:: + + In this case, the upstream project fixed the bug in its + repository in a relatively timely manner. If that had not been + the case, the numbered version in this step could have been + released from the forked repository. + + +#. 
The author of the Spack package has now become an unofficial + release engineer for the upstream project. Depending on the + situation, it may be advisable to put ``preferred=True`` on the + latest *officially released* version. + +#. As of August 31, the upstream project still had not made a new + release with the bugfix. In the meantime, Spack-built ``py-proj`` + provides the bugfix needed by packages depending on it. As long as + this works, there is no particular need for the upstream project to + make a new official release. + +#. If the upstream project releases a new official version with the + bugfix, then the unofficial ``version()`` line should be removed + from the Spack package. + +^^^^^^^ +Patches +^^^^^^^ + +Spack's source patching mechanism provides another way to fix bugs in +upstream projects. This has advantages and disadvantages compared to the procedures above. + +Advantages: + + 1. It can fix bugs in existing released versions, and (probably) + future releases as well. + + 2. It is lightweight, does not require a new fork to be set up. + +Disadvantages: + + 1. It is harder to develop and debug a patch, vs. a branch in a + repository. The user loses the automation provided by version + control systems. + + 2. Although patches of a few lines work OK, large patch files can be + hard to create and maintain. + diff --git a/lib/spack/env/cc b/lib/spack/env/cc index 4b8922178a..60e24979c8 100755 --- a/lib/spack/env/cc +++ b/lib/spack/env/cc @@ -204,9 +204,9 @@ fi # It doesn't work with -rpath. # This variable controls whether they are added. add_rpaths=true -if [[ $mode == ld && "$SPACK_SHORT_SPEC" =~ "darwin" ]]; then +if [[ ($mode == ld || $mode == ccld) && "$SPACK_SHORT_SPEC" =~ "darwin" ]]; then for arg in "$@"; do - if [[ $arg == -r ]]; then + if [[ ($arg == -r && $mode == ld) || ($arg == -Wl,-r && $mode == ccld) ]]; then add_rpaths=false break fi diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py index c3ecfde4f4..e522fdda6d 100644 --- a/lib/spack/llnl/util/filesystem.py +++ b/lib/spack/llnl/util/filesystem.py @@ -39,15 +39,34 @@ from contextlib import contextmanager import llnl.util.tty as tty from llnl.util.lang import dedupe -__all__ = ['set_install_permissions', 'install', 'install_tree', - 'traverse_tree', - 'expand_user', 'working_dir', 'touch', 'touchp', 'mkdirp', - 'force_remove', 'join_path', 'ancestor', 'can_access', - 'filter_file', - 'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink', - 'set_executable', 'copy_mode', 'unset_executable_mode', - 'remove_dead_links', 'remove_linked_tree', - 'fix_darwin_install_name', 'find_libraries', 'LibraryList'] +__all__ = [ + 'FileFilter', + 'LibraryList', + 'ancestor', + 'can_access', + 'change_sed_delimiter', + 'copy_mode', + 'expand_user', + 'filter_file', + 'find_libraries', + 'fix_darwin_install_name', + 'force_remove', + 'force_symlink', + 'install', + 'install_tree', + 'is_exe', + 'join_path', + 'mkdirp', + 'remove_dead_links', + 'remove_if_dead_link', + 'remove_linked_tree', + 'set_executable', + 'set_install_permissions', + 'touch', + 'touchp', + 'traverse_tree', + 'unset_executable_mode', + 'working_dir'] def filter_file(regex, repl, *filenames, **kwargs): @@ -388,10 +407,20 @@ def remove_dead_links(root): """ for file in os.listdir(root): path = join_path(root, file) - if os.path.islink(path): - real_path = os.path.realpath(path) - if not os.path.exists(real_path): - os.unlink(path) + remove_if_dead_link(path) + + +def remove_if_dead_link(path): + """ + Removes the 
argument if it is a dead link, does nothing otherwise + + Args: + path: the potential dead link + """ + if os.path.islink(path): + real_path = os.path.realpath(path) + if not os.path.exists(real_path): + os.unlink(path) def remove_linked_tree(path): diff --git a/lib/spack/llnl/util/lock.py b/lib/spack/llnl/util/lock.py index f5f53101ae..2e44a94798 100644 --- a/lib/spack/llnl/util/lock.py +++ b/lib/spack/llnl/util/lock.py @@ -28,9 +28,13 @@ import errno import time import socket +import llnl.util.tty as tty + + __all__ = ['Lock', 'LockTransaction', 'WriteTransaction', 'ReadTransaction', 'LockError'] + # Default timeout in seconds, after which locks will raise exceptions. _default_timeout = 60 @@ -41,51 +45,86 @@ _sleep_time = 1e-5 class Lock(object): """This is an implementation of a filesystem lock using Python's lockf. - In Python, `lockf` actually calls `fcntl`, so this should work with any - filesystem implementation that supports locking through the fcntl calls. - This includes distributed filesystems like Lustre (when flock is enabled) - and recent NFS versions. - + In Python, `lockf` actually calls `fcntl`, so this should work with + any filesystem implementation that supports locking through the fcntl + calls. This includes distributed filesystems like Lustre (when flock + is enabled) and recent NFS versions. """ - def __init__(self, file_path): - self._file_path = file_path - self._fd = None + def __init__(self, path, start=0, length=0): + """Construct a new lock on the file at ``path``. + + By default, the lock applies to the whole file. Optionally, + caller can specify a byte range beginning ``start`` bytes from + the start of the file and extending ``length`` bytes from there. + + This exposes a subset of fcntl locking functionality. It does + not currently expose the ``whence`` parameter -- ``whence`` is + always os.SEEK_SET and ``start`` is always evaluated from the + beginning of the file. + """ + self.path = path + self._file = None self._reads = 0 self._writes = 0 - def _lock(self, op, timeout): + # byte range parameters + self._start = start + self._length = length + + # PID and host of lock holder + self.pid = self.old_pid = None + self.host = self.old_host = None + + def _lock(self, op, timeout=_default_timeout): """This takes a lock using POSIX locks (``fnctl.lockf``). - The lock is implemented as a spin lock using a nonblocking - call to lockf(). + The lock is implemented as a spin lock using a nonblocking call + to lockf(). On acquiring an exclusive lock, the lock writes this process's - pid and host to the lock file, in case the holding process - needs to be killed later. + pid and host to the lock file, in case the holding process needs + to be killed later. If the lock times out, it raises a ``LockError``. """ start_time = time.time() while (time.time() - start_time) < timeout: try: - # If this is already open read-only and we want to - # upgrade to an exclusive write lock, close first. - if self._fd is not None: - flags = fcntl.fcntl(self._fd, fcntl.F_GETFL) - if op == fcntl.LOCK_EX and flags | os.O_RDONLY: - os.close(self._fd) - self._fd = None - - if self._fd is None: - mode = os.O_RDWR if op == fcntl.LOCK_EX else os.O_RDONLY - self._fd = os.open(self._file_path, mode) - - fcntl.lockf(self._fd, op | fcntl.LOCK_NB) + # If we could write the file, we'd have opened it 'r+'. + # Raise an error when we attempt to upgrade to a write lock. 
+ if op == fcntl.LOCK_EX: + if self._file and self._file.mode == 'r': + raise LockError( + "Can't take exclusive lock on read-only file: %s" + % self.path) + + # Create file and parent directories if they don't exist. + if self._file is None: + self._ensure_parent_directory() + + # Prefer to open 'r+' to allow upgrading to write + # lock later if possible. Open read-only if we can't + # write the lock file at all. + os_mode, fd_mode = (os.O_RDWR | os.O_CREAT), 'r+' + if os.path.exists(self.path) and not os.access( + self.path, os.W_OK): + os_mode, fd_mode = os.O_RDONLY, 'r' + + fd = os.open(self.path, os_mode) + self._file = os.fdopen(fd, fd_mode) + + # Try to get the lock (will raise if not available.) + fcntl.lockf(self._file, op | fcntl.LOCK_NB, + self._length, self._start, os.SEEK_SET) + + # All locks read the owner PID and host + self._read_lock_data() + + # Exclusive locks write their PID/host if op == fcntl.LOCK_EX: - os.write( - self._fd, - "pid=%s,host=%s" % (os.getpid(), socket.getfqdn())) + self._write_lock_data() + return except IOError as error: @@ -97,6 +136,40 @@ class Lock(object): raise LockError("Timed out waiting for lock.") + def _ensure_parent_directory(self): + parent = os.path.dirname(self.path) + try: + os.makedirs(parent) + return True + except OSError as e: + # makedirs can fail when diretory already exists. + if not (e.errno == errno.EEXIST and os.path.isdir(parent) or + e.errno == errno.EISDIR): + raise + + def _read_lock_data(self): + """Read PID and host data out of the file if it is there.""" + line = self._file.read() + if line: + pid, host = line.strip().split(',') + _, _, self.pid = pid.rpartition('=') + _, _, self.host = host.rpartition('=') + + def _write_lock_data(self): + """Write PID and host data to the file, recording old values.""" + self.old_pid = self.pid + self.old_host = self.host + + self.pid = os.getpid() + self.host = socket.getfqdn() + + # write pid, host to disk to sync over FS + self._file.seek(0) + self._file.write("pid=%s,host=%s" % (self.pid, self.host)) + self._file.truncate() + self._file.flush() + os.fsync(self._file.fileno()) + def _unlock(self): """Releases a lock using POSIX locks (``fcntl.lockf``) @@ -104,9 +177,10 @@ class Lock(object): be masquerading as write locks, but this removes either. """ - fcntl.lockf(self._fd, fcntl.LOCK_UN) - os.close(self._fd) - self._fd = None + fcntl.lockf(self._file, fcntl.LOCK_UN, + self._length, self._start, os.SEEK_SET) + self._file.close() + self._file = None def acquire_read(self, timeout=_default_timeout): """Acquires a recursive, shared lock for reading. @@ -120,7 +194,9 @@ class Lock(object): """ if self._reads == 0 and self._writes == 0: - self._lock(fcntl.LOCK_SH, timeout) # can raise LockError. + tty.debug('READ LOCK: {0.path}[{0._start}:{0._length}] [Acquiring]' + .format(self)) + self._lock(fcntl.LOCK_SH, timeout=timeout) # can raise LockError. self._reads += 1 return True else: @@ -139,7 +215,10 @@ class Lock(object): """ if self._writes == 0: - self._lock(fcntl.LOCK_EX, timeout) # can raise LockError. + tty.debug( + 'WRITE LOCK: {0.path}[{0._start}:{0._length}] [Acquiring]' + .format(self)) + self._lock(fcntl.LOCK_EX, timeout=timeout) # can raise LockError. self._writes += 1 return True else: @@ -159,6 +238,8 @@ class Lock(object): assert self._reads > 0 if self._reads == 1 and self._writes == 0: + tty.debug('READ LOCK: {0.path}[{0._start}:{0._length}] [Released]' + .format(self)) self._unlock() # can raise LockError. 
self._reads -= 1 return True @@ -179,6 +260,8 @@ class Lock(object): assert self._writes > 0 if self._writes == 1 and self._reads == 0: + tty.debug('WRITE LOCK: {0.path}[{0._start}:{0._length}] [Released]' + .format(self)) self._unlock() # can raise LockError. self._writes -= 1 return True diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py index c7cfcf9328..98fb3b6917 100644 --- a/lib/spack/spack/build_environment.py +++ b/lib/spack/spack/build_environment.py @@ -381,8 +381,11 @@ def set_module_variables_for_package(pkg, module): def get_rpath_deps(pkg): - """We only need to RPATH immediate dependencies.""" - return pkg.spec.dependencies(deptype='link') + """Return immediate or transitive RPATHs depending on the package.""" + if pkg.transitive_rpaths: + return [d for d in pkg.spec.traverse(root=False, deptype=('link'))] + else: + return pkg.spec.dependencies(deptype='link') def get_rpaths(pkg): @@ -400,6 +403,21 @@ def get_rpaths(pkg): return rpaths +def get_std_cmake_args(cmake_pkg): + # standard CMake arguments + ret = ['-DCMAKE_INSTALL_PREFIX=%s' % cmake_pkg.prefix, + '-DCMAKE_BUILD_TYPE=RelWithDebInfo', + '-DCMAKE_VERBOSE_MAKEFILE=ON'] + if platform.mac_ver()[0]: + ret.append('-DCMAKE_FIND_FRAMEWORK=LAST') + + # Set up CMake rpath + ret.append('-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE') + ret.append('-DCMAKE_INSTALL_RPATH=%s' % ":".join(get_rpaths(cmake_pkg))) + + return ret + + def parent_class_modules(cls): """ Get list of super class modules that are all descend from spack.Package diff --git a/lib/spack/spack/cmd/debug.py b/lib/spack/spack/cmd/debug.py index 958eb829b4..757c5bca80 100644 --- a/lib/spack/spack/cmd/debug.py +++ b/lib/spack/spack/cmd/debug.py @@ -23,6 +23,7 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import os +import re from datetime import datetime from glob import glob @@ -53,8 +54,12 @@ def _debug_tarball_suffix(): if not os.path.isdir('.git'): return 'nobranch.nogit.%s' % suffix + # Get symbolic branch name and strip any special chars (mainly '/') symbolic = git( 'rev-parse', '--abbrev-ref', '--short', 'HEAD', output=str).strip() + symbolic = re.sub(r'[^\w.-]', '-', symbolic) + + # Get the commit hash too. 
commit = git( 'rev-parse', '--short', 'HEAD', output=str).strip() @@ -69,12 +74,23 @@ def create_db_tarball(args): tarball_name = "spack-db.%s.tar.gz" % _debug_tarball_suffix() tarball_path = os.path.abspath(tarball_name) - with working_dir(spack.spack_root): + base = os.path.basename(spack.install_path) + transform_args = [] + if 'GNU' in tar('--version', output=str): + transform_args = ['--transform', 's/^%s/%s/' % (base, tarball_name)] + else: + transform_args = ['-s', '/^%s/%s/' % (base, tarball_name)] + + wd = os.path.dirname(spack.install_path) + with working_dir(wd): files = [spack.installed_db._index_path] - files += glob('%s/*/*/*/.spack/spec.yaml' % spack.install_path) + files += glob('%s/*/*/*/.spack/spec.yaml' % base) files = [os.path.relpath(f) for f in files] - tar('-czf', tarball_path, *files) + args = ['-czf', tarball_path] + args += transform_args + args += files + tar(*args) tty.msg('Created %s' % tarball_name) diff --git a/lib/spack/spack/cmd/diy.py b/lib/spack/spack/cmd/diy.py index 9833e8cdce..08386cac07 100644 --- a/lib/spack/spack/cmd/diy.py +++ b/lib/spack/spack/cmd/diy.py @@ -65,43 +65,40 @@ def diy(self, args): if len(specs) > 1: tty.die("spack diy only takes one spec.") - # Take a write lock before checking for existence. - with spack.installed_db.write_transaction(): - spec = specs[0] - if not spack.repo.exists(spec.name): - tty.warn("No such package: %s" % spec.name) - create = tty.get_yes_or_no("Create this package?", default=False) - if not create: - tty.msg("Exiting without creating.") - sys.exit(1) - else: - tty.msg("Running 'spack edit -f %s'" % spec.name) - edit_package(spec.name, spack.repo.first_repo(), None, True) - return + spec = specs[0] + if not spack.repo.exists(spec.name): + tty.warn("No such package: %s" % spec.name) + create = tty.get_yes_or_no("Create this package?", default=False) + if not create: + tty.msg("Exiting without creating.") + sys.exit(1) + else: + tty.msg("Running 'spack edit -f %s'" % spec.name) + edit_package(spec.name, spack.repo.first_repo(), None, True) + return - if not spec.versions.concrete: - tty.die( - "spack diy spec must have a single, concrete version. " - "Did you forget a package version number?") + if not spec.versions.concrete: + tty.die( + "spack diy spec must have a single, concrete version. " + "Did you forget a package version number?") - spec.concretize() - package = spack.repo.get(spec) + spec.concretize() + package = spack.repo.get(spec) - if package.installed: - tty.error("Already installed in %s" % package.prefix) - tty.msg("Uninstall or try adding a version suffix for this " - "DIY build.") - sys.exit(1) + if package.installed: + tty.error("Already installed in %s" % package.prefix) + tty.msg("Uninstall or try adding a version suffix for this DIY build.") + sys.exit(1) - # Forces the build to run out of the current directory. - package.stage = DIYStage(os.getcwd()) + # Forces the build to run out of the current directory. + package.stage = DIYStage(os.getcwd()) - # TODO: make this an argument, not a global. - spack.do_checksum = False + # TODO: make this an argument, not a global. + spack.do_checksum = False - package.do_install( - keep_prefix=args.keep_prefix, - ignore_deps=args.ignore_deps, - verbose=not args.quiet, - keep_stage=True, # don't remove source dir for DIY. - dirty=args.dirty) + package.do_install( + keep_prefix=args.keep_prefix, + install_deps=not args.ignore_deps, + verbose=not args.quiet, + keep_stage=True, # don't remove source dir for DIY. 
+ dirty=args.dirty) diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py index 634a603ddc..cc9b6c73ee 100644 --- a/lib/spack/spack/cmd/install.py +++ b/lib/spack/spack/cmd/install.py @@ -37,6 +37,10 @@ def setup_parser(subparser): '-i', '--ignore-dependencies', action='store_true', dest='ignore_deps', help="Do not try to install dependencies of requested packages.") subparser.add_argument( + '-d', '--dependencies-only', action='store_true', dest='deps_only', + help='Install dependencies of this package, ' + + 'but not the package itself.') + subparser.add_argument( '-j', '--jobs', action='store', type=int, help="Explicitly set number of make jobs. Default is #cpus.") subparser.add_argument( @@ -82,16 +86,16 @@ def install(parser, args): specs = spack.cmd.parse_specs(args.packages, concretize=True) for spec in specs: package = spack.repo.get(spec) - with spack.installed_db.write_transaction(): - package.do_install( - keep_prefix=args.keep_prefix, - keep_stage=args.keep_stage, - ignore_deps=args.ignore_deps, - make_jobs=args.jobs, - run_tests=args.run_tests, - verbose=args.verbose, - fake=args.fake, - dirty=args.dirty, - explicit=True, - stop_at=args.stop_at - ) + package.do_install( + keep_prefix=args.keep_prefix, + keep_stage=args.keep_stage, + install_deps=not args.ignore_deps, + install_self=not args.deps_only, + make_jobs=args.jobs, + run_tests=args.run_tests, + verbose=args.verbose, + fake=args.fake, + dirty=args.dirty, + explicit=True, + stop_at=args.stop_at + ) diff --git a/lib/spack/spack/cmd/list.py b/lib/spack/spack/cmd/list.py index c921efd1bd..e1389df69f 100644 --- a/lib/spack/spack/cmd/list.py +++ b/lib/spack/spack/cmd/list.py @@ -22,36 +22,51 @@ # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## +import argparse +import cgi +import fnmatch +import re import sys +from StringIO import StringIO + import llnl.util.tty as tty -import argparse +import spack from llnl.util.tty.colify import colify -import spack -import fnmatch -import re +description = "Print available spack packages to stdout in different formats" + +formatters = {} -description = "List available spack packages" + +def formatter(func): + """Decorator used to register formatters""" + formatters[func.__name__] = func + return func def setup_parser(subparser): subparser.add_argument( 'filter', nargs=argparse.REMAINDER, - help='Optional glob patterns to filter results.') - subparser.add_argument( - '-s', '--sensitive', action='store_true', default=False, - help='Use case-sensitive filtering. Default is case sensitive, ' - 'unless the query contains a capital letter.') + help='Optional case-insensitive glob patterns to filter results.') subparser.add_argument( '-d', '--search-description', action='store_true', default=False, help='Filtering will also search the description for a match.') + subparser.add_argument( + '--format', default='name_only', choices=formatters, + help='Format to be used to print the output [default: name_only]') -def list(parser, args): - # Start with all package names. 
- pkgs = set(spack.repo.all_package_names()) +def filter_by_name(pkgs, args): + """ + Filters the sequence of packages according to user prescriptions - # filter if a filter arg was provided + Args: + pkgs: sequence of packages + args: parsed command line arguments + + Returns: + filtered and sorted list of packages + """ if args.filter: res = [] for f in args.filter: @@ -60,10 +75,7 @@ def list(parser, args): else: r = fnmatch.translate(f) - re_flags = re.I - if any(l.isupper for l in f) or args.sensitive: - re_flags = 0 - rc = re.compile(r, flags=re_flags) + rc = re.compile(r, flags=re.IGNORECASE) res.append(rc) if args.search_description: @@ -80,11 +92,91 @@ def list(parser, args): return f.match(p) pkgs = [p for p in pkgs if any(match(p, f) for f in res)] - # sort before displaying. - sorted_packages = sorted(pkgs, key=lambda s: s.lower()) + return sorted(pkgs, key=lambda s: s.lower()) + - # Print all the package names in columns +@formatter +def name_only(pkgs): indent = 0 if sys.stdout.isatty(): - tty.msg("%d packages." % len(sorted_packages)) - colify(sorted_packages, indent=indent) + tty.msg("%d packages." % len(pkgs)) + colify(pkgs, indent=indent) + + +@formatter +def rst(pkgs): + """Print out information on all packages in restructured text.""" + + def github_url(pkg): + """Link to a package file on github.""" + url = 'https://github.com/LLNL/spack/blob/develop/var/spack/repos/builtin/packages/{0}/package.py' + return url.format(pkg.name) + + def rst_table(elts): + """Print out a RST-style table.""" + cols = StringIO() + ncol, widths = colify(elts, output=cols, tty=True) + header = ' '.join('=' * (w - 1) for w in widths) + return '%s\n%s%s' % (header, cols.getvalue(), header) + + pkg_names = pkgs + pkgs = [spack.repo.get(name) for name in pkg_names] + + print('.. _package-list:') + print('') + print('============') + print('Package List') + print('============') + print('') + print('This is a list of things you can install using Spack. It is') + print('automatically generated based on the packages in the latest Spack') + print('release.') + print('') + print('Spack currently has %d mainline packages:' % len(pkgs)) + print('') + print(rst_table('`%s`_' % p for p in pkg_names)) + print('') + + # Output some text for each package. + for pkg in pkgs: + print('-----') + print('') + print('.. _%s:' % pkg.name) + print('') + # Must be at least 2 long, breaks for single letter packages like R. 
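# Illustrative sketch (not part of this diff): ``rst`` above and
# ``name_only`` are both registered through the ``formatters`` dict and the
# ``@formatter`` decorator, which is what lets ``--format`` use
# ``choices=formatters`` and dispatch with ``formatters[args.format](...)``.
# A minimal, self-contained version of that registry pattern (toy package
# names, and ``count`` is a hypothetical extra format, not Spack code):

formatters = {}

def formatter(func):
    """Register a formatter under its function name."""
    formatters[func.__name__] = func
    return func

@formatter
def name_only(pkgs):
    print('\n'.join(pkgs))

@formatter
def count(pkgs):
    print('%d packages' % len(pkgs))

formatters['count'](['hdf5', 'mpich', 'zlib'])   # -> 3 packages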
+ print('-' * max(len(pkg.name), 2)) + print(pkg.name) + print('-' * max(len(pkg.name), 2)) + print('') + print('Homepage:') + print(' * `%s <%s>`__' % (cgi.escape(pkg.homepage), pkg.homepage)) + print('') + print('Spack package:') + print(' * `%s/package.py <%s>`__' % (pkg.name, github_url(pkg))) + print('') + if pkg.versions: + print('Versions:') + print(' ' + ', '.join(str(v) for v in + reversed(sorted(pkg.versions)))) + print('') + + for deptype in spack.alldeps: + deps = pkg.dependencies_of_type(deptype) + if deps: + print('%s Dependencies' % deptype.capitalize()) + print(' ' + ', '.join('%s_' % d if d in pkg_names + else d for d in deps)) + print('') + + print('Description:') + print(pkg.format_doc(indent=2)) + print('') + + +def list(parser, args): + # Retrieve the names of all the packages + pkgs = set(spack.repo.all_package_names()) + # Filter the set appropriately + sorted_packages = filter_by_name(pkgs, args) + # Print to stdout + formatters[args.format](sorted_packages) diff --git a/lib/spack/spack/cmd/md5.py b/lib/spack/spack/cmd/md5.py index 506cf0913f..2ae279a41e 100644 --- a/lib/spack/spack/cmd/md5.py +++ b/lib/spack/spack/cmd/md5.py @@ -25,6 +25,7 @@ import argparse import hashlib import os +from urlparse import urlparse import llnl.util.tty as tty import spack.util.crypto @@ -49,13 +50,23 @@ def compute_md5_checksum(url): return value +def normalized(files): + for p in files: + result = urlparse(p) + value = p + if not result.scheme: + value = os.path.abspath(p) + yield value + + def md5(parser, args): if not args.files: setup_parser.parser.print_help() return 1 + urls = [x for x in normalized(args.files)] results = [] - for url in args.files: + for url in urls: try: checksum = compute_md5_checksum(url) results.append((checksum, url)) @@ -70,4 +81,4 @@ def md5(parser, args): checksum = 'checksum' if len(results) == 1 else 'checksums' tty.msg("%d MD5 %s:" % (len(results), checksum)) for checksum, url in results: - print "%s %s" % (checksum, url) + print("{0} {1}".format(checksum, url)) diff --git a/lib/spack/spack/cmd/package_list.py b/lib/spack/spack/cmd/package_list.py deleted file mode 100644 index 42f408af96..0000000000 --- a/lib/spack/spack/cmd/package_list.py +++ /dev/null @@ -1,104 +0,0 @@ -############################################################################## -# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. -# Produced at the Lawrence Livermore National Laboratory. -# -# This file is part of Spack. -# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. -# LLNL-CODE-647188 -# -# For details, see https://github.com/llnl/spack -# Please also see the LICENSE file for our notice and the LGPL. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU Lesser General Public License (as -# published by the Free Software Foundation) version 2.1, February 1999. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and -# conditions of the GNU Lesser General Public License for more details. 
-# -# You should have received a copy of the GNU Lesser General Public -# License along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -############################################################################## -import cgi -from StringIO import StringIO -from llnl.util.tty.colify import * -import spack - -description = "Print a list of all packages in reStructuredText." - - -def github_url(pkg): - """Link to a package file on github.""" - url = "https://github.com/LLNL/spack/blob/develop/var/spack/repos/builtin/packages/{0}/package.py" - return url.format(pkg.name) - - -def rst_table(elts): - """Print out a RST-style table.""" - cols = StringIO() - ncol, widths = colify(elts, output=cols, tty=True) - header = " ".join("=" * (w - 1) for w in widths) - return "%s\n%s%s" % (header, cols.getvalue(), header) - - -def print_rst_package_list(): - """Print out information on all packages in restructured text.""" - pkgs = sorted(spack.repo.all_packages(), key=lambda s: s.name.lower()) - pkg_names = [p.name for p in pkgs] - - print ".. _package-list:" - print - print "============" - print "Package List" - print "============" - print - print "This is a list of things you can install using Spack. It is" - print "automatically generated based on the packages in the latest Spack" - print "release." - print - print "Spack currently has %d mainline packages:" % len(pkgs) - print - print rst_table("`%s`_" % p for p in pkg_names) - print - - # Output some text for each package. - for pkg in pkgs: - print "-----" - print - print ".. _%s:" % pkg.name - print - # Must be at least 2 long, breaks for single letter packages like R. - print "-" * max(len(pkg.name), 2) - print pkg.name - print "-" * max(len(pkg.name), 2) - print - print "Homepage:" - print " * `%s <%s>`__" % (cgi.escape(pkg.homepage), pkg.homepage) - print - print "Spack package:" - print " * `%s/package.py <%s>`__" % (pkg.name, github_url(pkg)) - print - if pkg.versions: - print "Versions:" - print " " + ", ".join(str(v) for v in - reversed(sorted(pkg.versions))) - print - - for deptype in spack.alldeps: - deps = pkg.dependencies_of_type(deptype) - if deps: - print "%s Dependencies" % deptype.capitalize() - print " " + ", ".join("%s_" % d if d in pkg_names - else d for d in deps) - print - - print "Description:" - print pkg.format_doc(indent=2) - print - - -def package_list(parser, args): - print_rst_package_list() diff --git a/lib/spack/spack/cmd/test_install.py b/lib/spack/spack/cmd/test_install.py index 8e7173e9a2..c35f2740a0 100644 --- a/lib/spack/spack/cmd/test_install.py +++ b/lib/spack/spack/cmd/test_install.py @@ -180,7 +180,8 @@ def install_single_spec(spec, number_of_jobs): start_time = time.time() package.do_install(keep_prefix=False, keep_stage=True, - ignore_deps=False, + install_deps=True, + install_self=True, make_jobs=number_of_jobs, verbose=True, fake=False) diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py index 8957d1c908..bbcd2e787c 100644 --- a/lib/spack/spack/cmd/uninstall.py +++ b/lib/spack/spack/cmd/uninstall.py @@ -54,9 +54,10 @@ def setup_parser(subparser): subparser.add_argument( '-a', '--all', action='store_true', dest='all', help="USE CAREFULLY. Remove ALL installed packages that match each " - "supplied spec. i.e., if you say uninstall libelf, ALL versions " - "of libelf are uninstalled. This is both useful and dangerous, " - "like rm -r.") + "supplied spec. 
i.e., if you say uninstall `libelf`," + " ALL versions of `libelf` are uninstalled. If no spec is " + "supplied all installed software will be uninstalled. This " + "is both useful and dangerous, like rm -r.") subparser.add_argument( '-d', '--dependents', action='store_true', dest='dependents', @@ -99,7 +100,7 @@ def concretize_specs(specs, allow_multiple_matches=False, force=False): has_errors = True # No installed package matches the query - if len(matching) == 0 and not force: + if len(matching) == 0: tty.error("%s does not match any installed packages." % spec) has_errors = True @@ -157,44 +158,49 @@ def do_uninstall(specs, force): item.do_uninstall(force=force) -def uninstall(parser, args): - if not args.packages: - tty.die("uninstall requires at least one package argument.") - - with spack.installed_db.write_transaction(): +def get_uninstall_list(args): + specs = [any] + if args.packages: specs = spack.cmd.parse_specs(args.packages) - # Gets the list of installed specs that match the ones give via cli - # takes care of '-a' is given in the cli - uninstall_list = concretize_specs(specs, args.all, args.force) - dependent_list = installed_dependents( - uninstall_list) # takes care of '-d' - - # Process dependent_list and update uninstall_list - has_error = False - if dependent_list and not args.dependents and not args.force: - for spec, lst in dependent_list.items(): - tty.error("Will not uninstall %s" % - spec.format("$_$@$%@$#", color=True)) - print('') - print("The following packages depend on it:") - spack.cmd.display_specs(lst, **display_args) - print('') - has_error = True - elif args.dependents: - for key, lst in dependent_list.items(): - uninstall_list.extend(lst) - uninstall_list = list(set(uninstall_list)) - - if has_error: - tty.die('You can use spack uninstall --dependents ' - 'to uninstall these dependencies as well') - - if not args.yes_to_all: - tty.msg("The following packages will be uninstalled : ") + # Gets the list of installed specs that match the ones give via cli + # takes care of '-a' is given in the cli + uninstall_list = concretize_specs(specs, args.all, args.force) + # Takes care of '-d' + dependent_list = installed_dependents(uninstall_list) + # Process dependent_list and update uninstall_list + has_error = False + if dependent_list and not args.dependents and not args.force: + for spec, lst in dependent_list.items(): + tty.error("Will not uninstall %s" % + spec.format("$_$@$%@$#", color=True)) print('') - spack.cmd.display_specs(uninstall_list, **display_args) + print("The following packages depend on it:") + spack.cmd.display_specs(lst, **display_args) print('') - spack.cmd.ask_for_confirmation('Do you want to proceed ? ') + has_error = True + elif args.dependents: + for key, lst in dependent_list.items(): + uninstall_list.extend(lst) + uninstall_list = list(set(uninstall_list)) + if has_error: + tty.die('You can use spack uninstall --dependents ' + 'to uninstall these dependencies as well') + + return uninstall_list + + +def uninstall(parser, args): + if not args.packages and not args.all: + tty.die("uninstall requires at least one package argument.") + + uninstall_list = get_uninstall_list(args) + + if not args.yes_to_all: + tty.msg("The following packages will be uninstalled : ") + print('') + spack.cmd.display_specs(uninstall_list, **display_args) + print('') + spack.cmd.ask_for_confirmation('Do you want to proceed ? 
') - # Uninstall everything on the list - do_uninstall(uninstall_list, args.force) + # Uninstall everything on the list + do_uninstall(uninstall_list, args.force) diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py index 726dee62e3..9c9e9e10ff 100644 --- a/lib/spack/spack/concretize.py +++ b/lib/spack/spack/concretize.py @@ -33,6 +33,7 @@ or user preferences. TODO: make this customizable and allow users to configure concretization policies. """ +from __future__ import print_function import spack import spack.spec import spack.compilers @@ -42,6 +43,7 @@ from spack.version import * from functools import partial from itertools import chain from spack.config import * +import spack.preferred_packages class DefaultConcretizer(object): @@ -160,23 +162,59 @@ class DefaultConcretizer(object): # If there are known available versions, return the most recent # version that satisfies the spec pkg = spec.package - cmp_versions = partial(spack.pkgsort.version_compare, spec.name) - valid_versions = sorted( - [v for v in pkg.versions - if any(v.satisfies(sv) for sv in spec.versions)], - cmp=cmp_versions) - def prefer_key(v): - return pkg.versions.get(Version(v)).get('preferred', False) - valid_versions.sort(key=prefer_key, reverse=True) + # ---------- Produce prioritized list of versions + # Get list of preferences from packages.yaml + preferred = spack.pkgsort + # NOTE: spack.pkgsort == spack.preferred_packages.PreferredPackages() + + yaml_specs = [ + x[0] for x in + preferred._spec_for_pkgname(spec.name, 'version', None)] + n = len(yaml_specs) + yaml_index = dict( + [(spc, n - index) for index, spc in enumerate(yaml_specs)]) + + # List of versions we could consider, in sorted order + unsorted_versions = [ + v for v in pkg.versions + if any(v.satisfies(sv) for sv in spec.versions)] + + # The keys below show the order of precedence of factors used + # to select a version when concretizing. The item with + # the "largest" key will be selected. + # + # NOTE: When COMPARING VERSIONS, the '@develop' version is always + # larger than other versions. BUT when CONCRETIZING, + # the largest NON-develop version is selected by + # default. + keys = [( + # ------- Special direction from the user + # Respect order listed in packages.yaml + yaml_index.get(v, -1), + + # The preferred=True flag (packages or packages.yaml or both?) + pkg.versions.get(Version(v)).get('preferred', False), + + # ------- Regular case: use latest non-develop version by default. + # Avoid @develop version, which would otherwise be the "largest" + # in straight version comparisons + not v.isdevelop(), + + # Compare the version itself + # This includes the logic: + # a) develop > everything (disabled by "not v.isdevelop() above) + # b) numeric > non-numeric + # c) Numeric or string comparison + v) for v in unsorted_versions] + keys.sort(reverse=True) + + # List of versions in complete sorted order + valid_versions = [x[-1] for x in keys] + # -------------------------- if valid_versions: - # Disregard @develop and take the next valid version - if ver(valid_versions[0]) == ver('develop') and \ - len(valid_versions) > 1: - spec.versions = ver([valid_versions[1]]) - else: - spec.versions = ver([valid_versions[0]]) + spec.versions = ver([valid_versions[0]]) else: # We don't know of any SAFE versions that match the given # spec. 
Grab the spec's versions and grab the highest @@ -255,7 +293,7 @@ class DefaultConcretizer(object): spec.architecture = spack.architecture.Arch() return True - # Concretize the operating_system and target based of the spec + # Concretize the operating_system and target based of the spec ret = any((self._concretize_platform(spec), self._concretize_operating_system(spec), self._concretize_target(spec))) diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py index f73d3765c8..e9bd07d92c 100644 --- a/lib/spack/spack/database.py +++ b/lib/spack/spack/database.py @@ -33,7 +33,7 @@ The database serves two purposes: 2. It will allow us to track external installations as well as lost packages and their dependencies. -Prior ot the implementation of this store, a direcotry layout served +Prior to the implementation of this store, a directory layout served as the authoritative database of packages in Spack. This module provides a cache and a sanity checking mechanism for what is in the filesystem. @@ -156,13 +156,13 @@ class Database(object): self._index_path = join_path(self._db_dir, 'index.yaml') self._lock_path = join_path(self._db_dir, 'lock') + # This is for other classes to use to lock prefix directories. + self.prefix_lock_path = join_path(self._db_dir, 'prefix_lock') + # Create needed directories and files if not os.path.exists(self._db_dir): mkdirp(self._db_dir) - if not os.path.exists(self._lock_path): - touch(self._lock_path) - # initialize rest of state. self.lock = Lock(self._lock_path) self._data = {} diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py index 86acd075cd..dda9fb32d8 100644 --- a/lib/spack/spack/directives.py +++ b/lib/spack/spack/directives.py @@ -212,7 +212,10 @@ def _depends_on(pkg, spec, when=None, type=None): @directive(('dependencies', '_deptypes')) def depends_on(pkg, spec, when=None, type=None): - """Creates a dict of deps with specs defining when they apply.""" + """Creates a dict of deps with specs defining when they apply. + This directive is to be used inside a Package definition to declare + that the package requires other packages to be built first. + @see The section "Dependency specs" in the Spack Packaging Guide.""" _depends_on(pkg, spec, when=when, type=type) diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py index 21802c4556..4b8829c32f 100644 --- a/lib/spack/spack/fetch_strategy.py +++ b/lib/spack/spack/fetch_strategy.py @@ -159,6 +159,8 @@ class URLFetchStrategy(FetchStrategy): self.expand_archive = kwargs.get('expand', True) + self.extension = kwargs.get('extension', None) + if not self.url: raise ValueError("URLFetchStrategy requires a url for fetching.") @@ -270,7 +272,7 @@ class URLFetchStrategy(FetchStrategy): "URLFetchStrategy couldn't find archive file", "Failed on expand() for URL %s" % self.url) - decompress = decompressor_for(self.archive_file) + decompress = decompressor_for(self.archive_file, self.extension) # Expand all tarballs in their own directory to contain # exploding tarballs. 
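The version selection logic in the ``concretize.py`` hunk above builds one tuple of keys per candidate version and picks the candidate whose tuple sorts largest. A minimal, self-contained sketch of that ordering with made-up data (plain tuples stand in for Spack's ``Version`` objects and the ``packages.yaml`` lookup):

    # toy candidates: (version, isdevelop, preferred flag, packages.yaml rank)
    candidates = [
        ('develop', True,  False, -1),
        ('2.1.0',   False, False, -1),
        ('2.0.3',   False, True,  -1),  # preferred=True in the package file
        ('1.9.9',   False, False,  1),  # explicitly listed in packages.yaml
    ]

    def sort_key(candidate):
        name, isdevelop, preferred, yaml_rank = candidate
        # Same precedence as the ``keys`` tuples above: packages.yaml order,
        # then the preferred flag, then "not develop", then the version
        # itself (approximated here by a numeric tuple).
        numeric = () if isdevelop else tuple(int(p) for p in name.split('.'))
        return (yaml_rank, preferred, not isdevelop, numeric)

    print(max(candidates, key=sort_key)[0])
    # -> 1.9.9: the packages.yaml entry outranks the preferred flag, which
    #    in turn outranks plain "newest non-develop version".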
diff --git a/lib/spack/spack/file_cache.py b/lib/spack/spack/file_cache.py index 0a66166fd8..31ae009836 100644 --- a/lib/spack/spack/file_cache.py +++ b/lib/spack/spack/file_cache.py @@ -77,10 +77,7 @@ class FileCache(object): def _get_lock(self, key): """Create a lock for a key, if necessary, and return a lock object.""" if key not in self._locks: - lock_file = self._lock_path(key) - if not os.path.exists(lock_file): - touch(lock_file) - self._locks[key] = Lock(lock_file) + self._locks[key] = Lock(self._lock_path(key)) return self._locks[key] def init_entry(self, key): diff --git a/lib/spack/spack/hooks/licensing.py b/lib/spack/spack/hooks/licensing.py index 9010b84154..a99099749c 100644 --- a/lib/spack/spack/hooks/licensing.py +++ b/lib/spack/spack/hooks/licensing.py @@ -157,6 +157,11 @@ def symlink_license(pkg): license_dir = os.path.dirname(link_name) if not os.path.exists(license_dir): mkdirp(license_dir) + + # If example file already exists, overwrite it with a symlink + if os.path.exists(link_name): + os.remove(link_name) + if os.path.exists(target): os.symlink(target, link_name) tty.msg("Added local symlink %s to global license file" % diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py index 2361fb5448..0f72e4e25c 100644 --- a/lib/spack/spack/mirror.py +++ b/lib/spack/spack/mirror.py @@ -52,7 +52,14 @@ def mirror_archive_filename(spec, fetcher): if isinstance(fetcher, fs.URLFetchStrategy): if fetcher.expand_archive: # If we fetch with a URLFetchStrategy, use URL's archive type - ext = url.downloaded_file_extension(fetcher.url) + ext = url.determine_url_file_extension(fetcher.url) + ext = ext or spec.package.versions[spec.package.version].get( + 'extension', None) + ext = ext.lstrip('.') + if not ext: + raise MirrorError( + "%s version does not specify an extension" % spec.name + + " and could not parse extension from %s" % fetcher.url) else: # If the archive shouldn't be expanded, don't check extension. ext = None diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py index 498f3d4a6d..a9b5b2069a 100644 --- a/lib/spack/spack/package.py +++ b/lib/spack/spack/package.py @@ -33,6 +33,7 @@ Homebrew makes it very easy to create packages. For a complete rundown on spack and how it differs from homebrew, look at the README. """ +import contextlib import copy import functools import inspect @@ -43,8 +44,8 @@ import sys import textwrap import time from StringIO import StringIO -from urlparse import urlparse +import llnl.util.lock import llnl.util.tty as tty import spack import spack.build_environment @@ -58,12 +59,14 @@ import spack.repository import spack.url import spack.util.web from llnl.util.filesystem import * +from llnl.util.filesystem import * +from llnl.util.lang import * from llnl.util.lang import * from llnl.util.link_tree import LinkTree from llnl.util.tty.log import log_output from spack import directory_layout from spack.stage import Stage, ResourceStage, StageComposite -from spack.util.compression import allowed_archive +from spack.util.crypto import bit_length from spack.util.environment import dump_environment from spack.util.executable import ProcessError from spack.version import * @@ -426,7 +429,7 @@ class PackageBase(object): parallel = False ... - This changes thd default behavior so that make is sequential. If you still + This changes the default behavior so that make is sequential. 
If you still want to build some parts in parallel, you can do this in your install function: @@ -480,27 +483,42 @@ class PackageBase(object): # """By default we build in parallel. Subclasses can override this.""" parallel = True + """# jobs to use for parallel make. If set, overrides default of ncpus.""" make_jobs = None + """By default do not run tests within package's install()""" run_tests = False + """Most packages are NOT extendable. Set to True if you want extensions.""" extendable = False + + """When True, add RPATHs for the entire DAG. When False, add RPATHs only + for immediate dependencies.""" + transitive_rpaths = True + """List of prefix-relative file paths (or a single path). If these do not exist after install, or if they exist but are not files, sanity checks fail. """ sanity_check_is_file = [] + """List of prefix-relative directory paths (or a single path). If these do not exist after install, or if they exist but are not directories, sanity checks will fail. """ sanity_check_is_dir = [] + """Per-process lock objects for each install prefix.""" + prefix_locks = {} + def __init__(self, spec): # this determines how the package should be built. self.spec = spec + # Lock on the prefix shared resource. Will be set in prefix property + self._prefix_lock = None + # Name of package is the name of its module, without the # containing module names. self.name = self.module.__name__ @@ -646,8 +664,13 @@ class PackageBase(object): # TODO: move this out of here and into some URL extrapolation module? def url_for_version(self, version): - """ - Returns a URL that you can download a new version of this package from. + """Returns a URL from which the specified version of this package + may be downloaded. + + version: class Version + The version for which a URL is sought. + + See Class Version (version.py) """ if not isinstance(version, Version): version = Version(version) @@ -844,6 +867,29 @@ class PackageBase(object): return dependents @property + def prefix_lock(self): + """Prefix lock is a byte range lock on the nth byte of a file. + + The lock file is ``spack.installed_db.prefix_lock`` -- the DB + tells us what to call it and it lives alongside the install DB. + + n is the sys.maxsize-bit prefix of the DAG hash. This makes + likelihood of collision is very low AND it gives us + readers-writer lock semantics with just a single lockfile, so no + cleanup required. 
+ """ + if self._prefix_lock is None: + prefix = self.spec.prefix + if prefix not in Package.prefix_locks: + Package.prefix_locks[prefix] = llnl.util.lock.Lock( + spack.installed_db.prefix_lock_path, + self.spec.dag_hash_bit_prefix(bit_length(sys.maxsize)), 1) + + self._prefix_lock = Package.prefix_locks[prefix] + + return self._prefix_lock + + @property def prefix(self): """Get the prefix into which this package should be installed.""" return self.spec.prefix @@ -1028,10 +1074,29 @@ class PackageBase(object): resource_stage_folder = '-'.join(pieces) return resource_stage_folder + @contextlib.contextmanager + def _prefix_read_lock(self): + try: + self.prefix_lock.acquire_read(60) + yield self + finally: + self.prefix_lock.release_read() + + @contextlib.contextmanager + def _prefix_write_lock(self): + try: + self.prefix_lock.acquire_write(60) + yield self + finally: + self.prefix_lock.release_write() + + install_phases = set(['configure', 'build', 'install', 'provenance']) + def do_install(self, keep_prefix=False, keep_stage=False, - ignore_deps=False, + install_deps=True, + install_self=True, skip_patch=False, verbose=False, make_jobs=None, @@ -1050,8 +1115,10 @@ class PackageBase(object): :param keep_stage: By default, stage is destroyed only if there are \ no exceptions during build. Set to True to keep the stage even with exceptions. - :param ignore_deps: Don't install dependencies before installing this \ - package + :param install_deps: Install dependencies before installing this \ + package + :param install_self: Install this package once dependencies have \ + been installed. :param fake: Don't really build; install fake stub files instead. :param skip_patch: Skip patch stage of build if True. :param verbose: Display verbose build output (by default, suppresses \ @@ -1059,6 +1126,7 @@ class PackageBase(object): :param dirty: Don't clean the build environment before installing. :param make_jobs: Number of make jobs to use for install. Default is \ ncpus + :param force: Install again, even if already installed. :param run_tests: Run tests within the package's install() """ if not self.spec.concrete: @@ -1072,30 +1140,41 @@ class PackageBase(object): return # Ensure package is not already installed - if spack.install_layout.check_installed(self.spec): - tty.msg("%s is already installed in %s" % (self.name, self.prefix)) - rec = spack.installed_db.get_record(self.spec) - if (not rec.explicit) and explicit: - with spack.installed_db.write_transaction(): - rec = spack.installed_db.get_record(self.spec) - rec.explicit = True - return + layout = spack.install_layout + with self._prefix_read_lock(): + if layout.check_installed(self.spec): + tty.msg( + "%s is already installed in %s" % (self.name, self.prefix)) + rec = spack.installed_db.get_record(self.spec) + if (not rec.explicit) and explicit: + with spack.installed_db.write_transaction(): + rec = spack.installed_db.get_record(self.spec) + rec.explicit = True + return self._do_install_pop_kwargs(kwargs) tty.msg("Installing %s" % self.name) # First, install dependencies recursively. 
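# Illustrative sketch (not part of this diff): what the new keyword pair
# means for callers of do_install().  ``spack install --dependencies-only``
# maps to install_deps=True, install_self=False, and the old
# ``--ignore-dependencies`` becomes install_deps=False, install_self=True
# (see the cmd/install.py hunk above).  'hdf5' is only an example spec and
# this assumes a Spack checkout of this vintage on the Python path.

import spack
import spack.cmd

spec = spack.cmd.parse_specs(['hdf5'], concretize=True)[0]
pkg = spack.repo.get(spec)

pkg.do_install(install_deps=True, install_self=False)   # dependencies only
pkg.do_install(install_deps=False, install_self=True)   # now the package itself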
- if not ignore_deps: - self.do_install_dependencies(keep_prefix=keep_prefix, - keep_stage=keep_stage, - ignore_deps=ignore_deps, - fake=fake, - skip_patch=skip_patch, - verbose=verbose, - make_jobs=make_jobs, - run_tests=run_tests, - dirty=dirty) + if install_deps: + for dep in self.spec.dependencies(): + dep.package.do_install( + keep_prefix=keep_prefix, + keep_stage=keep_stage, + install_deps=install_deps, + install_self=True, + fake=fake, + skip_patch=skip_patch, + verbose=verbose, + make_jobs=make_jobs, + run_tests=run_tests, + dirty=dirty) + + # The rest of this function is to install ourself, + # once deps have been installed. + if not install_self: + return # Set run_tests flag before starting build. self.run_tests = run_tests @@ -1103,6 +1182,7 @@ class PackageBase(object): # Set parallelism before starting build. self.make_jobs = make_jobs + # ------------------- BEGIN def build_process() # Then install the package itself. def build_process(): """Forked for each build. Has its own process and python @@ -1123,7 +1203,7 @@ class PackageBase(object): self.source_directory = self.stage.source_path try: - with self.stage: + with contextlib.nested(self.stage, self._prefix_write_lock()): # Run the pre-install hook in the child process after # the directory is created. spack.hooks.pre_install(self) @@ -1267,11 +1347,6 @@ class PackageBase(object): raise InstallError( "Install failed for %s. Nothing was installed!" % self.name) - def do_install_dependencies(self, **kwargs): - # Pass along paths of dependencies here - for dep in self.spec.dependencies(): - dep.package.do_install(**kwargs) - @property def build_log_path(self): if self.installed: @@ -1419,11 +1494,12 @@ class PackageBase(object): raise PackageStillNeededError(self.spec, dependents) # Pre-uninstall hook runs first. - spack.hooks.pre_uninstall(self) - - # Uninstalling in Spack only requires removing the prefix. - self.remove_prefix() - spack.installed_db.remove(self.spec) + with self._prefix_write_lock(): + spack.hooks.pre_uninstall(self) + # Uninstalling in Spack only requires removing the prefix. + self.remove_prefix() + # + spack.installed_db.remove(self.spec) tty.msg("Successfully uninstalled %s" % self.spec.short_spec) # Once everything else is done, run post install hooks @@ -1698,7 +1774,8 @@ class CMakePackage(PackageBase): build_type = 'RelWithDebInfo' args = ['-DCMAKE_INSTALL_PREFIX:PATH={0}'.format(pkg.prefix), - '-DCMAKE_BUILD_TYPE:STRING={0}'.format(build_type)] + '-DCMAKE_BUILD_TYPE:STRING={0}'.format(build_type), + '-DCMAKE_VERBOSE_MAKEFILE=ON'] if platform.mac_ver()[0]: args.append('-DCMAKE_FIND_FRAMEWORK:STRING=LAST') @@ -1762,16 +1839,6 @@ def flatten_dependencies(spec, flat_dir): dep_files.merge(flat_dir + '/' + name) -def validate_package_url(url_string): - """Determine whether spack can handle a particular URL or not.""" - url = urlparse(url_string) - if url.scheme not in _ALLOWED_URL_SCHEMES: - tty.die("Invalid protocol in URL: '%s'" % url_string) - - if not allowed_archive(url_string): - tty.die("Invalid file type in URL: '%s'" % url_string) - - def dump_packages(spec, path): """Dump all package information for a spec and its dependencies. 
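The ``prefix_lock`` property above combines pieces that appear elsewhere in this diff: ``dag_hash_bit_prefix()`` in ``spec.py`` and ``prefix_bits()``/``bit_length()`` in ``spack/util/crypto.py``. A standalone sketch of the arithmetic, reusing one of the sample DAG hashes from ``test_hash_bits`` further down (the two helpers are re-implemented inline; Python 2, like the rest of this patch):

    import base64
    import sys

    def bit_length(num):                   # mirrors spack.util.crypto.bit_length
        return len(bin(num).lstrip('-0b'))

    def prefix_bits(byte_array, bits):     # mirrors spack.util.crypto.prefix_bits
        result, n = 0, 0
        for b in byte_array:
            n += 8
            result = (result << 8) | ord(b)
            if n >= bits:
                break
        return result >> (n - bits)

    dag_hash = '35orsd4cenv743hg4i5vxha2lzayycby'   # sample hash from the tests
    offset = prefix_bits(base64.b32decode(dag_hash, casefold=True),
                         bit_length(sys.maxsize))   # 63 bits on 64-bit hosts

    # The package then holds a 1-byte range lock at that offset in the one
    # shared file, spack.installed_db.prefix_lock_path:
    #     llnl.util.lock.Lock(spack.installed_db.prefix_lock_path, offset, 1)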
diff --git a/lib/spack/spack/provider_index.py b/lib/spack/spack/provider_index.py index 2be48b43c1..d7f2379486 100644 --- a/lib/spack/spack/provider_index.py +++ b/lib/spack/spack/provider_index.py @@ -232,7 +232,8 @@ class ProviderIndex(object): spdict[provided_spec] = opdict[provided_spec] continue - spdict[provided_spec] += opdict[provided_spec] + spdict[provided_spec] = \ + spdict[provided_spec].union(opdict[provided_spec]) def remove_provider(self, pkg_name): """Remove a provider from the ProviderIndex.""" diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py index ba9cea876d..fc4bf41e34 100644 --- a/lib/spack/spack/spec.py +++ b/lib/spack/spack/spec.py @@ -120,6 +120,7 @@ from spack.util.prefix import Prefix from spack.util.string import * import spack.util.spack_yaml as syaml from spack.util.spack_yaml import syaml_dict +from spack.util.crypto import prefix_bits from spack.version import * from spack.provider_index import ProviderIndex @@ -963,13 +964,10 @@ class Spec(object): return Prefix(spack.install_layout.path_for_spec(self)) def dag_hash(self, length=None): - """ - Return a hash of the entire spec DAG, including connectivity. - """ + """Return a hash of the entire spec DAG, including connectivity.""" if self._hash: return self._hash[:length] else: - # XXX(deptype): ignore 'build' dependencies here yaml_text = syaml.dump( self.to_node_dict(), default_flow_style=True, width=sys.maxint) sha = hashlib.sha1(yaml_text) @@ -978,6 +976,10 @@ class Spec(object): self._hash = b32_hash return b32_hash + def dag_hash_bit_prefix(self, bits): + """Get the first <bits> bits of the DAG hash as an integer type.""" + return base32_prefix_bits(self.dag_hash(), bits) + def to_node_dict(self): d = syaml_dict() @@ -999,6 +1001,8 @@ class Spec(object): if self.architecture: d['arch'] = self.architecture.to_dict() + # TODO: restore build dependencies here once we have less picky + # TODO: concretization. deps = self.dependencies_dict(deptype=('link', 'run')) if deps: d['dependencies'] = syaml_dict([ @@ -2723,6 +2727,16 @@ def parse_anonymous_spec(spec_like, pkg_name): return anon_spec +def base32_prefix_bits(hash_string, bits): + """Return the first <bits> bits of a base32 string as an integer.""" + if bits > len(hash_string) * 5: + raise ValueError("Too many bits! Requested %d bit prefix of '%s'." + % (bits, hash_string)) + + hash_bytes = base64.b32decode(hash_string, casefold=True) + return prefix_bits(hash_bytes, bits) + + class SpecError(spack.error.SpackError): """Superclass for all errors that occur while constructing specs.""" diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py index b659cfb2fb..c0dfbba987 100644 --- a/lib/spack/spack/stage.py +++ b/lib/spack/spack/stage.py @@ -23,12 +23,15 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## import os +import sys import errno +import hashlib import shutil import tempfile from urlparse import urljoin import llnl.util.tty as tty +import llnl.util.lock from llnl.util.filesystem import * import spack.util.pattern as pattern @@ -38,6 +41,7 @@ import spack.config import spack.fetch_strategy as fs import spack.error from spack.version import * +from spack.util.crypto import prefix_bits, bit_length STAGE_PREFIX = 'spack-stage-' @@ -88,8 +92,12 @@ class Stage(object): similar, and are intended to persist for only one run of spack. 
""" - def __init__(self, url_or_fetch_strategy, - name=None, mirror_path=None, keep=False, path=None): + """Shared dict of all stage locks.""" + stage_locks = {} + + def __init__( + self, url_or_fetch_strategy, + name=None, mirror_path=None, keep=False, path=None, lock=True): """Create a stage object. Parameters: url_or_fetch_strategy @@ -147,6 +155,20 @@ class Stage(object): # Flag to decide whether to delete the stage folder on exit or not self.keep = keep + # File lock for the stage directory. We use one file for all + # stage locks. See Spec.prefix_lock for details on this approach. + self._lock = None + if lock: + if self.name not in Stage.stage_locks: + sha1 = hashlib.sha1(self.name).digest() + lock_id = prefix_bits(sha1, bit_length(sys.maxsize)) + stage_lock_path = join_path(spack.stage_path, '.lock') + + Stage.stage_locks[self.name] = llnl.util.lock.Lock( + stage_lock_path, lock_id, 1) + + self._lock = Stage.stage_locks[self.name] + def __enter__(self): """ Entering a stage context will create the stage directory @@ -154,6 +176,8 @@ class Stage(object): Returns: self """ + if self._lock is not None: + self._lock.acquire_write(timeout=60) self.create() return self @@ -175,6 +199,9 @@ class Stage(object): if exc_type is None and not self.keep: self.destroy() + if self._lock is not None: + self._lock.release_write() + def _need_to_create_path(self): """Makes sure nothing weird has happened since the last time we looked at path. Returns True if path already exists and is ok. @@ -302,9 +329,11 @@ class Stage(object): # the checksum will be the same. digest = None expand = True + extension = None if isinstance(self.default_fetcher, fs.URLFetchStrategy): digest = self.default_fetcher.digest expand = self.default_fetcher.expand_archive + extension = self.default_fetcher.extension # Have to skip the checksum for things archived from # repositories. How can this be made safer? @@ -313,10 +342,12 @@ class Stage(object): # Add URL strategies for all the mirrors with the digest for url in urls: fetchers.insert( - 0, fs.URLFetchStrategy(url, digest, expand=expand)) + 0, fs.URLFetchStrategy( + url, digest, expand=expand, extension=extension)) fetchers.insert( 0, spack.fetch_cache.fetcher( - self.mirror_path, digest, expand=expand)) + self.mirror_path, digest, expand=expand, + extension=extension)) # Look for the archive in list_url package_name = os.path.dirname(self.mirror_path) @@ -412,7 +443,8 @@ class Stage(object): """ # Create the top-level stage directory mkdirp(spack.stage_path) - remove_dead_links(spack.stage_path) + remove_if_dead_link(self.path) + # If a tmp_root exists then create a directory there and then link it # in the stage area, otherwise create the stage directory in self.path if self._need_to_create_path(): @@ -538,7 +570,7 @@ class DIYStage(object): def chdir_to_source(self): self.chdir() - def fetch(self, mirror_only): + def fetch(self, *args, **kwargs): tty.msg("No need to fetch for DIY.") def check(self): diff --git a/lib/spack/spack/test/lock.py b/lib/spack/spack/test/lock.py index 32cbe13ce1..4f62cd85e9 100644 --- a/lib/spack/spack/test/lock.py +++ b/lib/spack/spack/test/lock.py @@ -25,6 +25,7 @@ """ These tests ensure that our lock works correctly. 
""" +import os import shutil import tempfile import unittest @@ -44,7 +45,6 @@ class LockTest(unittest.TestCase): def setUp(self): self.tempdir = tempfile.mkdtemp() self.lock_path = join_path(self.tempdir, 'lockfile') - touch(self.lock_path) def tearDown(self): shutil.rmtree(self.tempdir, ignore_errors=True) @@ -64,98 +64,185 @@ class LockTest(unittest.TestCase): # # Process snippets below can be composed into tests. # - def acquire_write(self, barrier): - lock = Lock(self.lock_path) - lock.acquire_write() # grab exclusive lock - barrier.wait() - barrier.wait() # hold the lock until exception raises in other procs. - - def acquire_read(self, barrier): - lock = Lock(self.lock_path) - lock.acquire_read() # grab shared lock - barrier.wait() - barrier.wait() # hold the lock until exception raises in other procs. - - def timeout_write(self, barrier): - lock = Lock(self.lock_path) - barrier.wait() # wait for lock acquire in first process - self.assertRaises(LockError, lock.acquire_write, 0.1) - barrier.wait() + def acquire_write(self, start=0, length=0): + def fn(barrier): + lock = Lock(self.lock_path, start, length) + lock.acquire_write() # grab exclusive lock + barrier.wait() + barrier.wait() # hold the lock until timeout in other procs. + return fn + + def acquire_read(self, start=0, length=0): + def fn(barrier): + lock = Lock(self.lock_path, start, length) + lock.acquire_read() # grab shared lock + barrier.wait() + barrier.wait() # hold the lock until timeout in other procs. + return fn + + def timeout_write(self, start=0, length=0): + def fn(barrier): + lock = Lock(self.lock_path, start, length) + barrier.wait() # wait for lock acquire in first process + self.assertRaises(LockError, lock.acquire_write, 0.1) + barrier.wait() + return fn - def timeout_read(self, barrier): - lock = Lock(self.lock_path) - barrier.wait() # wait for lock acquire in first process - self.assertRaises(LockError, lock.acquire_read, 0.1) - barrier.wait() + def timeout_read(self, start=0, length=0): + def fn(barrier): + lock = Lock(self.lock_path, start, length) + barrier.wait() # wait for lock acquire in first process + self.assertRaises(LockError, lock.acquire_read, 0.1) + barrier.wait() + return fn # # Test that exclusive locks on other processes time out when an # exclusive lock is held. 
# def test_write_lock_timeout_on_write(self): - self.multiproc_test(self.acquire_write, self.timeout_write) + self.multiproc_test(self.acquire_write(), self.timeout_write()) def test_write_lock_timeout_on_write_2(self): self.multiproc_test( - self.acquire_write, self.timeout_write, self.timeout_write) + self.acquire_write(), self.timeout_write(), self.timeout_write()) def test_write_lock_timeout_on_write_3(self): self.multiproc_test( - self.acquire_write, self.timeout_write, self.timeout_write, - self.timeout_write) + self.acquire_write(), self.timeout_write(), self.timeout_write(), + self.timeout_write()) + + def test_write_lock_timeout_on_write_ranges(self): + self.multiproc_test( + self.acquire_write(0, 1), self.timeout_write(0, 1)) + + def test_write_lock_timeout_on_write_ranges_2(self): + self.multiproc_test( + self.acquire_write(0, 64), self.acquire_write(65, 1), + self.timeout_write(0, 1), self.timeout_write(63, 1)) + + def test_write_lock_timeout_on_write_ranges_3(self): + self.multiproc_test( + self.acquire_write(0, 1), self.acquire_write(1, 1), + self.timeout_write(), self.timeout_write(), self.timeout_write()) + + def test_write_lock_timeout_on_write_ranges_4(self): + self.multiproc_test( + self.acquire_write(0, 1), self.acquire_write(1, 1), + self.acquire_write(2, 456), self.acquire_write(500, 64), + self.timeout_write(), self.timeout_write(), self.timeout_write()) # # Test that shared locks on other processes time out when an # exclusive lock is held. # def test_read_lock_timeout_on_write(self): - self.multiproc_test(self.acquire_write, self.timeout_read) + self.multiproc_test(self.acquire_write(), self.timeout_read()) def test_read_lock_timeout_on_write_2(self): self.multiproc_test( - self.acquire_write, self.timeout_read, self.timeout_read) + self.acquire_write(), self.timeout_read(), self.timeout_read()) def test_read_lock_timeout_on_write_3(self): self.multiproc_test( - self.acquire_write, self.timeout_read, self.timeout_read, - self.timeout_read) + self.acquire_write(), self.timeout_read(), self.timeout_read(), + self.timeout_read()) + + def test_read_lock_timeout_on_write_ranges(self): + """small write lock, read whole file.""" + self.multiproc_test(self.acquire_write(0, 1), self.timeout_read()) + + def test_read_lock_timeout_on_write_ranges_2(self): + """small write lock, small read lock""" + self.multiproc_test(self.acquire_write(0, 1), self.timeout_read(0, 1)) + + def test_read_lock_timeout_on_write_ranges_3(self): + """two write locks, overlapping read locks""" + self.multiproc_test( + self.acquire_write(0, 1), self.acquire_write(64, 128), + self.timeout_read(0, 1), self.timeout_read(128, 256)) # # Test that exclusive locks time out when shared locks are held. 
# def test_write_lock_timeout_on_read(self): - self.multiproc_test(self.acquire_read, self.timeout_write) + self.multiproc_test(self.acquire_read(), self.timeout_write()) def test_write_lock_timeout_on_read_2(self): self.multiproc_test( - self.acquire_read, self.timeout_write, self.timeout_write) + self.acquire_read(), self.timeout_write(), self.timeout_write()) def test_write_lock_timeout_on_read_3(self): self.multiproc_test( - self.acquire_read, self.timeout_write, self.timeout_write, - self.timeout_write) + self.acquire_read(), self.timeout_write(), self.timeout_write(), + self.timeout_write()) + + def test_write_lock_timeout_on_read_ranges(self): + self.multiproc_test(self.acquire_read(0, 1), self.timeout_write()) + + def test_write_lock_timeout_on_read_ranges_2(self): + self.multiproc_test(self.acquire_read(0, 1), self.timeout_write(0, 1)) + + def test_write_lock_timeout_on_read_ranges_3(self): + self.multiproc_test( + self.acquire_read(0, 1), self.acquire_read(10, 1), + self.timeout_write(0, 1), self.timeout_write(10, 1)) + + def test_write_lock_timeout_on_read_ranges_4(self): + self.multiproc_test( + self.acquire_read(0, 64), + self.timeout_write(10, 1), self.timeout_write(32, 1)) + + def test_write_lock_timeout_on_read_ranges_5(self): + self.multiproc_test( + self.acquire_read(64, 128), + self.timeout_write(65, 1), self.timeout_write(127, 1), + self.timeout_write(90, 10)) # # Test that exclusive locks time while lots of shared locks are held. # def test_write_lock_timeout_with_multiple_readers_2_1(self): self.multiproc_test( - self.acquire_read, self.acquire_read, self.timeout_write) + self.acquire_read(), self.acquire_read(), self.timeout_write()) def test_write_lock_timeout_with_multiple_readers_2_2(self): self.multiproc_test( - self.acquire_read, self.acquire_read, self.timeout_write, - self.timeout_write) + self.acquire_read(), self.acquire_read(), self.timeout_write(), + self.timeout_write()) def test_write_lock_timeout_with_multiple_readers_3_1(self): self.multiproc_test( - self.acquire_read, self.acquire_read, self.acquire_read, - self.timeout_write) + self.acquire_read(), self.acquire_read(), self.acquire_read(), + self.timeout_write()) def test_write_lock_timeout_with_multiple_readers_3_2(self): self.multiproc_test( - self.acquire_read, self.acquire_read, self.acquire_read, - self.timeout_write, self.timeout_write) + self.acquire_read(), self.acquire_read(), self.acquire_read(), + self.timeout_write(), self.timeout_write()) + + def test_write_lock_timeout_with_multiple_readers_2_1_ranges(self): + self.multiproc_test( + self.acquire_read(0, 10), self.acquire_read(5, 10), + self.timeout_write(5, 5)) + + def test_write_lock_timeout_with_multiple_readers_2_3_ranges(self): + self.multiproc_test( + self.acquire_read(0, 10), self.acquire_read(5, 15), + self.timeout_write(0, 1), self.timeout_write(11, 3), + self.timeout_write(7, 1)) + + def test_write_lock_timeout_with_multiple_readers_3_1_ranges(self): + self.multiproc_test( + self.acquire_read(0, 5), self.acquire_read(5, 5), + self.acquire_read(10, 5), + self.timeout_write(0, 15)) + + def test_write_lock_timeout_with_multiple_readers_3_2_ranges(self): + self.multiproc_test( + self.acquire_read(0, 5), self.acquire_read(5, 5), + self.acquire_read(10, 5), + self.timeout_write(3, 10), self.timeout_write(5, 1)) # # Test that read can be upgraded to write. 
@@ -172,19 +259,42 @@ class LockTest(unittest.TestCase): lock.acquire_read() self.assertTrue(lock._reads == 1) self.assertTrue(lock._writes == 0) + self.assertTrue(lock._file.mode == 'r+') lock.acquire_write() self.assertTrue(lock._reads == 1) self.assertTrue(lock._writes == 1) + self.assertTrue(lock._file.mode == 'r+') lock.release_write() self.assertTrue(lock._reads == 1) self.assertTrue(lock._writes == 0) + self.assertTrue(lock._file.mode == 'r+') lock.release_read() self.assertTrue(lock._reads == 0) self.assertTrue(lock._writes == 0) - self.assertTrue(lock._fd is None) + self.assertTrue(lock._file is None) + + # + # Test that read-only file can be read-locked but not write-locked. + # + def test_upgrade_read_to_write_fails_with_readonly_file(self): + # ensure lock file exists the first time, so we open it read-only + # to begin wtih. + touch(self.lock_path) + os.chmod(self.lock_path, 0444) + + lock = Lock(self.lock_path) + self.assertTrue(lock._reads == 0) + self.assertTrue(lock._writes == 0) + + lock.acquire_read() + self.assertTrue(lock._reads == 1) + self.assertTrue(lock._writes == 0) + self.assertTrue(lock._file.mode == 'r') + + self.assertRaises(LockError, lock.acquire_write) # # Longer test case that ensures locks are reusable. Ordering is diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py index 40cdb02966..0bc63bcf0f 100644 --- a/lib/spack/spack/test/spec_dag.py +++ b/lib/spack/spack/test/spec_dag.py @@ -523,3 +523,37 @@ class SpecDagTest(MockPackagesTest): level = descend_and_check(dag.to_node_dict()) # level just makes sure we are doing something here self.assertTrue(level >= 5) + + def test_hash_bits(self): + """Ensure getting first n bits of a base32-encoded DAG hash works.""" + + # RFC 4648 base32 decode table + b32 = dict((j, i) for i, j in enumerate('abcdefghijklmnopqrstuvwxyz')) + b32.update(dict((j, i) for i, j in enumerate('234567', 26))) + + # some package hashes + tests = [ + '35orsd4cenv743hg4i5vxha2lzayycby', + '6kfqtj7dap3773rxog6kkmoweix5gpwo', + 'e6h6ff3uvmjbq3azik2ckr6ckwm3depv', + 'snz2juf4ij7sv77cq3vs467q6acftmur', + '4eg47oedi5bbkhpoxw26v3oe6vamkfd7', + 'vrwabwj6umeb5vjw6flx2rnft3j457rw'] + + for test_hash in tests: + # string containing raw bits of hash ('1' and '0') + expected = ''.join([format(b32[c], '#07b').replace('0b', '') + for c in test_hash]) + + for bits in (1, 2, 3, 4, 7, 8, 9, 16, 64, 117, 128, 160): + actual_int = spack.spec.base32_prefix_bits(test_hash, bits) + fmt = "#0%sb" % (bits + 2) + actual = format(actual_int, fmt).replace('0b', '') + + self.assertEqual(expected[:bits], actual) + + self.assertRaises( + ValueError, spack.spec.base32_prefix_bits, test_hash, 161) + + self.assertRaises( + ValueError, spack.spec.base32_prefix_bits, test_hash, 256) diff --git a/lib/spack/spack/test/versions.py b/lib/spack/spack/test/versions.py index 41d72e7c34..9b4dc29f35 100644 --- a/lib/spack/spack/test/versions.py +++ b/lib/spack/spack/test/versions.py @@ -428,3 +428,6 @@ class VersionsTest(unittest.TestCase): self.assertEqual(str(b), '1_2-3') # Raise TypeError on tuples self.assertRaises(TypeError, b.__getitem__, 1, 2) + +if __name__ == '__main__': + unittest.main() diff --git a/lib/spack/spack/url.py b/lib/spack/spack/url.py index 02c9c04380..af4b8a51ef 100644 --- a/lib/spack/spack/url.py +++ b/lib/spack/spack/url.py @@ -142,7 +142,7 @@ def split_url_extension(path): return prefix, ext, suffix -def downloaded_file_extension(path): +def determine_url_file_extension(path): """This returns the type of archive a URL refers to. 
This is sometimes confusing because of URLs like: @@ -160,8 +160,6 @@ def downloaded_file_extension(path): return 'tar.gz' prefix, ext, suffix = split_url_extension(path) - if not ext: - raise UrlParseError("Cannot deduce archive type in %s" % path, path) return ext diff --git a/lib/spack/spack/util/compression.py b/lib/spack/spack/util/compression.py index 64554ab2f7..982a02d021 100644 --- a/lib/spack/spack/util/compression.py +++ b/lib/spack/spack/util/compression.py @@ -40,9 +40,10 @@ def allowed_archive(path): return any(path.endswith(t) for t in ALLOWED_ARCHIVE_TYPES) -def decompressor_for(path): +def decompressor_for(path, extension=None): """Get the appropriate decompressor for a path.""" - if path.endswith(".zip"): + if ((extension and re.match(r'\.?zip$', extension)) or + path.endswith('.zip')): unzip = which('unzip', required=True) return unzip tar = which('tar', required=True) diff --git a/lib/spack/spack/util/crypto.py b/lib/spack/spack/util/crypto.py index 22777fdb68..d074716022 100644 --- a/lib/spack/spack/util/crypto.py +++ b/lib/spack/spack/util/crypto.py @@ -100,3 +100,24 @@ class Checker(object): self.sum = checksum( self.hash_fun, filename, block_size=self.block_size) return self.sum == self.hexdigest + + +def prefix_bits(byte_array, bits): + """Return the first <bits> bits of a byte array as an integer.""" + result = 0 + n = 0 + for i, b in enumerate(byte_array): + n += 8 + result = (result << 8) | ord(b) + if n >= bits: + break + + result >>= (n - bits) + return result + + +def bit_length(num): + """Number of bits required to represent an integer in binary.""" + s = bin(num) + s = s.lstrip('-0b') + return len(s) diff --git a/lib/spack/spack/util/executable.py b/lib/spack/spack/util/executable.py index 5c27b92df5..4fe4bd26ba 100644 --- a/lib/spack/spack/util/executable.py +++ b/lib/spack/spack/util/executable.py @@ -185,11 +185,11 @@ class Executable(object): finally: if close_ostream: - output.close() + ostream.close() if close_estream: - error.close() + estream.close() if close_istream: - input.close() + istream.close() def __eq__(self, other): return self.exe == other.exe diff --git a/lib/spack/spack/version.py b/lib/spack/spack/version.py index e3efa6c87a..67a22f4660 100644 --- a/lib/spack/spack/version.py +++ b/lib/spack/spack/version.py @@ -107,6 +107,10 @@ def coerced(method): return coercing_method +def _numeric_lt(self0, other): + """Compares two versions, knowing they're both numeric""" + + @total_ordering class Version(object): """Class to represent versions""" @@ -154,6 +158,27 @@ class Version(object): def highest(self): return self + def isnumeric(self): + """Tells if this version is numeric (vs. a non-numeric version). A + version will be numeric as long as the first section of it is, + even if it contains non-numerica portions. 
+
+        Some numeric versions:
+          1
+          1.1
+          1.1a
+          1.a.1b
+        Some non-numeric versions:
+          develop
+          system
+          myfavoritebranch
+        """
+        return isinstance(self.version[0], numbers.Integral)
+
+    def isdevelop(self):
+        """Triggers on the special case of the `@develop` version."""
+        return self.string == 'develop'
+
     @coerced
     def satisfies(self, other):
         """A Version 'satisfies' another if it is at least as specific and has
@@ -225,6 +250,27 @@ class Version(object):
     def concrete(self):
         return self
 
+    def _numeric_lt(self, other):
+        """Compares two versions, knowing they're both numeric"""
+        # Standard comparison of two numeric versions
+        for a, b in zip(self.version, other.version):
+            if a == b:
+                continue
+            else:
+                # Numbers are always "newer" than letters.
+                # This is for consistency with RPM.  See patch
+                # #60884 (and details) from bugzilla #50977 in
+                # the RPM project at rpm.org.  Or look at
+                # rpmvercmp.c if you want to see how this is
+                # implemented there.
+                if type(a) != type(b):
+                    return type(b) == int
+                else:
+                    return a < b
+        # If the common prefix is equal, the one
+        # with more segments is bigger.
+        return len(self.version) < len(other.version)
+
     @coerced
     def __lt__(self, other):
         """Version comparison is designed for consistency with the way RPM
@@ -240,30 +286,33 @@ class Version(object):
         if self.version == other.version:
             return False
 
-        # dev is __gt__ than anything but itself.
-        if other.string == 'develop':
-            return True
-
-        # If lhs is dev then it can't be < than anything
-        if self.string == 'develop':
-            return False
-
-        for a, b in zip(self.version, other.version):
-            if a == b:
-                continue
-            else:
-                # Numbers are always "newer" than letters.  This is for
-                # consistency with RPM.  See patch #60884 (and details)
-                # from bugzilla #50977 in the RPM project at rpm.org.
-                # Or look at rpmvercmp.c if you want to see how this is
-                # implemented there.
-                if type(a) != type(b):
-                    return type(b) == int
-                else:
-                    return a < b
-
-        # If the common prefix is equal, the one with more segments is bigger.
-        return len(self.version) < len(other.version)
+        # First priority: anything < develop
+        sdev = self.isdevelop()
+        if sdev:
+            return False    # source = develop, it can't be < anything
+
+        # Now we know !sdev
+        odev = other.isdevelop()
+        if odev:
+            return True    # src < dst
+
+        # now we know neither self nor other isdevelop().
+
+        # Principle: Non-numeric is less than numeric
+        # (so numeric will always be preferred by default)
+        if self.isnumeric():
+            if other.isnumeric():
+                return self._numeric_lt(other)
+            else:   # self = numeric; other = non-numeric
+                # Numeric > Non-numeric (always)
+                return False
+        else:
+            if other.isnumeric():  # self = non-numeric, other = numeric
+                # non-numeric < numeric (always)
+                return True
+            else:  # Both non-numeric
+                # Maybe consider other ways to compare here...
+ return self.string < other.string @coerced def __eq__(self, other): diff --git a/share/spack/csh/convert-pyext.sh b/share/spack/csh/convert-pyext.sh deleted file mode 100644 index a48bcdbcca..0000000000 --- a/share/spack/csh/convert-pyext.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash --noprofile -PYEXT_REGEX=".*/.*/package.py" - -find var/spack/repos/builtin/packages/ -type f -regextype sed -regex ${PYEXT_REGEX} -exec \ - sed -i 's/python('\''setup.py'\'', /setup_py(/' {} \; diff --git a/share/spack/qa/check_dependencies b/share/spack/qa/check_dependencies index cf3d204f48..e999463b03 100755 --- a/share/spack/qa/check_dependencies +++ b/share/spack/qa/check_dependencies @@ -65,6 +65,32 @@ for dep in "$@"; do exit 1 fi + + # Flake8 and Sphinx require setuptools in order to run. + # Otherwise, they print out this error message: + # + # Traceback (most recent call last): + # File: "/usr/bin/flake8", line 5, in <module> + # from pkg_resources import load_entry_point + # ImportError: No module named pkg_resources + # + # Print a more useful error message if setuptools not found. + if [[ $dep == flake8 || $dep == sphinx* ]]; then + # Find which Python is being run + # Spack-installed packages have a hard-coded shebang + python_cmd=$(head -n 1 $(which $dep) | cut -c 3-) + # May not have a shebang + if [[ $python_cmd != *python* ]]; then + python_cmd=python + fi + # Check if setuptools is in the PYTHONPATH + if ! $python_cmd -c "import setuptools" 2> /dev/null; then + echo "ERROR: setuptools is required to run $dep." + echo "Please add it to your PYTHONPATH." + + exit 1 + fi + fi done echo "Dependencies found." diff --git a/share/spack/qa/run-flake8-tests b/share/spack/qa/run-flake8-tests index 350ef3161f..6fe97160e3 100755 --- a/share/spack/qa/run-flake8-tests +++ b/share/spack/qa/run-flake8-tests @@ -23,10 +23,6 @@ deps=( # Check for dependencies "$QA_DIR/check_dependencies" "${deps[@]}" || exit 1 -# Move to root directory of Spack -# Allows script to be run from anywhere -cd "$SPACK_ROOT" - # Gather array of changed files changed=($("$QA_DIR/changed_files" "*.py")) @@ -36,6 +32,10 @@ if [[ ! "${changed[@]}" ]]; then exit 0 fi +# Move to root directory of Spack +# Allows script to be run from anywhere +cd "$SPACK_ROOT" + function cleanup { # Restore original package files after modifying them. for file in "${changed[@]}"; do diff --git a/var/spack/repos/builtin/packages/atlas/package.py b/var/spack/repos/builtin/packages/atlas/package.py index b31e365336..89b61b3ecb 100644 --- a/var/spack/repos/builtin/packages/atlas/package.py +++ b/var/spack/repos/builtin/packages/atlas/package.py @@ -37,6 +37,9 @@ class Atlas(Package): """ homepage = "http://math-atlas.sourceforge.net/" + version('3.10.3', 'd6ce4f16c2ad301837cfb3dade2f7cef', + url='https://sourceforge.net/projects/math-atlas/files/Stable/3.10.3/atlas3.10.3.tar.bz2') + version('3.10.2', 'a4e21f343dec8f22e7415e339f09f6da', url='https://sourceforge.net/projects/math-atlas/files/Stable/3.10.2/atlas3.10.2.tar.bz2', preferred=True) # not all packages (e.g. 
Trilinos@12.6.3) stopped using deprecated in 3.6.0 diff --git a/var/spack/repos/builtin/packages/boost/package.py b/var/spack/repos/builtin/packages/boost/package.py index 22351a2c3a..90fe28fc2c 100644 --- a/var/spack/repos/builtin/packages/boost/package.py +++ b/var/spack/repos/builtin/packages/boost/package.py @@ -110,12 +110,14 @@ class Boost(Package): description="Additionally build shared libraries") variant('multithreaded', default=True, description="Build multi-threaded versions of libraries") - variant('singlethreaded', default=True, + variant('singlethreaded', default=False, description="Build single-threaded versions of libraries") variant('icu', default=False, description="Build with Unicode and ICU suport") variant('graph', default=False, description="Build the Boost Graph library") + variant('taggedlayout', default=False, + description="Augment library names with build options") depends_on('icu4c', when='+icu') depends_on('python', when='+python') @@ -208,12 +210,20 @@ class Boost(Package): if '+singlethreaded' in spec: threadingOpts.append('single') if not threadingOpts: - raise RuntimeError("""At least one of {singlethreaded, - multithreaded} must be enabled""") + raise RuntimeError("At least one of {singlethreaded, " + + "multithreaded} must be enabled") + + if '+taggedlayout' in spec: + layout = 'tagged' + else: + if len(threadingOpts) > 1: + raise RuntimeError("Cannot build both single and " + + "multi-threaded targets with system layout") + layout = 'system' options.extend([ 'link=%s' % ','.join(linkTypes), - '--layout=tagged' + '--layout=%s' % layout ]) if not spec.satisfies('%intel'): @@ -223,6 +233,12 @@ class Boost(Package): return threadingOpts + def add_buildopt_symlinks(self, prefix): + with working_dir(prefix.lib): + for lib in os.listdir(os.curdir): + prefix, remainder = lib.split('.', 1) + symlink(lib, '%s-mt.%s' % (prefix, remainder)) + def install(self, spec, prefix): # On Darwin, Boost expects the Darwin libtool. However, one of the # dependencies may have pulled in Spack's GNU libtool, and these two @@ -281,11 +297,16 @@ class Boost(Package): threadingOpts = self.determine_b2_options(spec, b2_options) + b2('--clean') + # In theory it could be done on one call but it fails on # Boost.MPI if the threading options are not separated. 
for threadingOpt in threadingOpts: b2('install', 'threading=%s' % threadingOpt, *b2_options) + if '+multithreaded' in spec and '~taggedlayout' in spec: + self.add_buildopt_symlinks(prefix) + # The shared libraries are not installed correctly # on Darwin; correct this if (sys.platform == 'darwin') and ('+shared' in spec): diff --git a/var/spack/repos/builtin/packages/cantera/package.py b/var/spack/repos/builtin/packages/cantera/package.py index 9a9bc5bdbd..66e1197669 100644 --- a/var/spack/repos/builtin/packages/cantera/package.py +++ b/var/spack/repos/builtin/packages/cantera/package.py @@ -96,8 +96,7 @@ class Cantera(Package): options.extend([ 'build_thread_safe=yes', 'boost_inc_dir={0}'.format(spec['boost'].prefix.include), - 'boost_lib_dir={0}'.format(spec['boost'].prefix.lib), - 'boost_thread_lib=boost_thread-mt,boost_system-mt' + 'boost_lib_dir={0}'.format(spec['boost'].prefix.lib) ]) else: options.append('build_thread_safe=no') diff --git a/var/spack/repos/builtin/packages/cgal/package.py b/var/spack/repos/builtin/packages/cgal/package.py index d986f77487..ebfd3f9250 100644 --- a/var/spack/repos/builtin/packages/cgal/package.py +++ b/var/spack/repos/builtin/packages/cgal/package.py @@ -85,7 +85,7 @@ class Cgal(Package): else: options.append('-DBUILD_SHARED_LIBS:BOOL=OFF') - cmake('.', *options) - - make() - make('install') + with working_dir('spack-build', create=True): + cmake('..', *options) + make() + make('install') diff --git a/var/spack/repos/builtin/packages/charm/package.py b/var/spack/repos/builtin/packages/charm/package.py index d67ac80de1..1ec53c2138 100644 --- a/var/spack/repos/builtin/packages/charm/package.py +++ b/var/spack/repos/builtin/packages/charm/package.py @@ -23,6 +23,7 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## +import os import platform import shutil import sys @@ -47,6 +48,8 @@ class Charm(Package): # Support OpenMPI; see # <https://charm.cs.illinois.edu/redmine/issues/1206> patch("mpi.patch") + # Ignore compiler warnings while configuring + patch("strictpass.patch") # Communication mechanisms (choose exactly one) # TODO: Support Blue Gene/Q PAMI, Cray GNI, Cray shmem, CUDA @@ -169,4 +172,19 @@ class Charm(Package): # this wouldn't be difficult. build = Executable(join_path(".", "build")) build(target, version, *options) + + # Charm++'s install script does not copy files, it only creates + # symbolic links. Fix this. + for dirpath, dirnames, filenames in os.walk(prefix): + for filename in filenames: + filepath = join_path(dirpath, filename) + if os.path.islink(filepath): + tmppath = filepath + ".tmp" + # Skip dangling symbolic links + try: + shutil.copy2(filepath, tmppath) + os.remove(filepath) + os.rename(tmppath, filepath) + except: + pass shutil.rmtree(join_path(prefix, "tmp")) diff --git a/var/spack/repos/builtin/packages/charm/strictpass.patch b/var/spack/repos/builtin/packages/charm/strictpass.patch new file mode 100644 index 0000000000..44aa4fbd38 --- /dev/null +++ b/var/spack/repos/builtin/packages/charm/strictpass.patch @@ -0,0 +1,16 @@ +--- old/src/scripts/configure ++++ new/src/scripts/configure +@@ -2146,13 +2146,6 @@ + test_result $? 
"$1" "$2" "$3" + strictpass=$pass + strictfail=$fail +- if test $pass -eq 1 +- then +- if cat out | grep -i "warn" > /dev/null 2>&1 +- then +- strictpass="0" && strictfail="1" +- fi +- fi + cat out >> $charmout + /bin/rm -f out + } diff --git a/var/spack/repos/builtin/packages/converge/package.py b/var/spack/repos/builtin/packages/converge/package.py new file mode 100644 index 0000000000..429be8542d --- /dev/null +++ b/var/spack/repos/builtin/packages/converge/package.py @@ -0,0 +1,69 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * +from distutils.dir_util import copy_tree +import os + + +class Converge(Package): + """CONVERGE is a revolutionary computational fluid dynamics (CFD) program + that eliminates the grid generation bottleneck from the simulation process. + CONVERGE was developed by engine simulation experts and is straightforward + to use for both engine and non-engine simulations. Unlike many CFD + programs, CONVERGE automatically generates a perfectly orthogonal, + structured grid at runtime based on simple, user-defined grid control + parameters. This grid generation method completely eliminates the need to + manually generate a grid. In addition, CONVERGE offers many other features + to expedite the setup process and to ensure that your simulations are as + computationally efficient as possible. + + Note: CONVERGE is licensed software. You will need to create an account on + the CONVERGE homepage and download CONVERGE yourself. Spack will search + your current directory for the download file. Alternatively, add this file + to a mirror so that Spack can find it. 
For instructions on how to set up a + mirror, see http://spack.readthedocs.io/en/latest/mirrors.html""" + + homepage = "https://www.convergecfd.com/" + url = "file://%s/converge_install_2.3.16.tar.gz" % os.getcwd() + + version('2.3.16', '8b80f1e73a63181c427c7732ad279986') + + variant('mpi', default=True, description='Build with MPI support') + + # The Converge Getting Started Guide recommends: + # MPICH: 3.1.4 + # HP-MPI: 2.0.3+ + # OpenMPI: 1.6.* + depends_on('mpi', when='+mpi') + + # Licensing + license_required = True + license_comment = '#' + license_files = ['license/license.lic'] + license_vars = ['RLM_LICENSE'] + license_url = 'http://www.reprisesoftware.com/RLM_License_Administration.pdf' + + def install(self, spec, prefix): + copy_tree('.', prefix) diff --git a/var/spack/repos/builtin/packages/dealii/package.py b/var/spack/repos/builtin/packages/dealii/package.py index 0d293e5d92..1dee94bfff 100644 --- a/var/spack/repos/builtin/packages/dealii/package.py +++ b/var/spack/repos/builtin/packages/dealii/package.py @@ -32,6 +32,10 @@ class Dealii(Package): homepage = "https://www.dealii.org" url = "https://github.com/dealii/dealii/releases/download/v8.4.1/dealii-8.4.1.tar.gz" + # Don't add RPATHs to this package for the full build DAG. + # only add for immediate deps. + transitive_rpaths = False + version('8.4.2', '84c6bd3f250d3e0681b645d24cb987a7') version('8.4.1', 'efbaf16f9ad59cfccad62302f36c3c1d') version('8.4.0', 'ac5dbf676096ff61e092ce98c80c2b00') diff --git a/var/spack/repos/builtin/packages/everytrace-example/package.py b/var/spack/repos/builtin/packages/everytrace-example/package.py new file mode 100644 index 0000000000..8a85423192 --- /dev/null +++ b/var/spack/repos/builtin/packages/everytrace-example/package.py @@ -0,0 +1,46 @@ +############################################################################## +# Copyright (c) 2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class EverytraceExample(CMakePackage): + """Get stack trace EVERY time a program exits.""" + + homepage = "https://github.com/citibeth/everytrace-example" + version('develop', + git='https://github.com/citibeth/everytrace-example.git', + branch='develop') + + depends_on('cmake', type='build') + depends_on('everytrace+mpi+fortran') + + # Currently the only MPI this everytrace works with. 
+ depends_on('openmpi') + + def configure_args(self): + return [] + + def setup_environment(self, spack_env, env): + env.prepend_path('PATH', join_path(self.prefix, 'bin')) diff --git a/var/spack/repos/builtin/packages/everytrace/package.py b/var/spack/repos/builtin/packages/everytrace/package.py new file mode 100644 index 0000000000..ee1a058009 --- /dev/null +++ b/var/spack/repos/builtin/packages/everytrace/package.py @@ -0,0 +1,52 @@ +############################################################################## +# Copyright (c) 2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Everytrace(CMakePackage): + """Get stack trace EVERY time a program exits.""" + + homepage = "https://github.com/citibeth/everytrace" + url = "https://github.com/citibeth/everytrace/tarball/0.2.0" + + version('0.2.0', '2af0e5b6255064d5191accebaa70d222') + version('develop', + git='https://github.com/citibeth/everytrace.git', branch='develop') + + variant('mpi', default=True, description='Enables MPI parallelism') + variant('fortran', default=True, + description='Enable use with Fortran programs') + + depends_on('cmake', type='build') + depends_on('mpi', when='+mpi') + + def configure_args(self): + spec = self.spec + return [ + '-DUSE_MPI=%s' % ('YES' if '+mpi' in spec else 'NO'), + '-DUSE_FORTRAN=%s' % ('YES' if '+fortran' in spec else 'NO')] + + def setup_environment(self, spack_env, env): + env.prepend_path('PATH', join_path(self.prefix, 'bin')) diff --git a/var/spack/repos/builtin/packages/fenics/package.py b/var/spack/repos/builtin/packages/fenics/package.py index fd9a37df1b..1dc302fdff 100644 --- a/var/spack/repos/builtin/packages/fenics/package.py +++ b/var/spack/repos/builtin/packages/fenics/package.py @@ -143,7 +143,7 @@ class Fenics(Package): def install(self, spec, prefix): for package in ['ufl', 'ffc', 'fiat', 'instant']: with working_dir(join_path('depends', package)): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) cmake_args = [ '-DCMAKE_BUILD_TYPE:STRING={0}'.format( diff --git a/var/spack/repos/builtin/packages/flint/package.py b/var/spack/repos/builtin/packages/flint/package.py new file mode 100644 index 0000000000..c39b17db2c --- /dev/null +++ b/var/spack/repos/builtin/packages/flint/package.py @@ -0,0 +1,66 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore 
National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Flint(Package): + """FLINT (Fast Library for Number Theory).""" + + homepage = "http://www.flintlib.org" + url = "http://mirrors.mit.edu/sage/spkg/upstream/flint/flint-2.5.2.tar.gz" + + version('2.5.2', 'cda885309362150196aed66a5e0f0383') + version('2.4.5', '6504b9deabeafb9313e57153a1730b33') + version('develop', git='https://github.com/wbhart/flint2.git') + + # Overlap in functionality between gmp and mpir + # All other dependencies must also be built with + # one or the other + # variant('mpir', default=False, + # description='Compile with the MPIR library') + + # Build dependencies + depends_on('autoconf', type='build') + + # Other dependencies + depends_on('gmp') # mpir is a drop-in replacement for this + depends_on('mpfr') # Could also be built against mpir + + def install(self, spec, prefix): + options = [] + options = ["--prefix=%s" % prefix, + "--with-gmp=%s" % spec['gmp'].prefix, + "--with-mpfr=%s" % spec['mpfr'].prefix] + + # if '+mpir' in spec: + # options.extend([ + # "--with-mpir=%s" % spec['mpir'].prefix + # ]) + + configure(*options) + make() + if self.run_tests: + make("check") + make("install") diff --git a/var/spack/repos/builtin/packages/gmp/package.py b/var/spack/repos/builtin/packages/gmp/package.py index ddc2dbb935..de872f09e0 100644 --- a/var/spack/repos/builtin/packages/gmp/package.py +++ b/var/spack/repos/builtin/packages/gmp/package.py @@ -40,4 +40,9 @@ class Gmp(AutotoolsPackage): depends_on('m4', type='build') def configure_args(self): - return ['--enable-cxx'] + args = ['--enable-cxx'] + # We need this flag if we want all the following checks to pass. 
+ if spec.compiler.name == 'intel': + args.append('CXXFLAGS=-no-ftz') + + return args diff --git a/var/spack/repos/builtin/packages/gsl/package.py b/var/spack/repos/builtin/packages/gsl/package.py index 574d3b9402..da17c4a330 100644 --- a/var/spack/repos/builtin/packages/gsl/package.py +++ b/var/spack/repos/builtin/packages/gsl/package.py @@ -38,6 +38,7 @@ class Gsl(Package): homepage = "http://www.gnu.org/software/gsl" url = "http://mirror.switch.ch/ftp/mirror/gnu/gsl/gsl-2.1.tar.gz" + version('2.2.1', '3d90650b7cfe0a6f4b29c2d7b0f86458') version('2.1', 'd8f70abafd3e9f0bae03c52d1f4e8de5') version('2.0', 'ae44cdfed78ece40e73411b63a78c375') version('1.16', 'e49a664db13d81c968415cd53f62bc8b') diff --git a/var/spack/repos/builtin/packages/hdf/package.py b/var/spack/repos/builtin/packages/hdf/package.py index 9a44184256..2554bd0f96 100644 --- a/var/spack/repos/builtin/packages/hdf/package.py +++ b/var/spack/repos/builtin/packages/hdf/package.py @@ -34,13 +34,14 @@ class Hdf(Package): list_url = "https://www.hdfgroup.org/ftp/HDF/releases/" list_depth = 3 + version('4.2.12', '79fd1454c899c05e34a3da0456ab0c1c') version('4.2.11', '063f9928f3a19cc21367b71c3b8bbf19') variant('szip', default=False, description="Enable szip support") - depends_on('jpeg') + depends_on('jpeg@6b:') depends_on('szip', when='+szip') - depends_on('zlib') + depends_on('zlib@1.1.4:') depends_on('bison', type='build') depends_on('flex', type='build') @@ -48,9 +49,9 @@ class Hdf(Package): def install(self, spec, prefix): config_args = [ 'CFLAGS=-fPIC', - '--prefix=%s' % prefix, - '--with-jpeg=%s' % spec['jpeg'].prefix, - '--with-zlib=%s' % spec['zlib'].prefix, + '--prefix={0}'.format(prefix), + '--with-jpeg={0}'.format(spec['jpeg'].prefix), + '--with-zlib={0}'.format(spec['zlib'].prefix), '--disable-netcdf', # must be disabled to build NetCDF with HDF4 '--enable-fortran', '--disable-shared', # fortran and shared libs are not compatible @@ -58,12 +59,17 @@ class Hdf(Package): '--enable-production' ] - # SZip support + # Szip support if '+szip' in spec: - config_args.append('--with-szlib=%s' % spec['szip'].prefix) + config_args.append('--with-szlib={0}'.format(spec['szip'].prefix)) + else: + config_args.append('--without-szlib') configure(*config_args) make() - make('check') + + if self.run_tests: + make('check') + make('install') diff --git a/var/spack/repos/builtin/packages/hdf5/package.py b/var/spack/repos/builtin/packages/hdf5/package.py index b5a7a65083..6b18aa4ab8 100644 --- a/var/spack/repos/builtin/packages/hdf5/package.py +++ b/var/spack/repos/builtin/packages/hdf5/package.py @@ -60,7 +60,7 @@ class Hdf5(AutotoolsPackage): depends_on('mpi', when='+mpi') depends_on('szip', when='+szip') - depends_on('zlib') + depends_on('zlib@1.1.2:') @AutotoolsPackage.precondition('configure') def validate(self): @@ -123,16 +123,14 @@ class Hdf5(AutotoolsPackage): # this is not actually a problem. 
extra_args.extend([ "--enable-parallel", - "CC=%s" % join_path(spec['mpi'].prefix.bin, "mpicc"), + "CC=%s" % spec['mpi'].mpicc ]) if '+cxx' in spec: - extra_args.append("CXX=%s" % join_path(spec['mpi'].prefix.bin, - "mpic++")) + extra_args.append("CXX=%s" % spec['mpi'].mpicxx) if '+fortran' in spec: - extra_args.append("FC=%s" % join_path(spec['mpi'].prefix.bin, - "mpifort")) + extra_args.append("FC=%s" % spec['mpi'].mpifc) if '+szip' in spec: extra_args.append("--with-szlib=%s" % spec['szip'].prefix) @@ -172,7 +170,7 @@ HDF5 version {version} {version} with open("check.c", 'w') as f: f.write(source) if '+mpi' in spec: - cc = which(join_path(spec['mpi'].prefix.bin, "mpicc")) + cc = which('%s' % spec['mpi'].mpicc) else: cc = which('cc') # TODO: Automate these path and library settings diff --git a/var/spack/repos/builtin/packages/hpx5/package.py b/var/spack/repos/builtin/packages/hpx5/package.py index 686e959719..fe75c256f8 100644 --- a/var/spack/repos/builtin/packages/hpx5/package.py +++ b/var/spack/repos/builtin/packages/hpx5/package.py @@ -37,15 +37,16 @@ class Hpx5(Package): applications enabling scientists to write code that performs and scales better than contemporary runtimes.""" homepage = "http://hpx.crest.iu.edu" - url = "http://hpx.crest.iu.edu/release/hpx-2.0.0.tar.gz" + url = "http://hpx.crest.iu.edu/release/hpx-3.1.0.tar.gz" + version('3.1.0', '9e90b8ac46788c009079632828c77628') version('2.0.0', '3d2ff3aab6c46481f9ec65c5b2bfe7a6') version('1.3.0', '2260ecc7f850e71a4d365a43017d8cee') version('1.2.0', '4972005f85566af4afe8b71afbf1480f') version('1.1.0', '646afb460ecb7e0eea713a634933ce4f') version('1.0.0', '8020822adf6090bd59ed7fe465f6c6cb') - variant('debug', default=False, description='Build a debug version of HPX-5') + variant('debug', default=False, description='Build debug version of HPX-5') variant('photon', default=False, description='Enable Photon support') variant('mpi', default=False, description='Enable MPI support') diff --git a/var/spack/repos/builtin/packages/hypre/package.py b/var/spack/repos/builtin/packages/hypre/package.py index f22303a864..53c3b851b3 100644 --- a/var/spack/repos/builtin/packages/hypre/package.py +++ b/var/spack/repos/builtin/packages/hypre/package.py @@ -61,9 +61,9 @@ class Hypre(Package): configure_args = [ '--prefix=%s' % prefix, '--with-lapack-libs=%s' % ' '.join(lapack.names), - '--with-lapack-lib-dirs=%s' % spec['lapack'].prefix.lib, + '--with-lapack-lib-dirs=%s' % ' '.join(lapack.directories), '--with-blas-libs=%s' % ' '.join(blas.names), - '--with-blas-lib-dirs=%s' % spec['blas'].prefix.lib + '--with-blas-lib-dirs=%s' % ' '.join(blas.directories) ] if '+shared' in self.spec: diff --git a/var/spack/repos/builtin/packages/libcerf/package.py b/var/spack/repos/builtin/packages/libcerf/package.py index 7fb47f8dcd..1964f03b95 100644 --- a/var/spack/repos/builtin/packages/libcerf/package.py +++ b/var/spack/repos/builtin/packages/libcerf/package.py @@ -38,6 +38,12 @@ class Libcerf(Package): version('1.3', 'b3504c467204df71e62aeccf73a25612') def install(self, spec, prefix): - configure('--prefix=%s' % prefix) + options = [] + # Clang reports unused functions as errors, see + # http://clang.debian.net/status.php?version=3.8.1&key=UNUSED_FUNCTION + if spec.satisfies('%clang'): + options.append('CFLAGS=-Wno-unused-function') + + configure('--prefix=%s' % prefix, *options) make() make("install") diff --git a/var/spack/repos/builtin/packages/libjpeg-turbo/package.py b/var/spack/repos/builtin/packages/libjpeg-turbo/package.py index 
6252a88542..3fe159d7b9 100644 --- a/var/spack/repos/builtin/packages/libjpeg-turbo/package.py +++ b/var/spack/repos/builtin/packages/libjpeg-turbo/package.py @@ -26,20 +26,25 @@ from spack import * class LibjpegTurbo(Package): - """libjpeg-turbo is a fork of the original IJG libjpeg which uses - SIMD to accelerate baseline JPEG compression and - decompression. libjpeg is a library that implements JPEG image - encoding, decoding and transcoding.""" + """libjpeg-turbo is a fork of the original IJG libjpeg which uses SIMD to + accelerate baseline JPEG compression and decompression. libjpeg is a + library that implements JPEG image encoding, decoding and + transcoding.""" + homepage = "http://libjpeg-turbo.virtualgl.org" url = "http://downloads.sourceforge.net/libjpeg-turbo/libjpeg-turbo-1.3.1.tar.gz" + version('1.5.0', '3fc5d9b6a8bce96161659ae7a9939257') version('1.3.1', '2c3a68129dac443a72815ff5bb374b05') - # Can use either of these. - depends_on("yasm", type='build') + # Can use either of these. But in the current version of the package + # only nasm is used. In order to use yasm an environmental variable + # NASM must be set. + # TODO: Implement the selection between two supported assemblers. + # depends_on("yasm", type='build') depends_on("nasm", type='build') def install(self, spec, prefix): - configure("--prefix=%s" % prefix) + configure("--prefix=" + prefix) make() make("install") diff --git a/var/spack/repos/builtin/packages/libsplash/package.py b/var/spack/repos/builtin/packages/libsplash/package.py index 21a6eede3f..c20e6fe192 100644 --- a/var/spack/repos/builtin/packages/libsplash/package.py +++ b/var/spack/repos/builtin/packages/libsplash/package.py @@ -37,6 +37,10 @@ class Libsplash(Package): homepage = "https://github.com/ComputationalRadiationPhysics/libSplash" url = "https://github.com/ComputationalRadiationPhysics/libSplash/archive/v1.4.0.tar.gz" + version('dev', branch='dev', + git='https://github.com/ComputationalRadiationPhysics/libSplash.git') + version('master', branch='master', + git='https://github.com/ComputationalRadiationPhysics/libSplash.git') version('1.4.0', '2de37bcef6fafa1960391bf44b1b50e0') version('1.3.1', '524580ba088d97253d03b4611772f37c') version('1.2.4', '3fccb314293d22966beb7afd83b746d0') diff --git a/var/spack/repos/builtin/packages/libtiff/package.py b/var/spack/repos/builtin/packages/libtiff/package.py index cef9fcaae5..6c282dee7c 100644 --- a/var/spack/repos/builtin/packages/libtiff/package.py +++ b/var/spack/repos/builtin/packages/libtiff/package.py @@ -27,9 +27,10 @@ from spack import * class Libtiff(Package): """libtiff graphics format library""" - homepage = "http://www.remotesensing.org/libtiff/" - url = "http://download.osgeo.org/libtiff/tiff-4.0.3.tar.gz" + homepage = "http://www.simplesystems.org/libtiff/" + url = "ftp://download.osgeo.org/libtiff/tiff-4.0.3.tar.gz" + version('4.0.6', 'd1d2e940dea0b5ad435f21f03d96dd72') version('4.0.3', '051c1068e6a0627f461948c365290410') depends_on('jpeg') diff --git a/var/spack/repos/builtin/packages/mfem/package.py b/var/spack/repos/builtin/packages/mfem/package.py index cbf9059f65..9aa4ab8cf0 100644 --- a/var/spack/repos/builtin/packages/mfem/package.py +++ b/var/spack/repos/builtin/packages/mfem/package.py @@ -23,8 +23,6 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * -import glob -import string class Mfem(Package): @@ -35,11 +33,11 @@ class Mfem(Package): version('3.2', 
'2938c3deed4ec4f7fd5b5f5cfe656845282e86e2dcd477d292390058b7b94340', - url='http://goo.gl/Y9T75B', expand=False, preferred=True) + url='http://goo.gl/Y9T75B', preferred=True, extension='.tar.gz') version('3.1', '841ea5cf58de6fae4de0f553b0e01ebaab9cd9c67fa821e8a715666ecf18fc57', - url='http://goo.gl/xrScXn', expand=False) + url='http://goo.gl/xrScXn', extension='.tar.gz') # version('3.1', git='https://github.com/mfem/mfem.git', # commit='dbae60fe32e071989b52efaaf59d7d0eb2a3b574') @@ -48,8 +46,11 @@ class Mfem(Package): variant('suite-sparse', default=False, description='Activate support for SuiteSparse') variant('mpi', default=False, description='Activate support for MPI') + variant('superlu-dist', default=False, + description='Activate support for SuperLU_Dist') variant('lapack', default=False, description='Activate support for LAPACK') variant('debug', default=False, description='Build debug version') + variant('netcdf', default=False, description='Activate NetCDF support') depends_on('blas', when='+lapack') depends_on('lapack', when='+lapack') @@ -68,6 +69,12 @@ class Mfem(Package): depends_on('metis@5:', when='+suite-sparse ^suite-sparse@4.5:') depends_on('cmake', when='^metis@5:', type='build') + depends_on('superlu-dist', when='@3.2: +superlu-dist') + + depends_on('netcdf', when='@3.2: +netcdf') + depends_on('zlib', when='@3.2: +netcdf') + depends_on('hdf5', when='@3.2: +netcdf') + def check_variants(self, spec): if '+mpi' in spec and ('+hypre' not in spec or '+metis' not in spec): raise InstallError('mfem+mpi must be built with +hypre ' + @@ -81,6 +88,12 @@ class Mfem(Package): raise InstallError('To work around CMake bug with clang, must ' + 'build mfem with mfem[+variants] %clang ' + '^cmake %gcc to force CMake to build with gcc') + if '@:3.1' in spec and '+superlu-dist' in spec: + raise InstallError('MFEM does not support SuperLU_DIST for ' + + 'versions 3.1 and earlier') + if '@:3.1' in spec and '+netcdf' in spec: + raise InstallError('MFEM does not support NetCDF for versions' + + '3.1 and earlier') return def install(self, spec, prefix): @@ -102,7 +115,14 @@ class Mfem(Package): 'HYPRE_LIB=-L%s' % spec['hypre'].prefix.lib + ' -lHYPRE']) - if '+metis' in spec: + if 'parmetis' in spec: + metis_lib = '-L%s -lparmetis -lmetis' % spec['parmetis'].prefix.lib + metis_str = 'MFEM_USE_METIS_5=YES' + options.extend([metis_str, + 'METIS_DIR=%s' % spec['parmetis'].prefix, + 'METIS_OPT=-I%s' % spec['parmetis'].prefix.include, + 'METIS_LIB=%s' % metis_lib]) + elif 'metis' in spec: metis_lib = '-L%s -lmetis' % spec['metis'].prefix.lib if spec['metis'].satisfies('@5:'): metis_str = 'MFEM_USE_METIS_5=YES' @@ -114,14 +134,27 @@ class Mfem(Package): 'METIS_OPT=-I%s' % spec['metis'].prefix.include, 'METIS_LIB=%s' % metis_lib]) - if '+mpi' in spec: + if 'mpi' in spec: options.extend(['MFEM_USE_MPI=YES']) + if '+superlu-dist' in spec: + superlu_lib = '-L%s' % spec['superlu-dist'].prefix.lib + superlu_lib += ' -lsuperlu_dist' + sl_inc = 'SUPERLU_OPT=-I%s' % spec['superlu-dist'].prefix.include + options.extend(['MFEM_USE_SUPERLU=YES', + 'SUPERLU_DIR=%s' % spec['superlu-dist'].prefix, + sl_inc, + 'SUPERLU_LIB=%s' % superlu_lib]) + if '+suite-sparse' in spec: ssp = spec['suite-sparse'].prefix ss_lib = '-L%s' % ssp.lib - ss_lib += (' -lumfpack -lcholmod -lcolamd -lamd -lcamd' + - ' -lccolamd -lsuitesparseconfig') + + if '@3.2:' in spec: + ss_lib += ' -lklu -lbtf' + + ss_lib += (' -lumfpack -lcholmod -lcolamd' + + ' -lamd -lcamd -lccolamd -lsuitesparseconfig') no_librt_archs = ['darwin-i686', 
'darwin-x86_64'] no_rt = any(map(lambda a: spec.satisfies('=' + a), @@ -135,16 +168,23 @@ class Mfem(Package): 'SUITESPARSE_OPT=-I%s' % ssp.include, 'SUITESPARSE_LIB=%s' % ss_lib]) + if '+netcdf' in spec: + np = spec['netcdf'].prefix + zp = spec['zlib'].prefix + h5p = spec['hdf5'].prefix + nlib = '-L%s -lnetcdf ' % np.lib + nlib += '-L%s -lhdf5_hl -lhdf5 ' % h5p.lib + nlib += '-L%s -lz' % zp.lib + options.extend(['MFEM_USE_NETCDF=YES', + 'NETCDF_DIR=%s' % np, + 'HDF5_DIR=%s' % h5p, + 'ZLIB_DIR=%s' % zp, + 'NETCDF_OPT=-I%s' % np.include, + 'NETCDF_LIB=%s' % nlib]) + if '+debug' in spec: options.extend(['MFEM_DEBUG=YES']) - # Dirty hack to cope with URL redirect - tgz_file = string.split(self.url, '/')[-1] - tar = which('tar') - tar('xzvf', tgz_file) - cd(glob.glob('mfem*')[0]) - # End dirty hack to cope with URL redirect - make('config', *options) make('all') diff --git a/var/spack/repos/builtin/packages/mkl/package.py b/var/spack/repos/builtin/packages/mkl/package.py index 8a31858bf5..4a9a8785f7 100644 --- a/var/spack/repos/builtin/packages/mkl/package.py +++ b/var/spack/repos/builtin/packages/mkl/package.py @@ -66,13 +66,6 @@ class Mkl(IntelInstaller): for f in os.listdir(mkl_dir): os.symlink(os.path.join(mkl_dir, f), os.path.join(self.prefix, f)) - # Unfortunately MKL libs are natively distrubted in prefix/lib/intel64. - # To make MKL play nice with Spack, symlink all files to prefix/lib: - mkl_lib_dir = os.path.join(prefix, "lib", "intel64") - for f in os.listdir(mkl_lib_dir): - os.symlink(os.path.join(mkl_lib_dir, f), - os.path.join(self.prefix, "lib", f)) - def setup_dependent_environment(self, spack_env, run_env, dependent_spec): # set up MKLROOT for everyone using MKL package spack_env.set('MKLROOT', self.prefix) diff --git a/var/spack/repos/builtin/packages/mpc/package.py b/var/spack/repos/builtin/packages/mpc/package.py index 71cacd5dfe..2fe3900981 100644 --- a/var/spack/repos/builtin/packages/mpc/package.py +++ b/var/spack/repos/builtin/packages/mpc/package.py @@ -35,12 +35,12 @@ class Mpc(Package): version('1.0.3', 'd6a1d5f8ddea3abd2cc3e98f58352d26') version('1.0.2', '68fadff3358fb3e7976c7a398a0af4c3') - depends_on("gmp") - depends_on("mpfr") + depends_on('gmp') # mpir is a drop-in replacement for this + depends_on('mpfr') # Could also be built against mpir def url_for_version(self, version): if version < Version("1.0.1"): - return "http://www.multiprecision.org/mpc/download/mpc-%s.tar.gz" % version + return "http://www.multiprecision.org/mpc/download/mpc-%s.tar.gz" % version # NOQA else: return "ftp://ftp.gnu.org/gnu/mpc/mpc-%s.tar.gz" % version diff --git a/var/spack/repos/builtin/packages/mpfr/package.py b/var/spack/repos/builtin/packages/mpfr/package.py index 5777cd1926..4612d03849 100644 --- a/var/spack/repos/builtin/packages/mpfr/package.py +++ b/var/spack/repos/builtin/packages/mpfr/package.py @@ -35,7 +35,7 @@ class Mpfr(Package): version('3.1.3', '5fdfa3cfa5c86514ee4a241a1affa138') version('3.1.2', 'ee2c3ac63bf0c2359bf08fc3ee094c19') - depends_on('gmp') + depends_on('gmp') # mpir is a drop-in replacement for this def install(self, spec, prefix): configure("--prefix=%s" % prefix) diff --git a/var/spack/repos/builtin/packages/mpich/package.py b/var/spack/repos/builtin/packages/mpich/package.py index a36ab4206e..bb034f9fc7 100644 --- a/var/spack/repos/builtin/packages/mpich/package.py +++ b/var/spack/repos/builtin/packages/mpich/package.py @@ -27,9 +27,10 @@ from spack import * class Mpich(Package): """MPICH is a high performance and widely portable implementation of - the 
Message Passing Interface (MPI) standard.""" + the Message Passing Interface (MPI) standard.""" + homepage = "http://www.mpich.org" - url = "http://www.mpich.org/static/downloads/3.0.4/mpich-3.0.4.tar.gz" + url = "http://www.mpich.org/static/downloads/3.0.4/mpich-3.0.4.tar.gz" list_url = "http://www.mpich.org/static/downloads/" list_depth = 2 @@ -41,10 +42,10 @@ class Mpich(Package): version('3.1', '5643dd176499bfb7d25079aaff25f2ec') version('3.0.4', '9c5d5d4fe1e17dd12153f40bc5b6dbc0') - variant('verbs', default=False, - description='Build support for OpenFabrics verbs.') - variant('pmi', default=True, description='Build with PMI support') - variant('hydra', default=True, description='Build the hydra process manager') + variant('hydra', default=True, description='Build the hydra process manager') + variant('pmi', default=True, description='Build with PMI support') + variant('romio', default=True, description='Enable ROMIO MPI I/O implementation') + variant('verbs', default=False, description='Build support for OpenFabrics verbs.') provides('mpi@:3.0', when='@3:') provides('mpi@:1.3', when='@1:') @@ -62,26 +63,32 @@ class Mpich(Package): spack_env.set('MPICH_FC', spack_fc) def setup_dependent_package(self, module, dep_spec): - self.spec.mpicc = join_path(self.prefix.bin, 'mpicc') - self.spec.mpicxx = join_path(self.prefix.bin, 'mpic++') - self.spec.mpifc = join_path(self.prefix.bin, 'mpif90') - self.spec.mpif77 = join_path(self.prefix.bin, 'mpif77') + # Is this a Cray machine? (TODO: We need a better test than this.) + if os.environ.get('CRAYPE_VERSION'): + self.spec.mpicc = spack_cc + self.spec.mpicxx = spack_cxx + self.spec.mpifc = spack_fc + self.spec.mpif77 = spack_f77 + else: + self.spec.mpicc = join_path(self.prefix.bin, 'mpicc') + self.spec.mpicxx = join_path(self.prefix.bin, 'mpic++') + self.spec.mpifc = join_path(self.prefix.bin, 'mpif90') + self.spec.mpif77 = join_path(self.prefix.bin, 'mpif77') + self.spec.mpicxx_shared_libs = [ join_path(self.prefix.lib, 'libmpicxx.{0}'.format(dso_suffix)), join_path(self.prefix.lib, 'libmpi.{0}'.format(dso_suffix)) ] def install(self, spec, prefix): - config_args = ["--prefix=" + prefix, - "--with-pmi=" + ("yes" if '+pmi' in spec else 'no'), - "--with-pm=" + ('hydra' if '+hydra' in spec else 'no'), - "--enable-shared"] - - # Variants - if '+verbs' in spec: - config_args.append("--with-ibverbs") - else: - config_args.append("--without-ibverbs") + config_args = [ + '--prefix={0}'.format(prefix), + '--enable-shared', + '--with-pm={0}'.format('hydra' if '+hydra' in spec else 'no'), + '--with-pmi={0}'.format('yes' if '+pmi' in spec else 'no'), + '--{0}-romio'.format('enable' if '+romio' in spec else 'disable'), + '--{0}-ibverbs'.format('with' if '+verbs' in spec else 'without') + ] # TODO: Spack should make it so that you can't actually find # these compilers if they're "disabled" for the current @@ -96,32 +103,33 @@ class Mpich(Package): config_args.append("--disable-fortran") configure(*config_args) + make() - make("install") + make('check') + make('install') - self.filter_compilers() + self.filter_compilers(prefix) - def filter_compilers(self): + def filter_compilers(self, prefix): """Run after install to make the MPI compilers use the - compilers that Spack built the package with. - - If this isn't done, they'll have CC, CXX, F77, and FC set - to Spack's generic cc, c++, f77, and f90. We want them to - be bound to whatever compiler they were built with. 
- """ - bin = self.prefix.bin - mpicc = join_path(bin, 'mpicc') - mpicxx = join_path(bin, 'mpicxx') - mpif77 = join_path(bin, 'mpif77') - mpif90 = join_path(bin, 'mpif90') + compilers that Spack built the package with. + + If this isn't done, they'll have CC, CXX, F77, and FC set + to Spack's generic cc, c++, f77, and f90. We want them to + be bound to whatever compiler they were built with.""" + + mpicc = join_path(prefix.bin, 'mpicc') + mpicxx = join_path(prefix.bin, 'mpicxx') + mpif77 = join_path(prefix.bin, 'mpif77') + mpif90 = join_path(prefix.bin, 'mpif90') # Substitute Spack compile wrappers for the real # underlying compiler kwargs = {'ignore_absent': True, 'backup': False, 'string': True} - filter_file(env['CC'], self.compiler.cc, mpicc, **kwargs) + filter_file(env['CC'], self.compiler.cc, mpicc, **kwargs) filter_file(env['CXX'], self.compiler.cxx, mpicxx, **kwargs) filter_file(env['F77'], self.compiler.f77, mpif77, **kwargs) - filter_file(env['FC'], self.compiler.fc, mpif90, **kwargs) + filter_file(env['FC'], self.compiler.fc, mpif90, **kwargs) # Remove this linking flag if present # (it turns RPATH into RUNPATH) diff --git a/var/spack/repos/builtin/packages/mpir/package.py b/var/spack/repos/builtin/packages/mpir/package.py new file mode 100644 index 0000000000..b939a690b2 --- /dev/null +++ b/var/spack/repos/builtin/packages/mpir/package.py @@ -0,0 +1,62 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Mpir(Package): + """Multiple Precision Integers and Rationals.""" + + homepage = "https://github.com/wbhart/mpir" + url = "https://github.com/wbhart/mpir/archive/mpir-2.7.0.tar.gz" + + version('2.7.0', '985b5d57bd0e74c74125ee885b9c8f71') + version('2.6.0', 'ec17d6a7e026114ceb734b2466aa0a91') + version('develop', git='https://github.com/wbhart/mpir.git') + + # This setting allows mpir to act as a drop-in replacement for gmp + variant('gmp_compat', default=False, + description='Compile with GMP library compatibility') + + # Build dependencies + depends_on('autoconf', type='build') + + # Other dependencies + depends_on('yasm') + + def install(self, spec, prefix): + # We definitely don't want to have MPIR build its + # own version of YASM. This tries to install it + # to a system directory. 
+ options = ['--prefix={0}'.format(prefix), + '--with-system-yasm'] + + if '+gmp_compat' in spec: + options.extend(['--enable-gmpcompat']) + + configure(*options) + make() + if self.run_tests: + make('check') + make('install') diff --git a/var/spack/repos/builtin/packages/mumps/package.py b/var/spack/repos/builtin/packages/mumps/package.py index 3466f091a0..e0d7a00c55 100644 --- a/var/spack/repos/builtin/packages/mumps/package.py +++ b/var/spack/repos/builtin/packages/mumps/package.py @@ -34,6 +34,8 @@ class Mumps(Package): homepage = "http://mumps.enseeiht.fr" url = "http://mumps.enseeiht.fr/MUMPS_5.0.1.tar.gz" + # Alternate location if main server is down. + # version('5.0.1', 'b477573fdcc87babe861f62316833db0', url='http://pkgs.fedoraproject.org/repo/pkgs/MUMPS/MUMPS_5.0.1.tar.gz/md5/b477573fdcc87babe861f62316833db0/MUMPS_5.0.1.tar.gz') version('5.0.1', 'b477573fdcc87babe861f62316833db0') variant('mpi', default=True, diff --git a/var/spack/repos/builtin/packages/nco/package.py b/var/spack/repos/builtin/packages/nco/package.py index a25d69d9f6..28dfe8f059 100644 --- a/var/spack/repos/builtin/packages/nco/package.py +++ b/var/spack/repos/builtin/packages/nco/package.py @@ -32,6 +32,7 @@ class Nco(Package): homepage = "https://sourceforge.net/projects/nco" url = "https://github.com/nco/nco/archive/4.5.5.tar.gz" + version('4.6.1', 'ef43cc989229c2790a9094bd84728fd8') version('4.5.5', '9f1f1cb149ad6407c5a03c20122223ce') # See "Compilation Requirements" at: @@ -39,18 +40,21 @@ class Nco(Package): variant('mpi', default=True) depends_on('netcdf') - depends_on('netcdf+mpi', when='+mpi') - depends_on('netcdf~mpi', when='~mpi') - depends_on('antlr@2.7.7+cxx') # (required for ncap2) - depends_on('gsl') # (desirable for ncap2) - depends_on('udunits2') # (allows dimensional unit transformations) - # depends_on('opendap') # (enables network transparency), + depends_on('antlr@2.7.7+cxx') # required for ncap2 + depends_on('gsl') # desirable for ncap2 + depends_on('udunits2') # allows dimensional unit transformations + # depends_on('opendap') # enables network transparency def install(self, spec, prefix): + # Workaround until variant forwarding works properly + if '+mpi' in spec and spec.satisfies('^netcdf~mpi'): + raise RuntimeError('Invalid spec. 
Package netcdf requires ' + 'netcdf+mpi, but spec asked for netcdf~mpi.') + opts = [ '--prefix=%s' % prefix, - '--disable-openmp', # TODO: Make this a variant - '--disable-dap', # TODO: Make this a variant + '--disable-openmp', # TODO: Make this a variant + '--disable-dap', # TODO: Make this a variant '--disable-esmf'] configure(*opts) make() diff --git a/var/spack/repos/builtin/packages/netcdf/package.py b/var/spack/repos/builtin/packages/netcdf/package.py index 5dc686c907..d4d7fd1691 100644 --- a/var/spack/repos/builtin/packages/netcdf/package.py +++ b/var/spack/repos/builtin/packages/netcdf/package.py @@ -46,10 +46,10 @@ class Netcdf(Package): depends_on("hdf", when='+hdf4') # Required for DAP support - depends_on("curl") + depends_on("curl@7.18.0:") # Required for NetCDF-4 support - depends_on("zlib") + depends_on("zlib@1.2.5:") depends_on('hdf5') # NetCDF 4.4.0 and prior have compatibility issues with HDF5 1.10 and later @@ -105,7 +105,7 @@ class Netcdf(Package): LDFLAGS.append("-L%s/lib" % spec['hdf'].prefix) LIBS.append("-l%s" % "jpeg") - if 'szip' in spec: + if '+szip' in spec: CPPFLAGS.append("-I%s/include" % spec['szip'].prefix) LDFLAGS.append("-L%s/lib" % spec['szip'].prefix) LIBS.append("-l%s" % "sz") @@ -120,4 +120,8 @@ class Netcdf(Package): configure(*config_args) make() + + if self.run_tests: + make("check") + make("install") diff --git a/var/spack/repos/builtin/packages/nettle/package.py b/var/spack/repos/builtin/packages/nettle/package.py index b4c873a8a1..7e2b758bc0 100644 --- a/var/spack/repos/builtin/packages/nettle/package.py +++ b/var/spack/repos/builtin/packages/nettle/package.py @@ -36,6 +36,7 @@ class Nettle(Package): version('2.7', '2caa1bd667c35db71becb93c5d89737f') depends_on('gmp') + depends_on('m4', type='build') def install(self, spec, prefix): configure('--prefix={0}'.format(prefix)) diff --git a/var/spack/repos/builtin/packages/pango/package.py b/var/spack/repos/builtin/packages/pango/package.py index 75c4ac807d..5cf3edb8da 100644 --- a/var/spack/repos/builtin/packages/pango/package.py +++ b/var/spack/repos/builtin/packages/pango/package.py @@ -41,6 +41,7 @@ class Pango(Package): depends_on("pkg-config", type="build") depends_on("harfbuzz") depends_on("cairo") + depends_on("glib") def install(self, spec, prefix): configure("--prefix=%s" % prefix) diff --git a/var/spack/repos/builtin/packages/piranha/package.py b/var/spack/repos/builtin/packages/piranha/package.py new file mode 100644 index 0000000000..dbf949f000 --- /dev/null +++ b/var/spack/repos/builtin/packages/piranha/package.py @@ -0,0 +1,73 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Piranha(Package): + """Piranha is a computer-algebra library for the symbolic manipulation of + sparse multivariate polynomials and other closely-related symbolic objects + (such as Poisson series).""" + + homepage = "https://bluescarni.github.io/piranha/sphinx/" + url = "https://github.com/bluescarni/piranha/archive/v0.5.tar.gz" + + version('0.5', '99546bae2be115737b6316751eb0b84d') + version('develop', git='https://github.com/bluescarni/piranha.git') + + variant('python', default=True, + description='Build the Python bindings') + + # Build dependencies + depends_on('cmake@3.0:', type='build') + extends('python', when='+pyranha') + depends_on('python@2.6:', type='build', when='+pyranha') + + # Other dependencies + depends_on('boost+iostreams+regex+serialization', + when='~python') + depends_on('boost+iostreams+regex+serialization+python', + when='+python') + depends_on('bzip2') + depends_on('gmp') # mpir is a drop-in replacement for this + depends_on('mpfr') # Could also be built against mpir + + def install(self, spec, prefix): + options = [] + options.extend(std_cmake_args) + + # Python bindings + options.extend([ + '-DBUILD_PYRANHA=%s' % ( + 'ON' if '+python' in spec else 'OFF'), + '-DBUILD_TESTS:BOOL=ON', + ]) + + with working_dir('spack-build', create=True): + cmake('..', *options) + + make() + make('install') + if self.run_tests: + make('test') diff --git a/var/spack/repos/builtin/packages/pixman/package.py b/var/spack/repos/builtin/packages/pixman/package.py index 41aad0c90a..8fe3e1f9d9 100644 --- a/var/spack/repos/builtin/packages/pixman/package.py +++ b/var/spack/repos/builtin/packages/pixman/package.py @@ -23,6 +23,7 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * +import sys class Pixman(Package): @@ -38,8 +39,13 @@ class Pixman(Package): depends_on("libpng") def install(self, spec, prefix): - configure("--prefix=%s" % prefix, - "--disable-mmx", - "--disable-gtk") + config_args = ["--prefix=" + prefix, + "--disable-gtk"] + + if sys.platform == "darwin": + config_args.append("--disable-mmx") + + configure(*config_args) + make() make("install") diff --git a/var/spack/repos/builtin/packages/pngwriter/package.py b/var/spack/repos/builtin/packages/pngwriter/package.py index c51f1f82a8..4c0370a7ef 100644 --- a/var/spack/repos/builtin/packages/pngwriter/package.py +++ b/var/spack/repos/builtin/packages/pngwriter/package.py @@ -38,6 +38,10 @@ class Pngwriter(Package): homepage = "http://pngwriter.sourceforge.net/" url = "https://github.com/pngwriter/pngwriter/archive/0.5.6.tar.gz" + version('dev', branch='dev', + git='https://github.com/pngwriter/pngwriter.git') + version('master', branch='master', + git='https://github.com/pngwriter/pngwriter.git') version('0.5.6', 'c13bd1fdc0e331a246e6127b5f262136') depends_on('cmake', type='build') diff --git a/var/spack/repos/builtin/packages/py-3to2/package.py b/var/spack/repos/builtin/packages/py-3to2/package.py index 1071a3c209..d0b6857aaf 100644 --- a/var/spack/repos/builtin/packages/py-3to2/package.py +++ b/var/spack/repos/builtin/packages/py-3to2/package.py @@ -37,4 +37,4 @@ class 
Py3to2(Package): extends('python') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-SQLAlchemy/package.py b/var/spack/repos/builtin/packages/py-SQLAlchemy/package.py index ca59105b4c..73e144b00e 100644 --- a/var/spack/repos/builtin/packages/py-SQLAlchemy/package.py +++ b/var/spack/repos/builtin/packages/py-SQLAlchemy/package.py @@ -36,4 +36,4 @@ class PySqlalchemy(Package): extends('python') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-argcomplete/package.py b/var/spack/repos/builtin/packages/py-argcomplete/package.py index 2549972a6d..64312da9dc 100644 --- a/var/spack/repos/builtin/packages/py-argcomplete/package.py +++ b/var/spack/repos/builtin/packages/py-argcomplete/package.py @@ -36,4 +36,4 @@ class PyArgcomplete(Package): extends('python') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-astroid/package.py b/var/spack/repos/builtin/packages/py-astroid/package.py index a47b7ffa33..ff5683c705 100644 --- a/var/spack/repos/builtin/packages/py-astroid/package.py +++ b/var/spack/repos/builtin/packages/py-astroid/package.py @@ -42,4 +42,4 @@ class PyAstroid(Package): depends_on('py-six', type=nolink) def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-astropy/package.py b/var/spack/repos/builtin/packages/py-astropy/package.py index 25dce87e49..1565db0bdd 100644 --- a/var/spack/repos/builtin/packages/py-astropy/package.py +++ b/var/spack/repos/builtin/packages/py-astropy/package.py @@ -56,6 +56,5 @@ class PyAstropy(Package): depends_on('expat') def install(self, spec, prefix): - python('setup.py', 'build', '--use-system-cfitsio', - '--use-system-expat') - python('setup.py', 'install', '--prefix={0}'.format(prefix)) + setup_py('build', '--use-system-cfitsio', '--use-system-expat') + setup_py('install', '--prefix={0}'.format(prefix)) diff --git a/var/spack/repos/builtin/packages/py-autopep8/package.py b/var/spack/repos/builtin/packages/py-autopep8/package.py index f6c08e2728..6d093f32f6 100644 --- a/var/spack/repos/builtin/packages/py-autopep8/package.py +++ b/var/spack/repos/builtin/packages/py-autopep8/package.py @@ -1,16 +1,53 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## from spack import * class PyAutopep8(Package): - """Automatic pep8 formatter""" + """autopep8 automatically formats Python code to conform to the + PEP 8 style guide.""" + homepage = "https://github.com/hhatto/autopep8" - url = "https://github.com/hhatto/autopep8/archive/ver1.2.2.tar.gz" + url = "https://github.com/hhatto/autopep8/archive/v1.2.4.tar.gz" + version('1.2.4', '0458db85159a9e1b45f3e71ce6c158da') version('1.2.2', 'def3d023fc9dfd1b7113602e965ad8e1') - extends('python') + extends('python', ignore='bin/pep8') + depends_on('python@2.6:2.7,3.2:') + + depends_on('py-pycodestyle@1.5.7:1.7.0', type=nolink) + depends_on('py-setuptools', type='build') - depends_on('py-pep8', type=nolink) + + def url_for_version(self, version): + url = "https://github.com/hhatto/autopep8/archive/{0}{1}.tar.gz" + if version >= Version('1.2.3'): + return url.format('v', version) + else: + return url.format('ver', version) def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix={0}'.format(prefix)) diff --git a/var/spack/repos/builtin/packages/py-basemap/package.py b/var/spack/repos/builtin/packages/py-basemap/package.py index 9c7f6454df..5b0026eda9 100644 --- a/var/spack/repos/builtin/packages/py-basemap/package.py +++ b/var/spack/repos/builtin/packages/py-basemap/package.py @@ -43,4 +43,4 @@ class PyBasemap(Package): def install(self, spec, prefix): env['GEOS_DIR'] = spec['geos'].prefix - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-beautifulsoup4/package.py b/var/spack/repos/builtin/packages/py-beautifulsoup4/package.py index d3a260bd7f..71014b5bdb 100644 --- a/var/spack/repos/builtin/packages/py-beautifulsoup4/package.py +++ b/var/spack/repos/builtin/packages/py-beautifulsoup4/package.py @@ -40,4 +40,4 @@ class PyBeautifulsoup4(Package): depends_on('py-setuptools', type='build') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix={0}'.format(prefix)) + setup_py('install', '--prefix={0}'.format(prefix)) diff --git a/var/spack/repos/builtin/packages/py-biopython/package.py b/var/spack/repos/builtin/packages/py-biopython/package.py index c3edf9735b..9e1b693a0d 100644 --- a/var/spack/repos/builtin/packages/py-biopython/package.py +++ b/var/spack/repos/builtin/packages/py-biopython/package.py @@ -41,4 +41,4 @@ class PyBiopython(Package): depends_on('py-numpy', type=nolink) def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-blessings/package.py b/var/spack/repos/builtin/packages/py-blessings/package.py index e6fc6aa983..700c0ff4b3 100644 --- a/var/spack/repos/builtin/packages/py-blessings/package.py +++ b/var/spack/repos/builtin/packages/py-blessings/package.py @@ -37,4 +37,4 @@ class PyBlessings(Package): extends("python") def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-bottleneck/package.py b/var/spack/repos/builtin/packages/py-bottleneck/package.py index 
ad2ee749d3..20d78846bb 100644 --- a/var/spack/repos/builtin/packages/py-bottleneck/package.py +++ b/var/spack/repos/builtin/packages/py-bottleneck/package.py @@ -36,4 +36,4 @@ class PyBottleneck(Package): depends_on('py-numpy', type=nolink) def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-cffi/package.py b/var/spack/repos/builtin/packages/py-cffi/package.py index 7c08e51de8..5cd983b343 100644 --- a/var/spack/repos/builtin/packages/py-cffi/package.py +++ b/var/spack/repos/builtin/packages/py-cffi/package.py @@ -49,4 +49,4 @@ class PyCffi(Package): # building the shared library. os.environ['LDSHARED'] = "{0} -shared -pthread".format(spack_cc) - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-configparser/package.py b/var/spack/repos/builtin/packages/py-configparser/package.py new file mode 100644 index 0000000000..1ad4ed619e --- /dev/null +++ b/var/spack/repos/builtin/packages/py-configparser/package.py @@ -0,0 +1,46 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class PyConfigparser(Package): + """This library brings the updated configparser from Python 3.5 to + Python 2.6-3.5.""" + + homepage = "https://pypi.python.org/pypi/configparser" + url = "https://pypi.python.org/packages/source/c/configparser/configparser-3.5.0.tar.gz" + + version('3.5.0', 'cfdd915a5b7a6c09917a64a573140538', + url="https://pypi.python.org/packages/7c/69/c2ce7e91c89dc073eb1aa74c0621c3eefbffe8216b3f9af9d3885265c01c/configparser-3.5.0.tar.gz") + + extends('python') + depends_on('python@2.6:2.7,3.4:') + + depends_on('py-ordereddict', when='^python@2.6:2.6.999', type=nolink) + + depends_on('py-setuptools', type='build') + + def install(self, spec, prefix): + setup_py('install', '--prefix={0}'.format(prefix)) diff --git a/var/spack/repos/builtin/packages/py-coverage/package.py b/var/spack/repos/builtin/packages/py-coverage/package.py index 1a5b6df3d7..8cd8440b7e 100644 --- a/var/spack/repos/builtin/packages/py-coverage/package.py +++ b/var/spack/repos/builtin/packages/py-coverage/package.py @@ -38,4 +38,4 @@ class PyCoverage(Package): extends('python') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-csvkit/package.py b/var/spack/repos/builtin/packages/py-csvkit/package.py index 5f50e3b6c2..5b0394a138 100644 --- a/var/spack/repos/builtin/packages/py-csvkit/package.py +++ b/var/spack/repos/builtin/packages/py-csvkit/package.py @@ -44,4 +44,4 @@ class PyCsvkit(Package): depends_on('py-openpyxl', type=nolink) def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-cython/package.py b/var/spack/repos/builtin/packages/py-cython/package.py index 4b3e1cabe1..e532286081 100644 --- a/var/spack/repos/builtin/packages/py-cython/package.py +++ b/var/spack/repos/builtin/packages/py-cython/package.py @@ -40,4 +40,4 @@ class PyCython(Package): extends('python') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-dask/package.py b/var/spack/repos/builtin/packages/py-dask/package.py index 4bc2c6fc99..c72046b627 100644 --- a/var/spack/repos/builtin/packages/py-dask/package.py +++ b/var/spack/repos/builtin/packages/py-dask/package.py @@ -37,4 +37,4 @@ class PyDask(Package): depends_on('py-setuptools', type='build') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-dateutil/package.py b/var/spack/repos/builtin/packages/py-dateutil/package.py index 40945232c1..f147e2357b 100644 --- a/var/spack/repos/builtin/packages/py-dateutil/package.py +++ b/var/spack/repos/builtin/packages/py-dateutil/package.py @@ -39,4 +39,4 @@ class PyDateutil(Package): depends_on('py-six', type=nolink) def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git 
a/var/spack/repos/builtin/packages/py-dbf/package.py b/var/spack/repos/builtin/packages/py-dbf/package.py index 7f83bce75e..eff893cc82 100644 --- a/var/spack/repos/builtin/packages/py-dbf/package.py +++ b/var/spack/repos/builtin/packages/py-dbf/package.py @@ -37,4 +37,4 @@ class PyDbf(Package): extends('python') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-decorator/package.py b/var/spack/repos/builtin/packages/py-decorator/package.py index 9101b07a0d..30f764edc3 100644 --- a/var/spack/repos/builtin/packages/py-decorator/package.py +++ b/var/spack/repos/builtin/packages/py-decorator/package.py @@ -39,4 +39,4 @@ class PyDecorator(Package): depends_on('py-setuptools', type='build') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-emcee/package.py b/var/spack/repos/builtin/packages/py-emcee/package.py index 397f737cb1..2045ec9d59 100644 --- a/var/spack/repos/builtin/packages/py-emcee/package.py +++ b/var/spack/repos/builtin/packages/py-emcee/package.py @@ -38,4 +38,4 @@ class PyEmcee(Package): depends_on('py-numpy', type=nolink) def install(self, spec, prefix): - python('setup.py', 'install', '--prefix={0}'.format(prefix)) + setup_py('install', '--prefix={0}'.format(prefix)) diff --git a/var/spack/repos/builtin/packages/py-enum34/package.py b/var/spack/repos/builtin/packages/py-enum34/package.py new file mode 100644 index 0000000000..2f294bac12 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-enum34/package.py @@ -0,0 +1,45 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class PyEnum34(Package): + """Python 3.4 Enum backported to 3.3, 3.2, 3.1, 2.7, 2.6, 2.5, and 2.4.""" + + homepage = "https://pypi.python.org/pypi/enum34" + url = "https://pypi.python.org/packages/source/e/enum34/enum34-1.1.6.tar.gz" + + version('1.1.6', '5f13a0841a61f7fc295c514490d120d0', + url="https://pypi.python.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz") + + extends('python') + depends_on('python@2.4:2.8,3.3:') + + depends_on('py-ordereddict', when='^python@:2.6.999', type=nolink) + + depends_on('py-setuptools', type='build') + + def install(self, spec, prefix): + setup_py('install', '--prefix={0}'.format(prefix)) diff --git a/var/spack/repos/builtin/packages/py-epydoc/package.py b/var/spack/repos/builtin/packages/py-epydoc/package.py index 9d4b93dad4..ed490cb396 100644 --- a/var/spack/repos/builtin/packages/py-epydoc/package.py +++ b/var/spack/repos/builtin/packages/py-epydoc/package.py @@ -36,4 +36,4 @@ class PyEpydoc(Package): extends('python') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-flake8/package.py b/var/spack/repos/builtin/packages/py-flake8/package.py index 2fabe03d66..8bb36bc02f 100644 --- a/var/spack/repos/builtin/packages/py-flake8/package.py +++ b/var/spack/repos/builtin/packages/py-flake8/package.py @@ -28,13 +28,40 @@ from spack import * class PyFlake8(Package): """Flake8 is a wrapper around PyFlakes, pep8 and Ned Batchelder's McCabe script.""" - homepage = "http://flake8.readthedocs.io/en/latest/" - url = "https://pypi.python.org/packages/source/f/flake8/flake8-2.5.4.tar.gz" - version('2.5.4', 'a4585b3569b95c3f66acb8294a7f06ef') + homepage = "https://github.com/PyCQA/flake8" + url = "https://github.com/PyCQA/flake8/archive/3.0.4.tar.gz" - extends('python') - depends_on('py-setuptools', type='build') + version('3.0.4', 'cf2a7d8c92070f7b62253404ffb54df7') + version('2.5.4', '366dd1de6c300254c830b81e66979f06') + + extends('python', ignore='bin/(pyflakes|pycodestyle)') + depends_on('python@2.7:2.8,3.4:') + + # Most Python packages only require py-setuptools as a build dependency. + # However, py-flake8 requires py-setuptools during runtime as well. 
+ depends_on('py-setuptools', type=nolink) + + # pyflakes >= 0.8.1, != 1.2.0, != 1.2.1, != 1.2.2, < 1.3.0 + depends_on('py-pyflakes@0.8.1:1.1.0,1.2.3:1.2.3', when='@3.0.4', type=nolink) # noqa + # pyflakes >= 0.8.1, < 1.1 + depends_on('py-pyflakes@0.8.1:1.0.0', when='@2.5.4', type=nolink) + + # pycodestyle >= 2.0.0, < 2.1.0 + depends_on('py-pycodestyle@2.0.0:2.0.999', when='@3.0.4', type=nolink) + # pep8 >= 1.5.7, != 1.6.0, != 1.6.1, != 1.6.2 + depends_on('py-pycodestyle@1.5.7,1.7.0:', when='@2.5.4', type=nolink) + + # mccabe >= 0.5.0, < 0.6.0 + depends_on('py-mccabe@0.5.0:0.5.999', when='@3.0.4', type=nolink) + # mccabe >= 0.2.1, < 0.5 + depends_on('py-mccabe@0.2.1:0.4.0', when='@2.5.4', type=nolink) + + depends_on('py-configparser', when='^python@:3.3.999', type=nolink) + depends_on('py-enum34', when='^python@:3.1.999', type=nolink) + + # TODO: Add test dependencies + # depends_on('py-nose', type='test') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix={0}'.format(prefix)) diff --git a/var/spack/repos/builtin/packages/py-funcsigs/package.py b/var/spack/repos/builtin/packages/py-funcsigs/package.py index c3d37f6b98..b82a37cae9 100644 --- a/var/spack/repos/builtin/packages/py-funcsigs/package.py +++ b/var/spack/repos/builtin/packages/py-funcsigs/package.py @@ -37,4 +37,4 @@ class PyFuncsigs(Package): depends_on('py-setuptools', type='build') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-genshi/package.py b/var/spack/repos/builtin/packages/py-genshi/package.py index ea8d26c796..e7eb7bebdd 100644 --- a/var/spack/repos/builtin/packages/py-genshi/package.py +++ b/var/spack/repos/builtin/packages/py-genshi/package.py @@ -39,4 +39,4 @@ class PyGenshi(Package): depends_on("py-setuptools", type='build') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-gnuplot/package.py b/var/spack/repos/builtin/packages/py-gnuplot/package.py index a448a66e51..cc273103f2 100644 --- a/var/spack/repos/builtin/packages/py-gnuplot/package.py +++ b/var/spack/repos/builtin/packages/py-gnuplot/package.py @@ -37,4 +37,4 @@ class PyGnuplot(Package): depends_on('py-numpy', type=nolink) def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-h5py/package.py b/var/spack/repos/builtin/packages/py-h5py/package.py index 90a67c51bd..0579787ba2 100644 --- a/var/spack/repos/builtin/packages/py-h5py/package.py +++ b/var/spack/repos/builtin/packages/py-h5py/package.py @@ -56,11 +56,10 @@ class PyH5py(Package): depends_on('py-six', type=nolink) def install(self, spec, prefix): - python('setup.py', 'configure', - '--hdf5={0}'.format(spec['hdf5'].prefix)) + setup_py('configure', '--hdf5={0}'.format(spec['hdf5'].prefix)) if '+mpi' in spec: env['CC'] = spec['mpi'].mpicc - python('setup.py', 'configure', '--mpi') + setup_py('configure', '--mpi') - python('setup.py', 'install', '--prefix={0}'.format(prefix)) + setup_py('install', '--prefix={0}'.format(prefix)) diff --git a/var/spack/repos/builtin/packages/py-iminuit/package.py b/var/spack/repos/builtin/packages/py-iminuit/package.py index d7446c06d4..c58b722e3b 100644 --- 
a/var/spack/repos/builtin/packages/py-iminuit/package.py +++ b/var/spack/repos/builtin/packages/py-iminuit/package.py @@ -43,4 +43,4 @@ class PyIminuit(Package): depends_on('py-cython', type='build') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix={0}'.format(prefix)) + setup_py('install', '--prefix={0}'.format(prefix)) diff --git a/var/spack/repos/builtin/packages/py-ipython/package.py b/var/spack/repos/builtin/packages/py-ipython/package.py index ac3291e21e..d7ee2e3c95 100644 --- a/var/spack/repos/builtin/packages/py-ipython/package.py +++ b/var/spack/repos/builtin/packages/py-ipython/package.py @@ -39,4 +39,4 @@ class PyIpython(Package): depends_on('py-setuptools', type=nolink) def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-jdcal/package.py b/var/spack/repos/builtin/packages/py-jdcal/package.py index 32acf75131..60ee91c9b6 100644 --- a/var/spack/repos/builtin/packages/py-jdcal/package.py +++ b/var/spack/repos/builtin/packages/py-jdcal/package.py @@ -36,4 +36,4 @@ class PyJdcal(Package): extends('python') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-lockfile/package.py b/var/spack/repos/builtin/packages/py-lockfile/package.py index b873625bdb..856276ec89 100644 --- a/var/spack/repos/builtin/packages/py-lockfile/package.py +++ b/var/spack/repos/builtin/packages/py-lockfile/package.py @@ -45,4 +45,4 @@ class PyLockfile(Package): depends_on("py-setuptools", type='build') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-logilab-common/package.py b/var/spack/repos/builtin/packages/py-logilab-common/package.py index ac1b933e43..e66cdc8479 100644 --- a/var/spack/repos/builtin/packages/py-logilab-common/package.py +++ b/var/spack/repos/builtin/packages/py-logilab-common/package.py @@ -38,4 +38,4 @@ class PyLogilabCommon(Package): depends_on("py-six", type=nolink) def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-mako/package.py b/var/spack/repos/builtin/packages/py-mako/package.py index 18a8dc0e68..ed4ccc7364 100644 --- a/var/spack/repos/builtin/packages/py-mako/package.py +++ b/var/spack/repos/builtin/packages/py-mako/package.py @@ -38,4 +38,4 @@ class PyMako(Package): extends('python') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-mccabe/package.py b/var/spack/repos/builtin/packages/py-mccabe/package.py new file mode 100644 index 0000000000..ec913acb16 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-mccabe/package.py @@ -0,0 +1,53 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. 
+# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class PyMccabe(Package): + """Ned's script to check McCabe complexity.""" + + homepage = "https://github.com/PyCQA/mccabe" + url = "https://github.com/PyCQA/mccabe/archive/0.5.2.tar.gz" + + version('0.5.2', '3cdf2d7faa1464b18905fe9a7063a632') + version('0.5.1', '864b364829156701bec797712be8ece0') + version('0.5.0', '71c0ce5e5c4676753525154f6c5d3af8') + version('0.4.0', '9cf5712e5f1785aaa27273a4328babe4') + version('0.3.1', '45c48c0978e6fc1f31fedcb918178abb') + version('0.3', 'c583f58ea28be12842c001473d77504d') + version('0.2.1', 'fcba311ebd999f48359a8ab28da94b30') + version('0.2', '36d4808c37e187dbb1fe2373a0ac6645') + version('0.1', '3c9e8e72612a9c01d865630cc569150a') + + extends('python') + depends_on('python@2.7:2.8,3.3:') + + depends_on('py-setuptools', type='build') + + # TODO: Add test dependencies + # depends_on('py-pytest', type='test') + + def install(self, spec, prefix): + setup_py('install', '--prefix={0}'.format(prefix)) diff --git a/var/spack/repos/builtin/packages/py-mistune/package.py b/var/spack/repos/builtin/packages/py-mistune/package.py index 399c10005e..2daee1ed9a 100644 --- a/var/spack/repos/builtin/packages/py-mistune/package.py +++ b/var/spack/repos/builtin/packages/py-mistune/package.py @@ -43,4 +43,4 @@ class PyMistune(Package): depends_on('py-setuptools', type='build') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-mock/package.py b/var/spack/repos/builtin/packages/py-mock/package.py index 2c70535f19..d397b72329 100644 --- a/var/spack/repos/builtin/packages/py-mock/package.py +++ b/var/spack/repos/builtin/packages/py-mock/package.py @@ -40,4 +40,4 @@ class PyMock(Package): depends_on('py-setuptools@17.1:', type='build') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-mpi4py/package.py b/var/spack/repos/builtin/packages/py-mpi4py/package.py index 1f0e52804e..11b1584397 100644 --- a/var/spack/repos/builtin/packages/py-mpi4py/package.py +++ b/var/spack/repos/builtin/packages/py-mpi4py/package.py @@ -43,4 +43,4 @@ class PyMpi4py(Package): depends_on('mpi') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-mpmath/package.py b/var/spack/repos/builtin/packages/py-mpmath/package.py index e5bae34694..846852aeb5 100644 --- a/var/spack/repos/builtin/packages/py-mpmath/package.py +++ b/var/spack/repos/builtin/packages/py-mpmath/package.py @@ -35,4 +35,4 @@ class 
PyMpmath(Package): extends('python') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-mx/package.py b/var/spack/repos/builtin/packages/py-mx/package.py index f5631916f6..e72b281665 100644 --- a/var/spack/repos/builtin/packages/py-mx/package.py +++ b/var/spack/repos/builtin/packages/py-mx/package.py @@ -40,4 +40,4 @@ class PyMx(Package): extends('python') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-mysqldb1/package.py b/var/spack/repos/builtin/packages/py-mysqldb1/package.py index 693fda6dbb..14534a57ca 100644 --- a/var/spack/repos/builtin/packages/py-mysqldb1/package.py +++ b/var/spack/repos/builtin/packages/py-mysqldb1/package.py @@ -36,4 +36,4 @@ class PyMysqldb1(Package): depends_on('py-setuptools', type='build') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-nestle/package.py b/var/spack/repos/builtin/packages/py-nestle/package.py index 81f9fe4d09..03096586b9 100644 --- a/var/spack/repos/builtin/packages/py-nestle/package.py +++ b/var/spack/repos/builtin/packages/py-nestle/package.py @@ -41,4 +41,4 @@ class PyNestle(Package): depends_on('py-scipy', type=nolink) def install(self, spec, prefix): - python('setup.py', 'install', '--prefix={0}'.format(prefix)) + setup_py('install', '--prefix={0}'.format(prefix)) diff --git a/var/spack/repos/builtin/packages/py-netcdf/package.py b/var/spack/repos/builtin/packages/py-netcdf/package.py index e4f67d75a6..497f81f86d 100644 --- a/var/spack/repos/builtin/packages/py-netcdf/package.py +++ b/var/spack/repos/builtin/packages/py-netcdf/package.py @@ -38,4 +38,4 @@ class PyNetcdf(Package): depends_on('netcdf') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-networkx/package.py b/var/spack/repos/builtin/packages/py-networkx/package.py index 79ad420f8f..aaeaac9674 100644 --- a/var/spack/repos/builtin/packages/py-networkx/package.py +++ b/var/spack/repos/builtin/packages/py-networkx/package.py @@ -39,4 +39,4 @@ class PyNetworkx(Package): depends_on('py-setuptools', type='build') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-nose/package.py b/var/spack/repos/builtin/packages/py-nose/package.py index eb3dd77219..f1872c85b4 100644 --- a/var/spack/repos/builtin/packages/py-nose/package.py +++ b/var/spack/repos/builtin/packages/py-nose/package.py @@ -40,4 +40,4 @@ class PyNose(Package): depends_on('py-setuptools', type='build') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-numexpr/package.py b/var/spack/repos/builtin/packages/py-numexpr/package.py index b3b2e1d47d..51b4ef2e92 100644 --- a/var/spack/repos/builtin/packages/py-numexpr/package.py +++ b/var/spack/repos/builtin/packages/py-numexpr/package.py @@ -37,4 +37,4 @@ class PyNumexpr(Package): depends_on('py-numpy', type=nolink) def install(self, spec, prefix): - python('setup.py', 'install', 
'--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-numpy/package.py b/var/spack/repos/builtin/packages/py-numpy/package.py index 0d68a892f2..8cb2331637 100644 --- a/var/spack/repos/builtin/packages/py-numpy/package.py +++ b/var/spack/repos/builtin/packages/py-numpy/package.py @@ -66,21 +66,21 @@ class PyNumpy(Package): 'numpy/core/include') def install(self, spec, prefix): - libraries = [] - library_dirs = [] + # for build notes see http://www.scipy.org/scipylib/building/linux.html + lapackblas = LibraryList('') + if '+lapack' in spec: + lapackblas += spec['lapack'].lapack_libs if '+blas' in spec: - libraries.append('blas') - library_dirs.append(spec['blas'].prefix.lib) - if '+lapack' in spec: - libraries.append('lapack') - library_dirs.append(spec['lapack'].prefix.lib) + lapackblas += spec['blas'].blas_libs if '+blas' in spec or '+lapack' in spec: with open('site.cfg', 'w') as f: f.write('[DEFAULT]\n') - f.write('libraries=%s\n' % ','.join(libraries)) - f.write('library_dirs=%s\n' % ':'.join(library_dirs)) - f.write('rpath=%s\n' % ':'.join(library_dirs)) + f.write('libraries=%s\n' % ','.join(lapackblas.names)) + f.write('library_dirs=%s\n' % ':'.join(lapackblas.directories)) + if not ((platform.system() == "Darwin") and + (platform.mac_ver()[0] == '10.12')): + f.write('rpath=%s\n' % ':'.join(lapackblas.directories)) setup_py('install', '--prefix={0}'.format(prefix)) diff --git a/var/spack/repos/builtin/packages/py-openpyxl/package.py b/var/spack/repos/builtin/packages/py-openpyxl/package.py index fa32cb879f..94949076ef 100644 --- a/var/spack/repos/builtin/packages/py-openpyxl/package.py +++ b/var/spack/repos/builtin/packages/py-openpyxl/package.py @@ -39,4 +39,4 @@ class PyOpenpyxl(Package): depends_on('py-setuptools', type='build') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-ordereddict/package.py b/var/spack/repos/builtin/packages/py-ordereddict/package.py new file mode 100644 index 0000000000..6e038b789c --- /dev/null +++ b/var/spack/repos/builtin/packages/py-ordereddict/package.py @@ -0,0 +1,41 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class PyOrdereddict(Package): + """A drop-in substitute for Py2.7's new collections. + OrderedDict that works in Python 2.4-2.6.""" + + homepage = "https://pypi.python.org/pypi/ordereddict" + url = "https://pypi.python.org/packages/source/o/ordereddict/ordereddict-1.1.tar.gz" + + version('1.1', 'a0ed854ee442051b249bfad0f638bbec') + + extends('python') + depends_on('python@2.4:2.6.999') + + def install(self, spec, prefix): + setup_py('install', '--prefix={0}'.format(prefix)) diff --git a/var/spack/repos/builtin/packages/py-pandas/package.py b/var/spack/repos/builtin/packages/py-pandas/package.py index 37234ae652..ae797cee40 100644 --- a/var/spack/repos/builtin/packages/py-pandas/package.py +++ b/var/spack/repos/builtin/packages/py-pandas/package.py @@ -51,4 +51,4 @@ class PyPandas(Package): depends_on('py-bottleneck', type=nolink) def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-pbr/package.py b/var/spack/repos/builtin/packages/py-pbr/package.py index a0cfe0e5a5..0251e436b1 100644 --- a/var/spack/repos/builtin/packages/py-pbr/package.py +++ b/var/spack/repos/builtin/packages/py-pbr/package.py @@ -38,4 +38,4 @@ class PyPbr(Package): depends_on('py-setuptools', type='build') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-pep8/package.py b/var/spack/repos/builtin/packages/py-pep8/package.py deleted file mode 100644 index 87d1da9ab0..0000000000 --- a/var/spack/repos/builtin/packages/py-pep8/package.py +++ /dev/null @@ -1,15 +0,0 @@ -from spack import * - - -class PyPep8(Package): - """python pep8 format checker""" - homepage = "https://github.com/PyCQA/pycodestyle" - url = "https://github.com/PyCQA/pycodestyle/archive/1.7.0.tar.gz" - - version('1.7.0', '31070a3a6391928893cbf5fa523eb8d9') - - extends('python') - depends_on('py-setuptools', type='build') - - def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-periodictable/package.py b/var/spack/repos/builtin/packages/py-periodictable/package.py index 51d9cc2046..4a5b015e60 100644 --- a/var/spack/repos/builtin/packages/py-periodictable/package.py +++ b/var/spack/repos/builtin/packages/py-periodictable/package.py @@ -39,4 +39,4 @@ class PyPeriodictable(Package): extends('python') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-pexpect/package.py b/var/spack/repos/builtin/packages/py-pexpect/package.py index 8a99c0473b..9e43e4526e 100644 --- a/var/spack/repos/builtin/packages/py-pexpect/package.py +++ b/var/spack/repos/builtin/packages/py-pexpect/package.py @@ -35,4 +35,4 @@ class PyPexpect(Package): extends('python') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-phonopy/package.py 
b/var/spack/repos/builtin/packages/py-phonopy/package.py index d5b3313a98..a4ad03f34d 100644 --- a/var/spack/repos/builtin/packages/py-phonopy/package.py +++ b/var/spack/repos/builtin/packages/py-phonopy/package.py @@ -40,4 +40,4 @@ class PyPhonopy(Package): depends_on('py-pyyaml', type=nolink) def install(self, spec, prefix): - python('setup.py', 'install', '--home=%s' % prefix) + setup_py('install', '--home=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-pil/package.py b/var/spack/repos/builtin/packages/py-pil/package.py index 29a86d1e85..f5d684962d 100644 --- a/var/spack/repos/builtin/packages/py-pil/package.py +++ b/var/spack/repos/builtin/packages/py-pil/package.py @@ -43,4 +43,4 @@ class PyPil(Package): depends_on('python@1.5.2:2.8') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-pillow/package.py b/var/spack/repos/builtin/packages/py-pillow/package.py index 211e3b4199..3125a822b4 100644 --- a/var/spack/repos/builtin/packages/py-pillow/package.py +++ b/var/spack/repos/builtin/packages/py-pillow/package.py @@ -129,5 +129,5 @@ class PyPillow(Package): variants = ['jpeg', 'zlib', 'tiff', 'freetype', 'lcms', 'jpeg2000'] build_args = list(map(variant_to_flag, variants)) - python('setup.py', 'build_ext', *build_args) - python('setup.py', 'install', '--prefix={0}'.format(prefix)) + setup_py('build_ext', *build_args) + setup_py('install', '--prefix={0}'.format(prefix)) diff --git a/var/spack/repos/builtin/packages/py-ply/package.py b/var/spack/repos/builtin/packages/py-ply/package.py index 47cd3b5dc8..d249de64fa 100644 --- a/var/spack/repos/builtin/packages/py-ply/package.py +++ b/var/spack/repos/builtin/packages/py-ply/package.py @@ -35,4 +35,4 @@ class PyPly(Package): extends('python') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-pmw/package.py b/var/spack/repos/builtin/packages/py-pmw/package.py index e0a332a6bf..5173864f62 100644 --- a/var/spack/repos/builtin/packages/py-pmw/package.py +++ b/var/spack/repos/builtin/packages/py-pmw/package.py @@ -36,4 +36,4 @@ class PyPmw(Package): extends('python') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-prettytable/package.py b/var/spack/repos/builtin/packages/py-prettytable/package.py index fa1c17ae8c..55a323a0d6 100644 --- a/var/spack/repos/builtin/packages/py-prettytable/package.py +++ b/var/spack/repos/builtin/packages/py-prettytable/package.py @@ -40,4 +40,4 @@ class PyPrettytable(Package): depends_on("py-setuptools", type='build') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-protobuf/package.py b/var/spack/repos/builtin/packages/py-protobuf/package.py index dd2b5651de..d1186775bb 100644 --- a/var/spack/repos/builtin/packages/py-protobuf/package.py +++ b/var/spack/repos/builtin/packages/py-protobuf/package.py @@ -47,4 +47,4 @@ class PyProtobuf(Package): depends_on('py-setuptools', type='build') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix={0}'.format(prefix)) + setup_py('install', '--prefix={0}'.format(prefix)) diff --git 
a/var/spack/repos/builtin/packages/py-py2neo/package.py b/var/spack/repos/builtin/packages/py-py2neo/package.py index 97632493af..aed0859021 100644 --- a/var/spack/repos/builtin/packages/py-py2neo/package.py +++ b/var/spack/repos/builtin/packages/py-py2neo/package.py @@ -43,4 +43,4 @@ class PyPy2neo(Package): extends("python") def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-pychecker/package.py b/var/spack/repos/builtin/packages/py-pychecker/package.py index e81c3dbc9b..b1f8aad33f 100644 --- a/var/spack/repos/builtin/packages/py-pychecker/package.py +++ b/var/spack/repos/builtin/packages/py-pychecker/package.py @@ -35,4 +35,4 @@ class PyPychecker(Package): extends('python') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-pycodestyle/package.py b/var/spack/repos/builtin/packages/py-pycodestyle/package.py new file mode 100644 index 0000000000..964822aac9 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pycodestyle/package.py @@ -0,0 +1,52 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class PyPycodestyle(Package): + """pycodestyle is a tool to check your Python code against some of the + style conventions in PEP 8. Note: formerly called pep8.""" + + homepage = "https://github.com/PyCQA/pycodestyle" + url = "https://github.com/PyCQA/pycodestyle/archive/2.0.0.tar.gz" + + version('2.0.0', '5c3e90001f538bf3b7896d60e92eb6f6') + version('1.7.0', '31070a3a6391928893cbf5fa523eb8d9') + version('1.6.2', '8df18246d82ddd3d19ffe7518f983955') + version('1.6.1', '9d59bdc7c60f46f7cee86c732e28aa1a') + version('1.6', '340fa7e39bb44fb08db6eddf7cdc880a') + version('1.5.7', '6d0f5fc7d95755999bc9275cad5cbf3e') + version('1.5.6', 'c5c30e3d267b48bf3dfe7568e803a813') + version('1.5.5', 'cfa12df9b86b3a1dfb13aced1927e12f') + version('1.5.4', '3977a760829652543544074c684610ee') + + extends('python') + + # Most Python packages only require py-setuptools as a build dependency. + # However, py-pycodestyle requires py-setuptools during runtime as well. 
+ depends_on('py-setuptools', type=nolink) + + def install(self, spec, prefix): + setup_py('install', '--prefix={0}'.format(prefix)) diff --git a/var/spack/repos/builtin/packages/py-pycparser/package.py b/var/spack/repos/builtin/packages/py-pycparser/package.py index ef1b772ffc..e7b91f4495 100644 --- a/var/spack/repos/builtin/packages/py-pycparser/package.py +++ b/var/spack/repos/builtin/packages/py-pycparser/package.py @@ -36,4 +36,4 @@ class PyPycparser(Package): depends_on('py-setuptools', type='build') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-pydatalog/package.py b/var/spack/repos/builtin/packages/py-pydatalog/package.py index da157d3679..b2203ae04c 100644 --- a/var/spack/repos/builtin/packages/py-pydatalog/package.py +++ b/var/spack/repos/builtin/packages/py-pydatalog/package.py @@ -35,4 +35,4 @@ class PyPydatalog(Package): extends('python') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-pyelftools/package.py b/var/spack/repos/builtin/packages/py-pyelftools/package.py index bf781daf83..96a5645541 100644 --- a/var/spack/repos/builtin/packages/py-pyelftools/package.py +++ b/var/spack/repos/builtin/packages/py-pyelftools/package.py @@ -36,4 +36,4 @@ class PyPyelftools(Package): extends('python') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-pyflakes/package.py b/var/spack/repos/builtin/packages/py-pyflakes/package.py new file mode 100644 index 0000000000..50bc8ab462 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-pyflakes/package.py @@ -0,0 +1,52 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class PyPyflakes(Package): + """A simple program which checks Python source files for errors..""" + + homepage = "https://github.com/PyCQA/pyflakes" + url = "https://github.com/PyCQA/pyflakes/archive/1.3.0.tar.gz" + + version('1.3.0', 'a76173deb7a84fe860c0b60e2fbcdfe2') + version('1.2.3', '2ac2e148a5c46b6bb06c4785be76f7cc') + version('1.2.2', 'fe759b9381a6500e67a2ddbbeb5161a4') + version('1.2.1', '444a06b256e0a70e41c11698b7190e84') + version('1.2.0', '5d1c87bf09696c4c35dc3103f2a1185c') + version('1.1.0', '4e18bf78c0455ebcd41e5d6104392c88') + version('1.0.0', 'e2ea22a825c5100f12e54b71771cde71') + version('0.9.2', 'd02d5f68e944085fd6ec163a34737a96') + version('0.9.1', '8108d2248e93ca6a315fa2dd31ee9bb1') + version('0.9.0', '43c2bcee88606bde55dbf25a253ef886') + + extends('python') + + # Most Python packages only require py-setuptools as a build dependency. + # However, py-pyflakes requires py-setuptools during runtime as well. + depends_on('py-setuptools', type=nolink) + + def install(self, spec, prefix): + setup_py('install', '--prefix={0}'.format(prefix)) diff --git a/var/spack/repos/builtin/packages/py-pylint/package.py b/var/spack/repos/builtin/packages/py-pylint/package.py index c505d44530..10f60f0ab9 100644 --- a/var/spack/repos/builtin/packages/py-pylint/package.py +++ b/var/spack/repos/builtin/packages/py-pylint/package.py @@ -38,4 +38,4 @@ class PyPylint(Package): depends_on('py-setuptools', type='build') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-pypar/package.py b/var/spack/repos/builtin/packages/py-pypar/package.py index 6fef71304c..90d1f957b6 100644 --- a/var/spack/repos/builtin/packages/py-pypar/package.py +++ b/var/spack/repos/builtin/packages/py-pypar/package.py @@ -39,4 +39,4 @@ class PyPypar(Package): def install(self, spec, prefix): with working_dir('source'): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-pyparsing/package.py b/var/spack/repos/builtin/packages/py-pyparsing/package.py index 67d255b02d..c3b4432d33 100644 --- a/var/spack/repos/builtin/packages/py-pyparsing/package.py +++ b/var/spack/repos/builtin/packages/py-pyparsing/package.py @@ -35,4 +35,4 @@ class PyPyparsing(Package): extends('python') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-pytables/package.py b/var/spack/repos/builtin/packages/py-pytables/package.py index f87e74211f..289d76242b 100644 --- a/var/spack/repos/builtin/packages/py-pytables/package.py +++ b/var/spack/repos/builtin/packages/py-pytables/package.py @@ -42,4 +42,4 @@ class PyPytables(Package): def install(self, spec, prefix): env["HDF5_DIR"] = spec['hdf5'].prefix - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-python-daemon/package.py b/var/spack/repos/builtin/packages/py-python-daemon/package.py index a30dc00ba4..c696b7490b 100644 --- 
a/var/spack/repos/builtin/packages/py-python-daemon/package.py +++ b/var/spack/repos/builtin/packages/py-python-daemon/package.py @@ -47,4 +47,4 @@ class PyPythonDaemon(Package): depends_on("py-lockfile", type=nolink) def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-pyyaml/package.py b/var/spack/repos/builtin/packages/py-pyyaml/package.py index 8da391fac1..d760fcaae9 100644 --- a/var/spack/repos/builtin/packages/py-pyyaml/package.py +++ b/var/spack/repos/builtin/packages/py-pyyaml/package.py @@ -35,4 +35,4 @@ class PyPyyaml(Package): extends('python') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-restview/package.py b/var/spack/repos/builtin/packages/py-restview/package.py index 047214c58e..9b0bd437c0 100644 --- a/var/spack/repos/builtin/packages/py-restview/package.py +++ b/var/spack/repos/builtin/packages/py-restview/package.py @@ -38,4 +38,4 @@ class PyRestview(Package): depends_on('py-pygments', type=nolink) def install(self, spec, prefix): - python('setup.py', 'install', '--prefix={0}'.format(prefix)) + setup_py('install', '--prefix={0}'.format(prefix)) diff --git a/var/spack/repos/builtin/packages/py-rpy2/package.py b/var/spack/repos/builtin/packages/py-rpy2/package.py index f86d813766..9db55e949b 100644 --- a/var/spack/repos/builtin/packages/py-rpy2/package.py +++ b/var/spack/repos/builtin/packages/py-rpy2/package.py @@ -44,4 +44,4 @@ class PyRpy2(Package): depends_on('R') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-scientificpython/package.py b/var/spack/repos/builtin/packages/py-scientificpython/package.py index e2273dc164..6e8b9d38c1 100644 --- a/var/spack/repos/builtin/packages/py-scientificpython/package.py +++ b/var/spack/repos/builtin/packages/py-scientificpython/package.py @@ -38,4 +38,4 @@ class PyScientificpython(Package): extends('python') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-scikit-image/package.py b/var/spack/repos/builtin/packages/py-scikit-image/package.py index fbeb5c95ca..124d2ec4b7 100644 --- a/var/spack/repos/builtin/packages/py-scikit-image/package.py +++ b/var/spack/repos/builtin/packages/py-scikit-image/package.py @@ -45,4 +45,4 @@ class PyScikitImage(Package): depends_on('py-setuptools', type='build') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-scikit-learn/package.py b/var/spack/repos/builtin/packages/py-scikit-learn/package.py index 3cd7ea74f3..09318a6c56 100644 --- a/var/spack/repos/builtin/packages/py-scikit-learn/package.py +++ b/var/spack/repos/builtin/packages/py-scikit-learn/package.py @@ -41,4 +41,4 @@ class PyScikitLearn(Package): depends_on('py-scipy', type=nolink) def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-scipy/package.py b/var/spack/repos/builtin/packages/py-scipy/package.py index abb843f8ee..71bf83c3f0 100644 --- 
a/var/spack/repos/builtin/packages/py-scipy/package.py +++ b/var/spack/repos/builtin/packages/py-scipy/package.py @@ -45,15 +45,10 @@ class PyScipy(Package): # Known not to work with 2.23, 2.25 depends_on('binutils@2.26:', type='build') depends_on('py-numpy@1.7.1:+blas+lapack', type=nolink) + depends_on('blas') + depends_on('lapack') def install(self, spec, prefix): - if 'atlas' in spec: - # libatlas.so actually isn't always installed, but this - # seems to make the build autodetect things correctly. - env['ATLAS'] = join_path( - spec['atlas'].prefix.lib, 'libatlas.' + dso_suffix) - else: - env['BLAS'] = spec['blas'].blas_libs.joined() - env['LAPACK'] = spec['lapack'].lapack_libs.joined() - + # NOTE: scipy picks up Blas/Lapack from numpy, see + # http://www.scipy.org/scipylib/building/linux.html#step-4-build-numpy-1-5-0 setup_py('install', '--prefix={0}'.format(prefix)) diff --git a/var/spack/repos/builtin/packages/py-shiboken/package.py b/var/spack/repos/builtin/packages/py-shiboken/package.py index 0713f26ebc..c6c23acd33 100644 --- a/var/spack/repos/builtin/packages/py-shiboken/package.py +++ b/var/spack/repos/builtin/packages/py-shiboken/package.py @@ -64,6 +64,4 @@ class PyShiboken(Package): 'shiboken_postinstall.py') def install(self, spec, prefix): - python('setup.py', 'install', - '--prefix=%s' % prefix, - '--jobs=%s' % make_jobs) + setup_py('install', '--prefix=%s' % prefix, '--jobs=%s' % make_jobs) diff --git a/var/spack/repos/builtin/packages/py-sncosmo/package.py b/var/spack/repos/builtin/packages/py-sncosmo/package.py index feb3856e8b..d5e32fd5e2 100644 --- a/var/spack/repos/builtin/packages/py-sncosmo/package.py +++ b/var/spack/repos/builtin/packages/py-sncosmo/package.py @@ -48,4 +48,4 @@ class PySncosmo(Package): depends_on('py-nestle', type=nolink) def install(self, spec, prefix): - python('setup.py', 'install', '--prefix={0}'.format(prefix)) + setup_py('install', '--prefix={0}'.format(prefix)) diff --git a/var/spack/repos/builtin/packages/py-storm/package.py b/var/spack/repos/builtin/packages/py-storm/package.py index 0e972480f3..74fb2add0d 100644 --- a/var/spack/repos/builtin/packages/py-storm/package.py +++ b/var/spack/repos/builtin/packages/py-storm/package.py @@ -37,4 +37,4 @@ class PyStorm(Package): depends_on('py-setuptools', type='build') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-symengine/package.py b/var/spack/repos/builtin/packages/py-symengine/package.py new file mode 100644 index 0000000000..7f7cd84a77 --- /dev/null +++ b/var/spack/repos/builtin/packages/py-symengine/package.py @@ -0,0 +1,47 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. 
+# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class PySymengine(Package): + """Python wrappers for SymEngine, a symbolic manipulation library.""" + + homepage = "https://github.com/symengine/symengine.py" + url = "https://github.com/symengine/symengine.py/archive/v0.2.0.tar.gz" + + version('0.2.0', 'e1d114fa12be4c8c7e9f24007e07718c') + version('develop', git='https://github.com/symengine/symengine.py.git') + + # Build dependencies + extends('python') + depends_on('python@2.7:2.8,3.3:') + depends_on('py-setuptools', type='build') + depends_on('py-cython@0.19.1:') + depends_on('cmake@2.8.7:', type='build') + depends_on('symengine@0.2.0:') + + def install(self, spec, prefix): + python('setup.py', 'install', '--prefix=%s' % prefix, + '--symengine-dir=%s' % spec['symengine'].prefix) diff --git a/var/spack/repos/builtin/packages/py-sympy/package.py b/var/spack/repos/builtin/packages/py-sympy/package.py index 3d8b86ac4d..7d1b016263 100644 --- a/var/spack/repos/builtin/packages/py-sympy/package.py +++ b/var/spack/repos/builtin/packages/py-sympy/package.py @@ -37,4 +37,4 @@ class PySympy(Package): depends_on('py-mpmath', when='@1.0:') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-tappy/package.py b/var/spack/repos/builtin/packages/py-tappy/package.py index 03e9528ad7..c195d08fd9 100644 --- a/var/spack/repos/builtin/packages/py-tappy/package.py +++ b/var/spack/repos/builtin/packages/py-tappy/package.py @@ -37,4 +37,4 @@ class PyTappy(Package): depends_on('py-setuptools', type='build') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-tuiview/package.py b/var/spack/repos/builtin/packages/py-tuiview/package.py index 5caf3ff143..e20de3ff79 100644 --- a/var/spack/repos/builtin/packages/py-tuiview/package.py +++ b/var/spack/repos/builtin/packages/py-tuiview/package.py @@ -41,4 +41,4 @@ class PyTuiview(Package): depends_on("gdal") def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-twisted/package.py b/var/spack/repos/builtin/packages/py-twisted/package.py index edf1e7b0d7..80023c2123 100644 --- a/var/spack/repos/builtin/packages/py-twisted/package.py +++ b/var/spack/repos/builtin/packages/py-twisted/package.py @@ -38,4 +38,4 @@ class PyTwisted(Package): extends('python') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-unittest2/package.py b/var/spack/repos/builtin/packages/py-unittest2/package.py index 174ab0ca5f..ddd50a37d4 100644 --- a/var/spack/repos/builtin/packages/py-unittest2/package.py +++ 
b/var/spack/repos/builtin/packages/py-unittest2/package.py @@ -38,4 +38,4 @@ class PyUnittest2(Package): depends_on('py-setuptools', type='build') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-unittest2py3k/package.py b/var/spack/repos/builtin/packages/py-unittest2py3k/package.py index 95e6dcbff0..4aee545d74 100644 --- a/var/spack/repos/builtin/packages/py-unittest2py3k/package.py +++ b/var/spack/repos/builtin/packages/py-unittest2py3k/package.py @@ -39,4 +39,4 @@ class PyUnittest2py3k(Package): depends_on('py-setuptools', type='build') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-urwid/package.py b/var/spack/repos/builtin/packages/py-urwid/package.py index 943fb250f6..61dec3f1cd 100644 --- a/var/spack/repos/builtin/packages/py-urwid/package.py +++ b/var/spack/repos/builtin/packages/py-urwid/package.py @@ -37,4 +37,4 @@ class PyUrwid(Package): extends("python") def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-virtualenv/package.py b/var/spack/repos/builtin/packages/py-virtualenv/package.py index 0ed567df95..f373067144 100644 --- a/var/spack/repos/builtin/packages/py-virtualenv/package.py +++ b/var/spack/repos/builtin/packages/py-virtualenv/package.py @@ -38,4 +38,4 @@ class PyVirtualenv(Package): depends_on('py-setuptools', type='build') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-wcsaxes/package.py b/var/spack/repos/builtin/packages/py-wcsaxes/package.py index 9588b879fa..ddc10d3e22 100644 --- a/var/spack/repos/builtin/packages/py-wcsaxes/package.py +++ b/var/spack/repos/builtin/packages/py-wcsaxes/package.py @@ -40,4 +40,4 @@ class PyWcsaxes(Package): depends_on('py-astropy', type=nolink) def install(self, spec, prefix): - python('setup.py', 'install', '--prefix={0}'.format(prefix)) + setup_py('install', '--prefix={0}'.format(prefix)) diff --git a/var/spack/repos/builtin/packages/py-wheel/package.py b/var/spack/repos/builtin/packages/py-wheel/package.py index f0ad340835..ce495bf6d0 100644 --- a/var/spack/repos/builtin/packages/py-wheel/package.py +++ b/var/spack/repos/builtin/packages/py-wheel/package.py @@ -37,4 +37,4 @@ class PyWheel(Package): depends_on('py-setuptools', type='build') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-xlrd/package.py b/var/spack/repos/builtin/packages/py-xlrd/package.py index 81c3c928c0..9638b8a36b 100644 --- a/var/spack/repos/builtin/packages/py-xlrd/package.py +++ b/var/spack/repos/builtin/packages/py-xlrd/package.py @@ -37,4 +37,4 @@ class PyXlrd(Package): extends('python') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/py-yapf/package.py b/var/spack/repos/builtin/packages/py-yapf/package.py index bc26b82b07..eab4d54abc 100644 --- a/var/spack/repos/builtin/packages/py-yapf/package.py +++ b/var/spack/repos/builtin/packages/py-yapf/package.py @@ 
-37,4 +37,4 @@ class PyYapf(Package): depends_on('py-setuptools', type='build') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/scons/package.py b/var/spack/repos/builtin/packages/scons/package.py index 2c32bde4a1..d20e529384 100644 --- a/var/spack/repos/builtin/packages/scons/package.py +++ b/var/spack/repos/builtin/packages/scons/package.py @@ -35,4 +35,4 @@ class Scons(Package): extends('python') def install(self, spec, prefix): - python('setup.py', 'install', '--prefix=%s' % prefix) + setup_py('install', '--prefix=%s' % prefix) diff --git a/var/spack/repos/builtin/packages/sqlite/package.py b/var/spack/repos/builtin/packages/sqlite/package.py index 513f8ec6d4..78c2e14b34 100644 --- a/var/spack/repos/builtin/packages/sqlite/package.py +++ b/var/spack/repos/builtin/packages/sqlite/package.py @@ -23,6 +23,7 @@ # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * +from spack import architecture class Sqlite(Package): @@ -35,7 +36,15 @@ class Sqlite(Package): version('3.8.5', '0544ef6d7afd8ca797935ccc2685a9ed', url='http://www.sqlite.org/2014/sqlite-autoconf-3080500.tar.gz') + def get_arch(self): + arch = architecture.Arch() + arch.platform = architecture.platform() + return str(arch.platform.target('default_target')) + def install(self, spec, prefix): - configure("--prefix=" + prefix) + config = ["--prefix=" + prefix] + if self.get_arch() == 'ppc64le': + config.append("--build=powerpc64le-redhat-linux-gnu") + configure(*config) make() make("install") diff --git a/var/spack/repos/builtin/packages/symengine/package.py b/var/spack/repos/builtin/packages/symengine/package.py new file mode 100644 index 0000000000..f3fc13474c --- /dev/null +++ b/var/spack/repos/builtin/packages/symengine/package.py @@ -0,0 +1,113 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. +# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. 
+# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Symengine(Package): + """SymEngine is a fast symbolic manipulation library, written in C++.""" + + homepage = "https://github.com/symengine/symengine" + url = "https://github.com/symengine/symengine/archive/v0.2.0.tar.gz" + + version('0.2.0', '45401561add36a13c1f0b0c5f8d7422d') + version('0.1.0', '41ad7daed61fc5a77c285eb6c7303425') + version('develop', git='https://github.com/symengine/symengine.git') + + variant('flint', default=True, + description='Compile with Flint integer library') + variant('mpc', default=True, + description='Compile with MPC library') + variant('mpfr', default=True, + description='Compile with MPFR library') + variant('piranha', default=False, + description='Compile with Piranha integer library') + variant('thread_safe', default=True, + description='Enable thread safety option') + variant('openmp', default=False, + description='Enable OpenMP support') + variant('shared', default=True, + description='Enables the build of shared libraries') + + # Build dependencies + depends_on('cmake', type='build') + + # Other dependencies + depends_on('gmp') # mpir is a drop-in replacement for this + depends_on('mpc', when='+mpc') # Could also be built against mpir + depends_on('mpfr', when='+mpfr') # Could also be built against mpir + depends_on('flint', when='+flint') # Could also be built against mpir + depends_on('piranha', when='+piranha~flint') # Could also be built against mpir # NOQA + + def install(self, spec, prefix): + options = [] + options.extend(std_cmake_args) + + # CMAKE_BUILD_TYPE should be Debug | Release + for word in options[:]: + if word.startswith('-DCMAKE_BUILD_TYPE'): + options.remove(word) + + # See https://github.com/symengine/symengine/blob/master/README.md + # for build options + options.extend([ + '-DCMAKE_BUILD_TYPE=Release', + '-DWITH_SYMENGINE_RCP:BOOL=ON', + '-DWITH_SYMENGINE_THREAD_SAFE:BOOL=%s' % ( + 'ON' if '+thread_safe' in spec or '+openmp' in spec else 'OFF'), + '-DBUILD_TESTS:BOOL=ON', + '-DBUILD_BENCHMARKS:BOOL=ON', + '-DWITH_MPC:BOOL=%s' % ( + 'ON' if '+mpc' in spec else 'OFF'), + '-DWITH_MPFR:BOOL=%s' % ( + 'ON' if '+mpfr' in spec else 'OFF'), + '-DINTEGER_CLASS:STRING=gmp', + '-DWITH_OPENMP:BOOL=%s' % ( + 'ON' if '+openmp' in spec else 'OFF'), + '-DBUILD_SHARED_LIBS:BOOL=%s' % ( + 'ON' if '+shared' in spec else 'OFF'), + ]) + + if '+flint' in spec: + options.extend([ + '-DWITH_FLINT:BOOL=ON', + '-DINTEGER_CLASS:STRING=flint' + ]) + elif '+piranha' in spec: + options.extend([ + '-DWITH_PIRANHA:BOOL=ON', + '-DINTEGER_CLASS:STRING=piranha' + ]) + else: + options.extend([ + '-DINTEGER_CLASS:STRING=gmp' + ]) + + with working_dir('spack-build', create=True): + cmake('..', *options) + + make() + make('install') + if self.run_tests: + ctest() diff --git a/var/spack/repos/builtin/packages/tethex/package.py b/var/spack/repos/builtin/packages/tethex/package.py new file mode 100644 index 0000000000..624942498e --- /dev/null +++ b/var/spack/repos/builtin/packages/tethex/package.py @@ -0,0 +1,49 @@ +############################################################################## +# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC. +# Produced at the Lawrence Livermore National Laboratory. 
+# +# This file is part of Spack. +# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. +# LLNL-CODE-647188 +# +# For details, see https://github.com/llnl/spack +# Please also see the LICENSE file for our notice and the LGPL. +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU Lesser General Public License (as +# published by the Free Software Foundation) version 2.1, February 1999. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and +# conditions of the GNU Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA +############################################################################## +from spack import * + + +class Tethex(Package): + """Tethex is designed to convert triangular (in 2D) or tetrahedral (in 3D) + Gmsh's mesh to quadrilateral or hexahedral one respectively. These meshes + can be used in software packages working with hexahedrals only - for + example, deal.II. + """ + + homepage = "https://github.com/martemyev/tethex" + url = "https://github.com/martemyev/tethex/archive/v0.0.7.tar.gz" + + version('0.0.7', '6c9e4a18a6637deb4400c6d77ec03184') + version('develop', git='https://github.com/martemyev/tethex.git') + + depends_on('cmake', type='build') + + def install(self, spec, prefix): + cmake('.') + make() + + # install by hand + mkdirp(prefix.bin) + install('tethex', prefix.bin) diff --git a/var/spack/repos/builtin/packages/trilinos/package.py b/var/spack/repos/builtin/packages/trilinos/package.py index 203e1502d6..4c7a94ce3f 100644 --- a/var/spack/repos/builtin/packages/trilinos/package.py +++ b/var/spack/repos/builtin/packages/trilinos/package.py @@ -149,10 +149,10 @@ class Trilinos(Package): '-DMPI_BASE_DIR:PATH=%s' % spec['mpi'].prefix, '-DTPL_ENABLE_BLAS=ON', '-DBLAS_LIBRARY_NAMES=%s' % ';'.join(blas.names), - '-DBLAS_LIBRARY_DIRS=%s' % spec['blas'].prefix.lib, + '-DBLAS_LIBRARY_DIRS=%s' % ';'.join(blas.directories), '-DTPL_ENABLE_LAPACK=ON', '-DLAPACK_LIBRARY_NAMES=%s' % ';'.join(lapack.names), - '-DLAPACK_LIBRARY_DIRS=%s' % spec['lapack'].prefix.lib, + '-DLAPACK_LIBRARY_DIRS=%s' % ';'.join(lapack.directories), '-DTrilinos_ENABLE_EXPLICIT_INSTANTIATION:BOOL=ON', '-DTrilinos_ENABLE_CXX11:BOOL=ON', '-DTPL_ENABLE_Netcdf:BOOL=ON', diff --git a/var/spack/repos/builtin/packages/vtk/package.py b/var/spack/repos/builtin/packages/vtk/package.py index f7015904b0..fe3ebc9536 100644 --- a/var/spack/repos/builtin/packages/vtk/package.py +++ b/var/spack/repos/builtin/packages/vtk/package.py @@ -42,12 +42,16 @@ class Vtk(Package): patch("gcc.patch") + extends('python', when='+python') + depends_on('python', when='+python') depends_on('cmake', type='build') depends_on("qt") # VTK7 defaults to OpenGL2 rendering backend variant('opengl2', default=True, description='Build with OpenGL instead of OpenGL2 backend') + variant('python', default=False, + description='Build the python modules') def install(self, spec, prefix): def feature_to_bool(feature, on='ON', off='OFF'): @@ -59,8 +63,8 @@ class Vtk(Package): cmake_args = [ "..", "-DBUILD_SHARED_LIBS=ON", + "-DVTK_WRAP_PYTHON=" + ("ON" if "+python" in spec else "OFF"), # Disable wrappers for other languages. 
- "-DVTK_WRAP_PYTHON=OFF", "-DVTK_WRAP_JAVA=OFF", "-DVTK_WRAP_TCL=OFF"] cmake_args.extend(std_cmake_args) |