summaryrefslogtreecommitdiff
path: root/lib
diff options
context:
space:
mode:
Diffstat (limited to 'lib')
-rw-r--r--lib/spack/docs/.gitignore1
-rw-r--r--lib/spack/docs/Makefile18
-rw-r--r--lib/spack/docs/basic_usage.rst755
-rw-r--r--lib/spack/docs/command_index.in10
-rw-r--r--lib/spack/docs/conf.py18
-rw-r--r--lib/spack/docs/developer_guide.rst10
-rw-r--r--lib/spack/docs/features.rst2
-rw-r--r--lib/spack/docs/index.rst2
-rw-r--r--lib/spack/docs/mirrors.rst217
-rw-r--r--lib/spack/docs/packaging_guide.rst699
-rw-r--r--lib/spack/docs/site_configuration.rst249
-rwxr-xr-xlib/spack/env/cc472
-rw-r--r--lib/spack/external/argparse.py15
-rw-r--r--lib/spack/llnl/util/filesystem.py49
-rw-r--r--lib/spack/llnl/util/lang.py61
-rw-r--r--lib/spack/llnl/util/link_tree.py197
-rw-r--r--lib/spack/llnl/util/tty/__init__.py75
-rw-r--r--lib/spack/llnl/util/tty/colify.py207
-rw-r--r--lib/spack/llnl/util/tty/color.py26
-rw-r--r--lib/spack/spack/__init__.py6
-rw-r--r--lib/spack/spack/architecture.py2
-rw-r--r--lib/spack/spack/build_environment.py91
-rw-r--r--lib/spack/spack/cmd/__init__.py15
-rw-r--r--lib/spack/spack/cmd/activate.py58
-rw-r--r--lib/spack/spack/cmd/checksum.py37
-rw-r--r--lib/spack/spack/cmd/clean.py28
-rw-r--r--lib/spack/spack/cmd/compiler.py12
-rw-r--r--lib/spack/spack/cmd/create.py40
-rw-r--r--lib/spack/spack/cmd/deactivate.py104
-rw-r--r--lib/spack/spack/cmd/env.py69
-rw-r--r--lib/spack/spack/cmd/extensions.py98
-rw-r--r--lib/spack/spack/cmd/find.py104
-rw-r--r--lib/spack/spack/cmd/graph.py40
-rw-r--r--lib/spack/spack/cmd/info.py94
-rw-r--r--lib/spack/spack/cmd/install.py6
-rw-r--r--lib/spack/spack/cmd/list.py1
-rw-r--r--lib/spack/spack/cmd/location.py47
-rw-r--r--lib/spack/spack/cmd/md5.py1
-rw-r--r--lib/spack/spack/cmd/package-list.py95
-rw-r--r--lib/spack/spack/cmd/pkg.py19
-rw-r--r--lib/spack/spack/cmd/restage.py46
-rw-r--r--lib/spack/spack/cmd/spec.py2
-rw-r--r--lib/spack/spack/cmd/uninstall.py5
-rw-r--r--lib/spack/spack/cmd/urls.py58
-rw-r--r--lib/spack/spack/cmd/versions.py20
-rw-r--r--lib/spack/spack/compilation.py117
-rw-r--r--lib/spack/spack/compiler.py6
-rw-r--r--lib/spack/spack/compilers/__init__.py2
-rw-r--r--lib/spack/spack/concretize.py5
-rw-r--r--lib/spack/spack/directory_layout.py248
-rw-r--r--lib/spack/spack/error.py6
-rw-r--r--lib/spack/spack/fetch_strategy.py57
-rw-r--r--lib/spack/spack/graph.py553
-rw-r--r--lib/spack/spack/hooks/__init__.py12
-rw-r--r--lib/spack/spack/hooks/extensions.py36
-rw-r--r--lib/spack/spack/mirror.py16
-rw-r--r--lib/spack/spack/modules.py8
-rw-r--r--lib/spack/spack/package.py568
-rw-r--r--lib/spack/spack/packages.py58
-rw-r--r--lib/spack/spack/relations.py33
-rw-r--r--lib/spack/spack/spec.py38
-rw-r--r--lib/spack/spack/stage.py5
-rw-r--r--lib/spack/spack/test/__init__.py4
-rw-r--r--lib/spack/spack/test/cc.py130
-rw-r--r--lib/spack/spack/test/git_fetch.py4
-rw-r--r--lib/spack/spack/test/hg_fetch.py4
-rw-r--r--lib/spack/spack/test/link_tree.py153
-rw-r--r--lib/spack/spack/test/mirror.py8
-rw-r--r--lib/spack/spack/test/python_version.py1
-rw-r--r--lib/spack/spack/test/svn_fetch.py4
-rw-r--r--lib/spack/spack/test/url_extrapolate.py130
-rw-r--r--lib/spack/spack/test/url_parse.py32
-rw-r--r--lib/spack/spack/url.py368
-rw-r--r--lib/spack/spack/util/compression.py8
-rw-r--r--lib/spack/spack/util/environment.py9
-rw-r--r--lib/spack/spack/util/web.py40
-rw-r--r--lib/spack/spack/virtual.py45
77 files changed, 5317 insertions, 1572 deletions
diff --git a/lib/spack/docs/.gitignore b/lib/spack/docs/.gitignore
index 7701dd9f12..26c343d3eb 100644
--- a/lib/spack/docs/.gitignore
+++ b/lib/spack/docs/.gitignore
@@ -1,3 +1,4 @@
package_list.rst
+command_index.rst
spack*.rst
_build
diff --git a/lib/spack/docs/Makefile b/lib/spack/docs/Makefile
index e3068ea10c..00203b5b61 100644
--- a/lib/spack/docs/Makefile
+++ b/lib/spack/docs/Makefile
@@ -25,7 +25,19 @@ all: html
# This autogenerates a package list.
#
package_list:
- spack info -r > package_list.rst
+ spack package-list > package_list.rst
+
+#
+# Generate a command index
+#
+command_index:
+ cp command_index.in command_index.rst
+ echo >> command_index.rst
+ grep -ho '.. _spack-.*:' *rst \
+ | perl -pe 's/.. _([^:]*):/ * :ref:`\1`/' \
+ | sort >> command_index.rst
+
+custom_targets: package_list command_index
#
# This creates a git repository and commits generated html docs.
@@ -77,10 +89,10 @@ help:
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
clean:
- -rm -f package_list.rst
+ -rm -f package_list.rst command_index.rst
-rm -rf $(BUILDDIR)/* $(APIDOC_FILES)
-html: apidoc package_list
+html: apidoc custom_targets
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst
index 196b7077f9..ed79790bb9 100644
--- a/lib/spack/docs/basic_usage.rst
+++ b/lib/spack/docs/basic_usage.rst
@@ -4,18 +4,19 @@ Basic usage
=====================
Spack is implemented as a single command (``spack``) with many
-*subcommands*, much like ``git``, ``svn``, ``yum``, or ``apt-get``.
-Only a small subset of commands are needed for typical usage.
-
-This section covers a small set of subcommands that should cover most
-general use cases for Spack.
+*subcommands*. Only a small subset of commands is needed for typical
+usage.
Listing available packages
------------------------------
-The first thing you will likely want to do with spack is find out what
-software is available to install. There are a few relevant commands.
+The first thing you likely want to do with spack is to install some
+software. Before that, you need to know what's available. You can
+see available package names either using the :ref:`package-list`, or
+using the commands below.
+
+.. _spack-list:
``spack list``
~~~~~~~~~~~~~~~~
@@ -26,45 +27,292 @@ Spack can install:
.. command-output:: spack list
The packages are listed by name in alphabetical order. You can also
-do wildcard searches using ``*``:
+do wildcard searches using ``*``:
.. command-output:: spack list m*
.. command-output:: spack list *util*
+.. _spack-info:
``spack info``
~~~~~~~~~~~~~~~~
-To get information on a particular package from the full list, run
-``spack info <package name>``. For example, for ``mpich`` the output
-looks like this:
+To get more information on a particular package from `spack list`, use
+`spack info`. Just supply the name of a package:
.. command-output:: spack info mpich
-This includes basic information about the package: where to download
-it, its dependencies, virtual packages it provides (e.g. an MPI
-implementation will provide the MPI interface), and a text
-description, if one is available. :ref:`Dependencies
-<sec-specs>` and :ref:`virtual dependencies
-<sec-virtual-dependencies>` are described in more detail later.
+Most of the information is self-explanatory. *Safe versions* are
+versions that Spack has a checksum for, and Spack will use the
+checksum to ensure they downloaded without any errors or malicious
+attacks. :ref:`Dependencies <sec-specs>` and :ref:`virtual
+dependencies <sec-virtual-dependencies>`, are described in more detail
+later.
+
+.. _spack-versions:
``spack versions``
~~~~~~~~~~~~~~~~~~~~~~~~
-To see available versions of a package, run ``spack versions``, for
-example:
+To see *more* available versions of a package, run ``spack versions``,
+for example:
.. command-output:: spack versions libelf
-Since it has to manage many different software packages, Spack doesn't
-place many restrictions on what a package version has to look like.
-Packages like ``mpich`` use traditional version numbers like
-``3.0.4``. Other packages, like ``libdwarf`` use date-stamp versions
-like ``20130729``. Versions can contain numbers, letters, dashes,
-underscores, and periods.
+There are two sections in the output. *Safe versions* are ones that
+have already been checksummed. Spack goes a step further, though, and
+also shows you what versions are available out on the web---these are
+*remote versions*. Spack gets this information by scraping it
+directly from web pages. Depending on the package, Spack may or may
+not be able to find any remote versions.
+
+
+Installing and uninstalling
+------------------------------
+
+Now that you know how to list available packages and versions, you're
+ready to start installing things.
+
+.. _spack-install:
+
+``spack install``
+~~~~~~~~~~~~~~~~~~~~~
+
+``spack install`` will install any package shown by ``spack list``.
+To install the latest version of a package, along with all of its
+dependencies, simply give it a package name:
+
+.. code-block:: sh
+
+ $ spack install mpileaks
+
+If `mpileaks` depends on other packages, Spack will install those
+first. It then fetches the tarball for ``mpileaks``, expands it,
+verifies that it was downloaded without errors, builds it, and
+installs it in its own directory under ``$SPACK_HOME/opt``. You'll see
+a number of messages from spack, a lot of build output, and a message
+that the package is installed:
+
+.. code-block:: sh
+
+ $ spack install mpileaks
+ ==> Installing mpileaks
+ ==> mpich is already installed in /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/mpich@3.0.4.
+ ==> callpath is already installed in /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/callpath@1.0.2-5dce4318.
+ ==> adept-utils is already installed in /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/adept-utils@1.0-5adef8da.
+ ==> Trying to fetch from https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz
+ ######################################################################## 100.0%
+ ==> Staging archive: /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7=chaos_5_x86_64_ib-59f6ad23/mpileaks-1.0.tar.gz
+ ==> Created stage in /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7=chaos_5_x86_64_ib-59f6ad23.
+ ==> No patches needed for mpileaks.
+ ==> Building mpileaks.
+
+ ... build output ...
+
+ ==> Successfully installed mpileaks.
+ Fetch: 2.16s. Build: 9.82s. Total: 11.98s.
+ [+] /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/mpileaks@1.0-59f6ad23
+
+The last line, with the ``[+]``, indicates where the package is
+installed.
+
+Building a specific version
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Spack can also build *specific versions* of a package. To do this,
+just add ``@`` after the package name, followed by a version:
+
+.. code-block:: sh
+
+ $ spack install mpich@3.0.4
+
+Any number of versions of the same package can be installed at once
+without interfering with each other. This is good for multi-user
+sites, as installing a version that one user needs will not disrupt
+existing installations for other users.
+
+In addition to different versions, Spack can customize the compiler,
+compile-time options (variants), and platform (for cross compiles) of
+an installation. Spack is unique in that it can also configure the
+*dependencies* a package is built with. For example, two
+configurations of the same version of a package, one built with boost
+1.39.0, and the other version built with version 1.43.0, can coexist.
+
+This can all be done on the command line using special syntax. Spack
+calls the descriptor used to refer to a particular package
+configuration a **spec**. In the command lines above, both
+``mpileaks`` and ``mpileaks@3.0.4`` are specs. Specs are described in
+detail in :ref:`sec-specs`.
+
+.. _spack-uninstall:
+
+``spack uninstall``
+~~~~~~~~~~~~~~~~~~~~~
+
+To uninstall a package, type ``spack uninstall <package>``. This will
+completely remove the directory in which the package was installed.
+
+.. code-block:: sh
+
+ spack uninstall mpich
-Compiler Configuration
+If there are still installed packages that depend on the package to be
+uninstalled, spack will refuse to uninstall it. You can override this
+behavior with ``spack uninstall -f <package>``, but you risk breaking
+other installed packages. In general, it is safer to remove dependent
+packages *before* removing their dependencies.
+
+A line like ``spack uninstall mpich`` may be ambiguous, if multiple
+``mpich`` configurations are installed. For example, if both
+``mpich@3.0.2`` and ``mpich@3.1`` are installed, ``mpich`` could refer
+to either one. Because it cannot determine which one to uninstall,
+Spack will ask you to provide a version number to remove the
+ambiguity. As an example, ``spack uninstall mpich@3.1`` is
+unambiguous in this scenario.
+
+
+Seeing installed packages
+-----------------------------------
+
+We know that ``spack list`` shows you the names of available packages,
+but how do you figure out which are installed?
+
+.. _spack-find:
+
+``spack find``
+~~~~~~~~~~~~~~~~~~~~~~
+
+``spack find`` shows the *specs* of installed packages. A spec is
+like a name, but it has a version, compiler, architecture, and build
+options associated with it. In spack, you can have many installations
+of the same package with different specs.
+
+Running ``spack find`` with no arguments lists installed packages:
+
+.. code-block:: sh
+
+ $ spack find
+ ==> 74 installed packages.
+ -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ ImageMagick@6.8.9-10 libdwarf@20130729 py-dateutil@2.4.0
+ adept-utils@1.0 libdwarf@20130729 py-ipython@2.3.1
+ atk@2.14.0 libelf@0.8.12 py-matplotlib@1.4.2
+ boost@1.55.0 libelf@0.8.13 py-nose@1.3.4
+ bzip2@1.0.6 libffi@3.1 py-numpy@1.9.1
+ cairo@1.14.0 libmng@2.0.2 py-pygments@2.0.1
+ callpath@1.0.2 libpng@1.6.16 py-pyparsing@2.0.3
+ cmake@3.0.2 libtiff@4.0.3 py-pyside@1.2.2
+ dbus@1.8.6 libtool@2.4.2 py-pytz@2014.10
+ dbus@1.9.0 libxcb@1.11 py-setuptools@11.3.1
+ dyninst@8.1.2 libxml2@2.9.2 py-six@1.9.0
+ fontconfig@2.11.1 libxml2@2.9.2 python@2.7.8
+ freetype@2.5.3 llvm@3.0 qhull@1.0
+ gdk-pixbuf@2.31.2 memaxes@0.5 qt@4.8.6
+ glib@2.42.1 mesa@8.0.5 qt@5.4.0
+ graphlib@2.0.0 mpich@3.0.4 readline@6.3
+ gtkplus@2.24.25 mpileaks@1.0 sqlite@3.8.5
+ harfbuzz@0.9.37 mrnet@4.1.0 stat@2.1.0
+ hdf5@1.8.13 ncurses@5.9 tcl@8.6.3
+ icu@54.1 netcdf@4.3.3 tk@src
+ jpeg@9a openssl@1.0.1h vtk@6.1.0
+ launchmon@1.0.1 pango@1.36.8 xcb-proto@1.11
+ lcms@2.6 pixman@0.32.6 xz@5.2.0
+ libdrm@2.4.33 py-dateutil@2.4.0 zlib@1.2.8
+
+ -- chaos_5_x86_64_ib / gcc@4.9.2 --------------------------------
+ libelf@0.8.10 mpich@3.0.4
+
+Packages are divided into groups according to their architecture and
+compiler. Within each group, Spack tries to keep the view simple, and
+only shows the version of installed packages.
+
+In some cases, there may be different configurations of the *same*
+version of a package installed. For example, there are two
+installations of ``libdwarf@20130729`` above. We can look at them
+in more detail using ``spack find -d``, and by asking only to show
+``libdwarf`` packages:
+
+.. code-block:: sh
+
+ $ spack find --deps libdwarf
+ ==> 2 installed packages.
+ -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ libdwarf@20130729-d9b90962
+ ^libelf@0.8.12
+ libdwarf@20130729-b52fac98
+ ^libelf@0.8.13
+
+Now we see that the two instances of ``libdwarf`` depend on
+*different* versions of ``libelf``: 0.8.12 and 0.8.13. This view can
+become complicated for packages with many dependencies. If you just
+want to know whether two packages' dependencies differ, you can use
+``spack find -l``:
+
+.. code-block:: sh
+
+ $ spack find -l libdwarf
+ ==> 2 installed packages.
+ -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ libdwarf@20130729-d9b90962 libdwarf@20130729-b52fac98
+
+Now the ``libdwarf`` installs have hashes after their names. These are
+hashes over all of the dependencies of each package. If the hashes
+are the same, then the packages have the same dependency configuration.
+
+If you want to know the path where each package is installed, you can
+use ``spack find -p``:
+
+.. code-block:: sh
+
+ $ spack find -p
+ ==> 74 installed packages.
+ -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ ImageMagick@6.8.9-10 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/ImageMagick@6.8.9-10-4df950dd
+ adept-utils@1.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/adept-utils@1.0-5adef8da
+ atk@2.14.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/atk@2.14.0-3d09ac09
+ boost@1.55.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/boost@1.55.0
+ bzip2@1.0.6 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/bzip2@1.0.6
+ cairo@1.14.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/cairo@1.14.0-fcc2ab44
+ callpath@1.0.2 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/callpath@1.0.2-5dce4318
+ ...
+
+And, finally, you can restrict your search to a particular package
+by supplying its name:
+
+.. code-block:: sh
+
+ $ spack find -p libelf
+ -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ libelf@0.8.11 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.11
+ libelf@0.8.12 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.12
+ libelf@0.8.13 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.13
+
+``spack find`` actually does a lot more than this. You can use
+*specs* to query for specific configurations and builds of each
+package. If you want to find only libelf versions greater than version
+0.8.12, you could say:
+
+.. code-block:: sh
+
+ $ spack find libelf@0.8.12:
+ -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ libelf@0.8.12 libelf@0.8.13
+
+Finding just the versions of libdwarf built with a particular version
+of libelf would look like this:
+
+.. code-block:: sh
+
+ $ spack find -l libdwarf ^libelf@0.8.12
+ ==> 1 installed packages.
+ -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ libdwarf@20130729-d9b90962
+
+The full spec syntax is discussed in detail in :ref:`sec-specs`.
+
+
+Compiler configuration
-----------------------------------
Spack has the ability to build packages with multiple compilers and
@@ -72,6 +320,8 @@ compiler versions. Spack searches for compilers on your machine
automatically the first time it is run. It does this by inspecting
your path.
+.. _spack-compilers:
+
``spack compilers``
~~~~~~~~~~~~~~~~~~~~~~~
@@ -98,6 +348,8 @@ compilers`` or ``spack compiler list``::
Any of these compilers can be used to build Spack packages. More on
how this is done is in :ref:`sec-specs`.
+.. _spack-compiler-add:
+
``spack compiler add``
~~~~~~~~~~~~~~~~~~~~~~~
@@ -110,15 +362,19 @@ where the compiler is installed. For example::
intel@13.0.079
Or you can run ``spack compiler add`` with no arguments to force
-autodetection. This is useful if you do not know where compilers
-live, but new compilers have been added to your ``PATH``. For
-example, using dotkit, you might do this::
+auto-detection. This is useful if you do not know where compilers are
+installed, but you know that new compilers have been added to your
+``PATH``. For example, using dotkit, you might do this::
- $ use gcc-4.9.0
+ $ module load gcc-4.9.0
$ spack compiler add
==> Added 1 new compiler to /Users/gamblin2/.spackconfig
gcc@4.9.0
+This loads the environment module for gcc-4.9.0 to get it into the
+``PATH``, and then it adds the compiler to Spack.
+
+.. _spack-compiler-info:
``spack compiler info``
~~~~~~~~~~~~~~~~~~~~~~~
@@ -126,20 +382,23 @@ example, using dotkit, you might do this::
If you want to see specifics on a particular compiler, you can run
``spack compiler info`` on it::
- $ spack compiler info intel@12.1.3
- intel@12.1.3:
- cc = /usr/local/bin/icc-12.1.293
- cxx = /usr/local/bin/icpc-12.1.293
- f77 = /usr/local/bin/ifort-12.1.293
- fc = /usr/local/bin/ifort-12.1.293
+ $ spack compiler info intel@15
+ intel@15.0.0:
+ cc = /usr/local/bin/icc-15.0.090
+ cxx = /usr/local/bin/icpc-15.0.090
+ f77 = /usr/local/bin/ifort-15.0.090
+ fc = /usr/local/bin/ifort-15.0.090
This shows which C, C++, and Fortran compilers were detected by Spack.
+Notice also that we didn't have to be too specific about the
+version. We just said ``intel@15``, and information about the only
+matching Intel compiler was displayed.
-Manual configuration
-~~~~~~~~~~~~~~~~~~~~~~~
+Manual compiler configuration
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-If autodetection fails, you can manually conigure a compiler by
+If auto-detection fails, you can manually configure a compiler by
editing your ``~/.spackconfig`` file. You can do this by running
``spack config edit``, which will open the file in your ``$EDITOR``.
@@ -153,8 +412,8 @@ Each compiler configuration in the file looks like this::
fc = /usr/local/bin/ifort-15.0.024-beta
...
-For compilers, like ``clang``, that do not support Fortran, you can simply
-put ``None`` for ``f77`` and ``fc``::
+For compilers, like ``clang``, that do not support Fortran, put
+``None`` for ``f77`` and ``fc``::
[compiler "clang@3.3svn"]
cc = /usr/bin/clang
@@ -163,180 +422,18 @@ put ``None`` for ``f77`` and ``fc``::
fc = None
Once you save the file, the configured compilers will show up in the
-list displayed when you run ``spack compilers``.
-
-
-Seeing installed packages -----------------------------------
-
-``spack find``
-~~~~~~~~~~~~~~~~~~~~~~
-
-The second thing you're likely to want to do with Spack, and the first
-thing users of your system will likely want to do, is to find what
-software is already installed and ready to use. You can do that with
-``spack find``.
-
-Running ``spack find`` with no arguments will list all the installed
-packages:
-
-.. code-block:: sh
-
- $ spack find
- == chaos_5_x86_64_ib ===========================================
- -- gcc@4.4.7 ---------------------------------------------------
- libdwarf@20130207-d9b909
- libdwarf@20130729-d9b909
- libdwarf@20130729-b52fac
- libelf@0.8.11
- libelf@0.8.12
- libelf@0.8.13
-
-Packages are grouped by architecture, then by the compiler used to
-build them, and then by their versions and options. If a package has
-dependencies, there will also be a hash at the end of the name
-indicating the dependency configuration. Packages with the same hash
-have the same dependency configuration. If you want ALL information
-about dependencies, as well, then you can supply ``-l`` or ``--long``:
-
-.. code-block:: sh
-
- $ spack find -l
- == chaos_5_x86_64_ib ===========================================
- -- gcc@4.4.7 ---------------------------------------------------
- libdwarf@20130207
- ^libelf@0.8.12
- libdwarf@20130729
- ^libelf@0.8.12
- libdwarf@20130729
- ^libelf@0.8.13
- libelf@0.8.11
- libelf@0.8.12
- libelf@0.8.13
-
-Now you can see which versions of ``libelf`` each version of
-``libdwarf`` was built with.
-
-If you want to know the path where each of these packages is
-installed, do ``spack find -p`` or ``--path``:
-
-.. code-block:: sh
-
- $ spack find -p
- == chaos_5_x86_64_ib ===========================================
- -- gcc@4.4.7 ---------------------------------------------------
- libdwarf@20130207-d9b909 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libdwarf@20130207-d9b909
- libdwarf@20130729-d9b909 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libdwarf@20130729-d9b909
- libdwarf@20130729-b52fac /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libdwarf@20130729-b52fac
- libelf@0.8.11 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.11
- libelf@0.8.12 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.12
- libelf@0.8.13 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.13
-
-
-And, finally, you can restrict your search to a particular package
-by supplying its name:
-
-.. code-block:: sh
-
- $ spack find -p libelf
- == chaos_5_x86_64_ib ===========================================
- -- gcc@4.4.7 ---------------------------------------------------
- libelf@0.8.11 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.11
- libelf@0.8.12 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.12
- libelf@0.8.13 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.13
-
-
-``spack find`` actually does a lot more than this. You can use
-*specs* to query for specific configurations and builds of each
-package. The full spec syntax is discussed in detail in
-:ref:`sec-specs`.
-
-
-
-Installing and uninstalling
-------------------------------
-
-``spack install``
-~~~~~~~~~~~~~~~~~~~~~
-
-``spack install`` will install any package that appears in the output
-of ``spack list``. To install the latest version of a pacakge and all
-of its dependencies, simply run ``spack install <package>``:
-
-.. code-block:: sh
-
- spack install mpileaks
-
-Spack will fetch the tarball for ``mpileaks``, expand it, verify that
-it was downloaded without errors, build it, and install it in its own
-directory under ``$SPACK_HOME/opt``. If the requested package depends
-on other packages in order to build, Spack fetches them as well, and
-installs them before it installs the requested package. Like the main
-package, each dependency is also installed in its own directory.
-
-Spack can also build *specific* configurations of a package. For
-example, to install something with a specific version, add ``@`` after
-the package name, followed by a version string:
-
-.. code-block:: sh
-
- spack install mpich@3.0.4
-
-Any number of configurations of the same package can be installed at
-once without interfering with each other. This is good for multi-user
-sites, as installing a version that one user needs will not disrupt
-existing installations for other users.
-
-In addition to version configuraitons, Spack can customize the
-compiler, compile-time options (variants), and platform (for cross
-compiles) of an installation. Spack is unique in that it can also
-configure the *dependencies* a package is built with. For example,
-two configurations of the same version of a package, one built with
-boost 1.39.0, and the other version built with version 1.43.0, can
-coexist.
-
-This can all be done on the command line using special syntax. Spack
-calls the descriptor used to refer to a particular package
-configuration a **spec**. In the command lines above, both
-``mpileaks`` and ``mpileaks@3.0.4`` are specs. To customize
-additional properties, simply add more attributes to the spec. Specs
-and their syntax are covered in more detail in :ref:`sec-specs`.
-
-
-``spack uninstall``
-~~~~~~~~~~~~~~~~~~~~~
-
-To uninstall a package, type ``spack uninstall <package>``. This will
-completely remove the directory in which the package was installed.
-
-.. code-block:: sh
-
- spack uninstall mpich
-
-If there are still installed packages that depend on the package to be
-uninstalled, spack will refuse to uninstall. If you know what you're
-doing, you can override this with ``spack uninstall -f <package>``.
-However, running this risks breaking other installed packages. In
-general, it is safer to remove dependent packages *before* removing
-their dependencies.
-
-A line like ``spack uninstall mpich`` may be ambiguous, if multiple
-``mpich`` configurations are installed. For example, if both
-``mpich@3.0.2`` and ``mpich@3.1`` are installed, it could refer to
-either one, and Spack cannot determine which one to uninstall. Spack
-will ask you to provide a version number to remove the ambiguity. For
-example, ``spack uninstall mpich@3.1`` is unambiguous in the above
-scenario.
+list displayed by ``spack compilers``.
.. _sec-specs:
-Specs & Dependencies
+Specs & dependencies
-------------------------
-We now know that ``spack install`` and ``spack uninstall`` both take a
-package name with an optional version specifier. In Spack, that
-descriptor is called a *spec*. Spack uses specs to refer to a
-particular build configuration (or configurations) of a package.
+We know that ``spack install``, ``spack uninstall``, and other
+commands take a package name with an optional version specifier. In
+Spack, that descriptor is called a *spec*. Spack uses specs to refer
+to a particular build configuration (or configurations) of a package.
Specs are more than a package name and a version; you can use them to
specify the compiler, compiler version, architecture, compile options,
and dependency options for a build. In this section, we'll go over
@@ -499,6 +596,11 @@ based on site policies.
Variants
~~~~~~~~~~~~~~~~~~~~~~~
+.. Note::
+
+ Variants are not yet supported, but will be in the next Spack
+ release (0.9), due in Q2 2015.
+
Variants are named options associated with a particular package, and
they can be turned on or off. For example, above, supplying
``+debug`` causes ``mpileaks`` to be built with debug flags. The
@@ -544,6 +646,11 @@ the command line is provided for convenience and legibility.
Architecture specifier
~~~~~~~~~~~~~~~~~~~~~~~
+.. Note::
+
+ Architecture specifiers are part of specs but are not yet
+ functional. They will be in Spack version 1.0, due in Q3 2015.
+
The architecture specifier starts with a ``=`` and also comes after
some package name within a spec. It allows a user to specify a
particular architecture for the package to be built. This is mostly
@@ -627,6 +734,8 @@ any MPI implementation will do. If another package depends on
error. Likewise, if you try to plug in some package that doesn't
provide MPI, Spack will raise an error.
+.. _spack-providers:
+
``spack providers``
~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -646,7 +755,7 @@ versions are now filtered out.
.. _shell-support:
-Environment Modules
+Environment modules
-------------------------------
.. note::
@@ -678,10 +787,6 @@ For ``csh`` and ``tcsh`` run:
You can put the above code in your ``.bashrc`` or ``.cshrc``, and
Spack's shell support will be available on the command line.
-
--------------------------------
-
-
When you install a package with Spack, it automatically generates an
environment module that lets you add the package to your environment.
@@ -698,6 +803,7 @@ The directories are automatically added to your ``MODULEPATH`` and
``DK_NODE`` environment variables when you enable Spack's `shell
support <shell-support_>`_.
+
Using Modules & Dotkits
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -710,7 +816,7 @@ of installed packages.
$ module avail
- ------- /g/g21/gamblin2/src/spack/share/spack/modules/chaos_5_x86_64_ib --------
+ ------- /home/gamblin2/spack/share/spack/modules/chaos_5_x86_64_ib --------
adept-utils@1.0%gcc@4.4.7-5adef8da libelf@0.8.13%gcc@4.4.7
automaded@1.0%gcc@4.4.7-d9691bb0 libelf@0.8.13%intel@15.0.0
boost@1.55.0%gcc@4.4.7 mpc@1.0.2%gcc@4.4.7-559607f5
@@ -845,6 +951,8 @@ if newer, fancier module support is added to Spack at some later date,
you may want to regenerate all the modules to take advantage of these
new features.
+.. _spack-module:
+
``spack module refresh``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -858,9 +966,226 @@ regenerate all module and dotkit files from scratch:
==> Regenerating tcl module files.
==> Regenerating dotkit module files.
+
+.. _extensions:
+
+Extensions & Python support
+------------------------------------
+
+Spack's installation model assumes that each package will live in its
+own install prefix. However, certain packages are typically installed
+*within* the directory hierarchy of other packages. For example,
+modules in interpreted languages like `Python
+<https://www.python.org>`_ are typically installed in the
+``$prefix/lib/python-2.7/site-packages`` directory.
+
+Spack has support for this type of installation as well. In Spack,
+a package that can live inside the prefix of another package is called
+an *extension*. Suppose you have Python installed like so:
+
+.. code-block:: sh
+
+ $ spack find python
+ ==> 1 installed packages.
+ -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ python@2.7.8
+
+.. _spack-extensions:
+
+``spack extensions``
+~~~~~~~~~~~~~~~~~~~~~~~
+
+You can find extensions for your Python installation like this:
+
+.. code-block:: sh
+
+ $ spack extensions python
+ ==> python@2.7.8%gcc@4.4.7=chaos_5_x86_64_ib-703c7a96
+ ==> 36 extensions:
+ geos py-ipython py-pexpect py-pyside py-sip
+ py-basemap py-libxml2 py-pil py-pytz py-six
+ py-biopython py-mako py-pmw py-rpy2 py-sympy
+ py-cython py-matplotlib py-pychecker py-scientificpython py-virtualenv
+ py-dateutil py-mpi4py py-pygments py-scikit-learn
+ py-epydoc py-mx py-pylint py-scipy
+ py-gnuplot py-nose py-pyparsing py-setuptools
+ py-h5py py-numpy py-pyqt py-shiboken
+
+ ==> 12 installed:
+ -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ py-dateutil@2.4.0 py-nose@1.3.4 py-pyside@1.2.2
+ py-dateutil@2.4.0 py-numpy@1.9.1 py-pytz@2014.10
+ py-ipython@2.3.1 py-pygments@2.0.1 py-setuptools@11.3.1
+ py-matplotlib@1.4.2 py-pyparsing@2.0.3 py-six@1.9.0
+
+ ==> None activated.
+
+The extensions are a subset of what's returned by ``spack list``, and
+they are packages like any other. They are installed into their own
+prefixes, and you can see this with ``spack find -p``:
+
+.. code-block:: sh
+
+ $ spack find -p py-numpy
+ ==> 1 installed packages.
+ -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ py-numpy@1.9.1 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/py-numpy@1.9.1-66733244
+
+However, even though this package is installed, you cannot use it
+directly when you run ``python``:
+
+.. code-block:: sh
+
+ $ spack load python
+ $ python
+ Python 2.7.8 (default, Feb 17 2015, 01:35:25)
+ [GCC 4.4.7 20120313 (Red Hat 4.4.7-11)] on linux2
+ Type "help", "copyright", "credits" or "license" for more information.
+ >>> import numpy
+ Traceback (most recent call last):
+ File "<stdin>", line 1, in <module>
+ ImportError: No module named numpy
+ >>>
+
+Extensions & Environment Modules
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+There are two ways to get ``numpy`` working in Python. The first is
+to use :ref:`shell-support`. You can simply ``use`` or ``load`` the
+module for the extension, and it will be added to the ``PYTHONPATH``
+in your current shell.
+
+For tcl modules:
+
+.. code-block:: sh
+
+ $ spack load python
+ $ spack load py-numpy
+
+or, for dotkit:
+
+.. code-block:: sh
+
+ $ spack use python
+ $ spack use py-numpy
+
+Now ``import numpy`` will succeed for as long as you keep your current
+session open.
+
+
+Activating Extensions
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+It is often desirable to have certain packages *always* available as
+part of a Python installation. Spack offers a more permanent solution
+for this case. Instead of requiring users to load particular
+environment modules, you can *activate* the package within the Python
+installation:
+
+.. _spack-activate:
+
+``spack activate``
+^^^^^^^^^^^^^^^^^^^^^^^
+
+.. code-block:: sh
+
+ $ spack activate py-numpy
+ ==> Activated extension py-setuptools@11.3.1%gcc@4.4.7=chaos_5_x86_64_ib-3c74eb69 for python@2.7.8%gcc@4.4.7.
+ ==> Activated extension py-nose@1.3.4%gcc@4.4.7=chaos_5_x86_64_ib-5f70f816 for python@2.7.8%gcc@4.4.7.
+ ==> Activated extension py-numpy@1.9.1%gcc@4.4.7=chaos_5_x86_64_ib-66733244 for python@2.7.8%gcc@4.4.7.
+
+Several things have happened here. The user requested that
+``py-numpy`` be activated in the ``python`` installation it was built
+with. Spack knows that ``py-numpy`` depends on ``py-nose`` and
+``py-setuptools``, so it activated those packages first. Finally,
+once all dependencies were activated in the ``python`` installation,
+``py-numpy`` was activated as well.
+
+If we run ``spack extensions`` again, we now see the three new
+packages listed as activated:
+
+.. code-block:: sh
+
+ $ spack extensions python
+ ==> python@2.7.8%gcc@4.4.7=chaos_5_x86_64_ib-703c7a96
+ ==> 36 extensions:
+ geos py-ipython py-pexpect py-pyside py-sip
+ py-basemap py-libxml2 py-pil py-pytz py-six
+ py-biopython py-mako py-pmw py-rpy2 py-sympy
+ py-cython py-matplotlib py-pychecker py-scientificpython py-virtualenv
+ py-dateutil py-mpi4py py-pygments py-scikit-learn
+ py-epydoc py-mx py-pylint py-scipy
+ py-gnuplot py-nose py-pyparsing py-setuptools
+ py-h5py py-numpy py-pyqt py-shiboken
+
+ ==> 12 installed:
+ -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ py-dateutil@2.4.0 py-nose@1.3.4 py-pyside@1.2.2
+ py-dateutil@2.4.0 py-numpy@1.9.1 py-pytz@2014.10
+ py-ipython@2.3.1 py-pygments@2.0.1 py-setuptools@11.3.1
+ py-matplotlib@1.4.2 py-pyparsing@2.0.3 py-six@1.9.0
+
+ ==> 3 currently activated:
+ -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ py-nose@1.3.4 py-numpy@1.9.1 py-setuptools@11.3.1
+
+
+Now, when a user runs python, ``numpy`` will be available for import
+*without* the user having to explicitly load it. ``python@2.7.8`` now
+acts like a system Python installation with ``numpy`` installed inside
+of it.
+
+Spack accomplishes this by symbolically linking the *entire* prefix of
+the ``py-numpy`` into the prefix of the ``python`` package. To the
+python interpreter, it looks like ``numpy`` is installed in the
+``site-packages`` directory.
+
+The only limitation of activation is that you can only have a *single*
+version of an extension activated at a time. This is because multiple
+versions of the same extension would conflict if symbolically linked
+into the same prefix. Users who want a different version of a package
+can still get it by using environment modules, but they will have to
+explicitly load their preferred version.
+
+``spack activate -f``
+^^^^^^^^^^^^^^^^^^^^^^^^^
+If, for some reason, you want to activate a package *without* its
+dependencies, you can use ``spack activate -f``:
+
+.. code-block:: sh
+
+ $ spack activate -f py-numpy
+ ==> Activated extension py-numpy@1.9.1%gcc@4.4.7=chaos_5_x86_64_ib-66733244 for python@2.7.8%gcc@4.4.7.
+
+.. _spack-deactivate:
+
+``spack deactivate``
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+We've seen how activating an extension can be used to set up a default
+version of a Python module. Obviously, you may want to change that at
+some point. ``spack deactivate`` is the command for this. There are
+several variants:
+
+ * ``spack deactivate <extension>`` will deactivate a single
+ extension. If another activated extension depends on this one,
+ Spack will warn you and exit with an error.
+ * ``spack deactivate -f <extension>`` deactivates an extension
+ regardless of packages that depend on it.
+ * ``spack deactivate -a <extension>`` deactivates an extension and
+ all of its dependencies. Use ``-f`` to disregard dependents.
+ * ``spack deactivate -a <extendee>`` deactivates *all* activated
+ extensions of a package. For example, to deactivate *all* python
+ extensions, use::
+
+ spack deactivate -a python
+
+
Getting Help
-----------------------
+.. _spack-help:
+
``spack help``
~~~~~~~~~~~~~~~~~~~~~~
diff --git a/lib/spack/docs/command_index.in b/lib/spack/docs/command_index.in
new file mode 100644
index 0000000000..94cdf38109
--- /dev/null
+++ b/lib/spack/docs/command_index.in
@@ -0,0 +1,10 @@
+.. _command_index:
+
+Command index
+=================
+
+This is an alphabetical list of commands with links to the places they
+appear in the documentation.
+
+.. hlist::
+ :columns: 3
diff --git a/lib/spack/docs/conf.py b/lib/spack/docs/conf.py
index b4d49c594d..86df113074 100644
--- a/lib/spack/docs/conf.py
+++ b/lib/spack/docs/conf.py
@@ -35,7 +35,9 @@
# All configuration values have a default; values that are commented out
# serve to show the default.
-import sys, os
+import sys
+import os
+import subprocess
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
@@ -43,14 +45,16 @@ import sys, os
sys.path.insert(0, os.path.abspath('exts'))
# Add the Spack bin directory to the path so that we can use its output in docs.
-os.environ['SPACK_ROOT'] = '../../..'
+spack_root = '../../..'
+os.environ['SPACK_ROOT'] = spack_root
os.environ['PATH'] += os.pathsep + '$SPACK_ROOT/bin'
+spack_version = subprocess.Popen(
+ ['spack', '-V'], stderr=subprocess.PIPE).communicate()[1].strip().split('.')
+
# Set an environment variable so that colify will print output like it would to
# a terminal.
-os.environ['COLIFY_TTY'] = 'true'
-os.environ['COLUMNS'] = '80'
-os.environ['LINES'] = '25'
+os.environ['COLIFY_SIZE'] = '25x80'
# Enable todo items
todo_include_todos = True
@@ -97,9 +101,9 @@ copyright = u'2013-2014, Lawrence Livermore National Laboratory'
# built documents.
#
# The short X.Y version.
-version = '1.0'
+version = '.'.join(spack_version[:2])
# The full version, including alpha/beta/rc tags.
-release = '1.0'
+release = '.'.join(spack_version[:2])
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
diff --git a/lib/spack/docs/developer_guide.rst b/lib/spack/docs/developer_guide.rst
index 969ed60b15..db47de80f5 100644
--- a/lib/spack/docs/developer_guide.rst
+++ b/lib/spack/docs/developer_guide.rst
@@ -50,11 +50,11 @@ as a descriptor for one or more instances of that template. Users
express the configuration they want using a spec, and a package turns
the spec into a complete build.
-The obvious difficulty with this design is that users underspecify
+The obvious difficulty with this design is that users under-specify
what they want. To build a software package, the package object needs
a *complete* specification. In Spack, if a spec describes only one
instance of a package, then we say it is **concrete**. If a spec
-could describes many instances, (i.e. it is underspecified in one way
+could describes many instances, (i.e. it is under-specified in one way
or another), then we say it is **abstract**.
Spack's job is to take an *abstract* spec from the user, find a
@@ -92,7 +92,7 @@ with a high level view of Spack's directory structure::
Spack is designed so that it could live within a `standard UNIX
directory hierarchy <http://linux.die.net/man/7/hier>`_, so ``lib``,
``var``, and ``opt`` all contain a ``spack`` subdirectory in case
-Spack is installed alongside other software. Most of the insteresting
+Spack is installed alongside other software. Most of the interesting
parts of Spack live in ``lib/spack``. Files under ``var`` are created
as needed, so there is no ``var`` directory when you initially clone
Spack from the repository.
@@ -123,13 +123,13 @@ Package-related modules
Contains the :class:`Package <spack.package.Package>` class, which
is the superclass for all packages in Spack. Methods on ``Package``
implement all phases of the :ref:`package lifecycle
- <pacakge-lifecycle>` and manage the build process.
+ <package-lifecycle>` and manage the build process.
:mod:`spack.packages`
Contains all of the packages in Spack and methods for managing them.
Functions like :func:`packages.get <spack.packages.get>` and
:func:`class_name_for_package_name
- <packages.class_name_for_package_name>` handle mapping packge module
+ <packages.class_name_for_package_name>` handle mapping package module
names to class names and dynamically instantiating packages by name
from module files.
diff --git a/lib/spack/docs/features.rst b/lib/spack/docs/features.rst
index b39dcd3390..fcb810086d 100644
--- a/lib/spack/docs/features.rst
+++ b/lib/spack/docs/features.rst
@@ -1,4 +1,4 @@
-Feature Overview
+Feature overview
==================
This is a high-level overview of features that make Spack different
diff --git a/lib/spack/docs/index.rst b/lib/spack/docs/index.rst
index 73eff43ab7..2382678cc3 100644
--- a/lib/spack/docs/index.rst
+++ b/lib/spack/docs/index.rst
@@ -46,8 +46,10 @@ Table of Contents
getting_started
basic_usage
packaging_guide
+ mirrors
site_configuration
developer_guide
+ command_index
package_list
API Docs <spack>
diff --git a/lib/spack/docs/mirrors.rst b/lib/spack/docs/mirrors.rst
new file mode 100644
index 0000000000..d732a3dd54
--- /dev/null
+++ b/lib/spack/docs/mirrors.rst
@@ -0,0 +1,217 @@
+.. _mirrors:
+
+Mirrors
+============================
+
+Some sites may not have access to the internet for fetching packages.
+These sites will need a local repository of tarballs from which they
+can get their files. Spack has support for this with *mirrors*. A
+mirror is a URL that points to a directory, either on the local
+filesystem or on some server, containing tarballs for all of Spack's
+packages.
+
+Here's an example of a mirror's directory structure::
+
+ mirror/
+ cmake/
+ cmake-2.8.10.2.tar.gz
+ dyninst/
+ dyninst-8.1.1.tgz
+ dyninst-8.1.2.tgz
+ libdwarf/
+ libdwarf-20130126.tar.gz
+ libdwarf-20130207.tar.gz
+ libdwarf-20130729.tar.gz
+ libelf/
+ libelf-0.8.12.tar.gz
+ libelf-0.8.13.tar.gz
+ libunwind/
+ libunwind-1.1.tar.gz
+ mpich/
+ mpich-3.0.4.tar.gz
+ mvapich2/
+ mvapich2-1.9.tgz
+
+The structure is very simple. There is a top-level directory. The
+second level directories are named after packages, and the third level
+contains tarballs for each package, named after each package.
+
+.. note::
+
+ Archives are **not** named exactly as they were in the package's fetch
+ URL. They have the form ``<name>-<version>.<extension>``, where
+ ``<name>`` is Spack's name for the package, ``<version>`` is the
+ version of the tarball, and ``<extension>`` is whatever format the
+ package's fetch URL contains.
+
+ In order to make mirror creation reasonably fast, we copy the
+ tarball in its original format to the mirror directory, but we do
+ not standardize on a particular compression algorithm, because this
+ would potentially require expanding and re-compressing each archive.
+
+.. _spack-mirror:
+
+``spack mirror``
+----------------------------
+
+Mirrors are managed with the ``spack mirror`` command. The help for
+``spack mirror`` looks like this::
+
+ $ spack mirror -h
+ usage: spack mirror [-h] SUBCOMMAND ...
+
+ positional arguments:
+ SUBCOMMAND
+ create Create a directory to be used as a spack mirror, and fill
+ it with package archives.
+ add Add a mirror to Spack.
+ remove Remove a mirror by name.
+ list Print out available mirrors to the console.
+
+ optional arguments:
+ -h, --help show this help message and exit
+
+The ``create`` command actually builds a mirror by fetching all of its
+packages from the internet and checksumming them.
+
+The other three commands are for managing mirror configuration. They
+control the URL(s) from which Spack downloads its packages.
+
+.. _spack-mirror-create:
+
+``spack mirror create``
+----------------------------
+
+You can create a mirror using the ``spack mirror create`` command, assuming
+you're on a machine where you can access the internet.
+
+The command will iterate through all of Spack's packages and download
+the safe ones into a directory structure like the one above. Here is
+what it looks like:
+
+
+.. code-block:: bash
+
+ $ spack mirror create libelf libdwarf
+ ==> Created new mirror in spack-mirror-2014-06-24
+ ==> Trying to fetch from http://www.mr511.de/software/libelf-0.8.13.tar.gz
+ ########################################################## 81.6%
+ ==> Checksum passed for libelf@0.8.13
+ ==> Added libelf@0.8.13
+ ==> Trying to fetch from http://www.mr511.de/software/libelf-0.8.12.tar.gz
+ ###################################################################### 98.6%
+ ==> Checksum passed for libelf@0.8.12
+ ==> Added libelf@0.8.12
+ ==> Trying to fetch from http://www.prevanders.net/libdwarf-20130207.tar.gz
+ ###################################################################### 97.3%
+ ==> Checksum passed for libdwarf@20130207
+ ==> Added libdwarf@20130207
+ ==> Trying to fetch from http://www.prevanders.net/libdwarf-20130126.tar.gz
+ ######################################################## 78.9%
+ ==> Checksum passed for libdwarf@20130126
+ ==> Added libdwarf@20130126
+ ==> Trying to fetch from http://www.prevanders.net/libdwarf-20130729.tar.gz
+ ############################################################# 84.7%
+ ==> Added libdwarf@20130729
+ ==> Added spack-mirror-2014-06-24/libdwarf/libdwarf-20130729.tar.gz to mirror
+ ==> Added python@2.7.8.
+ ==> Successfully updated mirror in spack-mirror-2015-02-24.
+ Archive stats:
+ 0 already present
+ 5 added
+ 0 failed to fetch.
+
+Once this is done, you can tar up the ``spack-mirror-2014-06-24`` directory and
+copy it over to the machine you want it hosted on.
+
+Custom package sets
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Normally, ``spack mirror create`` downloads all the archives it has
+checksums for. If you want to only create a mirror for a subset of
+packages, you can do that by supplying a list of package specs on the
+command line after ``spack mirror create``. For example, this
+command::
+
+ $ spack mirror create libelf@0.8.12: boost@1.44:
+
+Will create a mirror for libelf versions greater than or equal to
+0.8.12 and boost versions greater than or equal to 1.44.
+
+Mirror files
+~~~~~~~~~~~~~~~~~~~~~~~
+
+If you have a *very* large number of packages you want to mirror, you
+can supply a file with specs in it, one per line::
+
+ $ cat specs.txt
+ libdwarf
+ libelf@0.8.12:
+ boost@1.44:
+ boost@1.39.0
+ ...
+ $ spack mirror create -f specs.txt
+ ...
+
+This is useful if there is a specific suite of software managed by
+your site.
+
+.. _spack-mirror-add:
+
+``spack mirror add``
+----------------------------
+
+Once you have a mirror, you need to let spack know about it. This is
+relatively simple. First, figure out the URL for the mirror. If it's
+a file, you can use a file URL like this one::
+
+ file:///Users/gamblin2/spack-mirror-2014-06-24
+
+That points to the directory on the local filesystem. If it were on a
+web server, you could use a URL like this one:
+
+ https://example.com/some/web-hosted/directory/spack-mirror-2014-06-24
+
+Spack will use the URL as the root for all of the packages it fetches.
+You can tell your Spack installation to use that mirror like this:
+
+.. code-block:: bash
+
+ $ spack mirror add local_filesystem file:///Users/gamblin2/spack-mirror-2014-06-24
+
+Each mirror has a name so that you can refer to it again later.
+
+.. _spack-mirror-list:
+
+``spack mirror list``
+----------------------------
+
+If you want to see all the mirrors Spack knows about you can run ``spack mirror list``::
+
+ $ spack mirror list
+ local_filesystem file:///Users/gamblin2/spack-mirror-2014-06-24
+
+.. _spack-mirror-remove:
+
+``spack mirror remove``
+----------------------------
+
+And, if you want to remove a mirror, just remove it by name::
+
+ $ spack mirror remove local_filesystem
+ $ spack mirror list
+ ==> No mirrors configured.
+
+Mirror precedence
+----------------------------
+
+Adding a mirror really just adds a section in ``~/.spackconfig``::
+
+ [mirror "local_filesystem"]
+ url = file:///Users/gamblin2/spack-mirror-2014-06-24
+ [mirror "remote_server"]
+ url = https://example.com/some/web-hosted/directory/spack-mirror-2014-06-24
+
+If you want to change the order in which mirrors are searched for
+packages, you can edit this file and reorder the sections. Spack will
+search the topmost mirror first and the bottom-most mirror last.
diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst
index ec2ca4d099..59ba63fa35 100644
--- a/lib/spack/docs/packaging_guide.rst
+++ b/lib/spack/docs/packaging_guide.rst
@@ -5,7 +5,7 @@ Packaging Guide
This guide is intended for developers or administrators who want to
package software so that Spack can install it. It assumes that you
-have at least some familiarty with Python, and that you've read the
+have at least some familiarity with Python, and that you've read the
:ref:`basic usage guide <basic-usage>`, especially the part about
:ref:`specs <sec-specs>`.
@@ -16,18 +16,19 @@ There are two key parts of Spack:
software according to a spec.
Specs allow a user to describe a *particular* build in a way that a
-package author can understand. Packages allow a developer to
-encapsulate the logic build logic for different versions, compilers,
+package author can understand. Packages allow the packager to
+encapsulate the build logic for different versions, compilers,
options, platforms, and dependency combinations in one place.
+Essentially, a package translates a spec into build logic.
Packages in Spack are written in pure Python, so you can do anything
in Spack that you can do in Python. Python was chosen as the
implementation language for two reasons. First, Python is becoming
-ubiquitous in the HPC community due to its use in numerical codes.
-Second, it's a modern language and has many powerful features to help
-make package writing easy.
+ubiquitous in the scientific software community. Second, it's a modern
+language and has many powerful features to help make package writing
+easy.
-Creating & Editing Packages
+Creating & editing packages
----------------------------------
.. _spack-create:
@@ -35,24 +36,23 @@ Creating & Editing Packages
``spack create``
~~~~~~~~~~~~~~~~~~~~~
-The ``spack create`` command generates boilerplate package template
-from a URL pointing to a tarball or other software archive. In most
-cases, you'll only need to run this once, then slightly modify the
-boilerplate to get your package working.
+The ``spack create`` command generates a boilerplate package template
+from a URL. The URL should point to a tarball or other software
+archive. In most cases, ``spack create`` plus a few modifications is
+all you need to get a package working.
-All you need is the URL to a tarball (other archive formats are ok
-too) you want to package:
+Here's an example:
.. code-block:: sh
$ spack create http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz
-When you run this, Spack looks at the tarball URL and tries to figure
-out the name of the package to be created. It also tries to determine
-out what version strings look like for this package. Using this
-information, it tries to find *additional* versions by spidering the
-package's webpage. If it finds multiple versions, Spack prompts you
-to tell it how many versions you want to download and checksum.
+Spack examines the tarball URL and tries to figure out the name of the
+package to be created. It also tries to determine what version strings
+look like for this package. Using this information, it will try to
+find *additional* versions by spidering the package's webpage. If it
+finds multiple versions, Spack prompts you to tell it how many
+versions you want to download and checksum:
.. code-block:: sh
@@ -63,12 +63,6 @@ to tell it how many versions you want to download and checksum.
2.8.12.1 http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz
2.8.12 http://www.cmake.org/files/v2.8/cmake-2.8.12.tar.gz
2.8.11.2 http://www.cmake.org/files/v2.8/cmake-2.8.11.2.tar.gz
- 2.8.11.1 http://www.cmake.org/files/v2.8/cmake-2.8.11.1.tar.gz
- 2.8.11 http://www.cmake.org/files/v2.8/cmake-2.8.11.tar.gz
- 2.8.10.2 http://www.cmake.org/files/v2.8/cmake-2.8.10.2.tar.gz
- 2.8.10.1 http://www.cmake.org/files/v2.8/cmake-2.8.10.1.tar.gz
- 2.8.10 http://www.cmake.org/files/v2.8/cmake-2.8.10.tar.gz
- 2.8.9 http://www.cmake.org/files/v2.8/cmake-2.8.9.tar.gz
...
2.8.0 http://www.cmake.org/files/v2.8/cmake-2.8.0.tar.gz
@@ -77,10 +71,30 @@ to tell it how many versions you want to download and checksum.
Spack will automatically download the number of tarballs you specify
(starting with the most recent) and checksum each of them.
-Note that you don't need to do everything up front. If your package
-is large, you can always choose to download just one tarball for now,
-then run :ref:`spack checksum <spack-checksum>` later if you end up
-wanting more. Let's say you choose to download 3 tarballs:
+You do not *have* to download all of the versions up front. You can
+always choose to download just one tarball initially, and run
+:ref:`spack checksum <spack-checksum>` later if you need more.
+
+.. note::
+
+ If ``spack create`` fails to detect the package name correctly,
+ you can try supplying it yourself, e.g.::
+
+ $ spack create --name cmake http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz
+
+ If it fails entirely, you can get minimal boilerplate by using
+ :ref:`spack-edit-f`, or you can manually create a directory and
+ ``package.py`` file for the package in ``var/spack/packages``.
+
+.. note::
+
+ Spack can fetch packages from source code repositories, but,
+ ``spack create`` will *not* currently create a boilerplate package
+ from a repository URL. You will need to use :ref:`spack-edit-f`
+ and manually edit the ``version()`` directives to fetch from a
+ repo. See :ref:`vcs-fetch` for details.
+
+Let's say you download 3 tarballs:
.. code-block:: sh
@@ -93,8 +107,8 @@ wanting more. Let's say you choose to download 3 tarballs:
==> Fetching http://www.cmake.org/files/v2.8/cmake-2.8.11.2.tar.gz
#################################################################### 95.2%
-Now Spack generates boilerplate code and opens the new
-``package.py`` file in your favorite ``$EDITOR``:
+Now Spack generates boilerplate code and opens a new ``package.py``
+file in your favorite ``$EDITOR``:
.. code-block:: python
:linenos:
@@ -141,12 +155,6 @@ Now Spack generates boilerplate code and opens the new
The tedious stuff (creating the class, checksumming archives) has been
done for you.
-.. note::
-
- If ``spack create`` fails to download or to detect the package
- version, you can use ``spack edit -f`` to generate simpler
- boilerplate. See the next section for more on this.
-
In the generated package, the download ``url`` attribute is already
set. All the things you still need to change are marked with
``FIXME`` labels. The first ``FIXME`` refers to the commented
@@ -199,27 +207,30 @@ The ``cmake`` package actually lives in
a much simpler shortcut and saves you the trouble of typing the full
path.
-
-``spack edit -f``
-~~~~~~~~~~~~~~~~~~~~
If you try to edit a package that doesn't exist, Spack will recommend
-using ``spack create``:
+using ``spack create`` or ``spack edit -f``:
.. code-block:: sh
$ spack edit foo
==> Error: No package 'foo'. Use spack create, or supply -f/--force to edit a new file.
-As the output advises, You can use ``spack edit -f/--force`` to force
-the creation of a new, *very* simple boilerplate package:
+.. _spack-edit-f:
+
+``spack edit -f``
+~~~~~~~~~~~~~~~~~~~~
+
+``spack edit -f`` can be used to create a new, minimal boilerplate
+package:
.. code-block:: sh
$ spack edit -f foo
-Unlike ``spack create``, which tries to infer names and versions, and
-which actually downloads the tarball and checksums it for you, ``spack
-edit -f`` will substitute dummy values for you to fill in yourself:
+Unlike ``spack create``, which infers names and versions, and which
+actually downloads the tarball and checksums it for you, ``spack edit
+-f`` has no such fanciness. It will substitute dummy values for you
+to fill in yourself:
.. code-block:: python
:linenos:
@@ -243,9 +254,16 @@ This is useful when ``spack create`` cannot figure out the name and
version of your package from the archive URL.
-Naming & Directory Structure
+Naming & directory structure
--------------------------------------
+.. note::
+
+ Spack's default naming and directory structure will change in
+ version 0.9. Specifically, 0.9 will stop using directory names
+ with special characters like ``@``, to avoid interfering with
+ certain packages that do not handle this well.
+
This section describes how packages need to be named, and where they
live in Spack's directory structure. In general, `spack-create`_ and
`spack-edit`_ handle creating package files for you, so you can skip
@@ -264,6 +282,7 @@ package:
.. command-output:: cd $SPACK_ROOT/var/spack/packages; ls -CF
:shell:
+ :ellipsis: 10
Each directory contains a file called ``package.py``, which is where
all the python code for the package goes. For example, the ``libelf``
@@ -280,11 +299,9 @@ Package Names
Packages are named after the directory containing ``package.py``. So,
``libelf``'s ``package.py`` lives in a directory called ``libelf``.
-The ``package.py`` file contains a class called ``Libelf``, which
-extends Spack's ``Package`` class. This is what makes it a Spack
-package:
-
-``var/spack/packages/libelf/package.py``
+The ``package.py`` file defines a class called ``Libelf``, which
+extends Spack's ``Package`` class. For example, here is
+``$SPACK_ROOT/var/spack/packages/libelf/package.py``:
.. code-block:: python
:linenos:
@@ -301,8 +318,9 @@ package:
def install():
...
-The **directory name** (``libelf``) is what users need to provide on
-the command line. e.g., if you type any of these:
+The **directory name** (``libelf``) determines the package name that
+users should provide on the command line. e.g., if you type any of
+these:
.. code-block:: sh
@@ -311,8 +329,8 @@ the command line. e.g., if you type any of these:
Spack sees the package name in the spec and looks for
``libelf/package.py`` in ``var/spack/packages``. Likewise, if you say
-``spack install docbook-xml``, then Spack looks for
-``docbook-xml/package.py``.
+``spack install py-numpy``, then Spack looks for
+``py-numpy/package.py``.
Spack uses the directory name as the package name in order to give
packagers more freedom in naming their packages. Package names can
@@ -342,8 +360,7 @@ some examples:
================= =================
In general, you won't have to remember this naming convention because
-`spack-create`_ and `spack-edit`_ will generate boilerplate for you,
-and you can just fill in the blanks.
+`spack-create`_ and `spack-edit`_ handle the details for you.
Adding new versions
@@ -381,9 +398,8 @@ For the URL above, you might have to add an explicit URL because the
version can't simply be substituted in the original ``url`` to
construct the new one for ``8.2.1``.
-Wehn you supply a custom URL for a version, Spack uses that URL
-*verbatim* when fetching the version, and will *not* perform
-extrapolation.
+When you supply a custom URL for a version, Spack uses that URL
+*verbatim* and does not perform extrapolation.
Checksums
~~~~~~~~~~~~~~~~~
@@ -392,10 +408,11 @@ Spack uses a checksum to ensure that the downloaded package version is
not corrupted or compromised. This is especially important when
fetching from insecure sources, like unencrypted http. By default, a
package will *not* be installed if it doesn't pass a checksum test
-(though users can overried this with ``spack install --no-checksum``).
+(though you can override this with ``spack install --no-checksum``).
Spack can currently support checksums using the MD5, SHA-1, SHA-224,
-SHA-256, SHA-384, and SHA-512 algorithms.
+SHA-256, SHA-384, and SHA-512 algorithms. It determines the algorithm
+to use based on the hash length.
``spack md5``
^^^^^^^^^^^^^^^^^^^^^^
@@ -442,7 +459,7 @@ example for ``libelf``:
How many would you like to checksum? (default is 5, q to abort)
This does the same thing that ``spack create`` does, but it allows you
-to go back and add new vesrions easily as you need them (e.g., as
+to go back and add new versions easily as you need them (e.g., as
they're released). It fetches the tarballs you ask for and prints out
a list of ``version`` commands ready to copy/paste into your package
file:
@@ -459,54 +476,57 @@ By default, Spack will search for new tarball downloads by scraping
the parent directory of the tarball you gave it. So, if your tarball
is at ``http://example.com/downloads/foo-1.0.tar.gz``, Spack will look
in ``http://example.com/downloads/`` for links to additional versions.
-If you need to search another path for download links, see the
-reference documentation on `attribute_list_url`_ and
-`attributee_list_depth`_.
+If you need to search another path for download links, you can supply
+some extra attributes that control how your package finds new
+versions. See the documentation on `attribute_list_url`_ and
+`attribute_list_depth`_.
.. note::
* This command assumes that Spack can extrapolate new URLs from an
existing URL in the package, and that Spack can find similar URLs
- on a webpage. If that's not possible, you'll need to manually add
- ``version`` calls yourself.
+ on a webpage. If that's not possible, e.g. if the package's
+ developers don't name their tarballs consistently, you'll need to
+ manually add ``version`` calls yourself.
* For ``spack checksum`` to work, Spack needs to be able to
- ``import`` your pacakge in Python. That means it can't have any
+ ``import`` your package in Python. That means it can't have any
syntax errors, or the ``import`` will fail. Use this once you've
got your package in working order.
.. _vcs-fetch:
-Fetching from VCS Repositories
+Fetching from VCS repositories
--------------------------------------
-For some packages, source code is hosted in a Version Control System
-(VCS) repository rather than as a tarball. Packages can be set up to
-fetch from a repository instead of a tarball. Currently, Spack
-supports fetching with `Git <git-fetch_>`_, `Mercurial (hg)
-<hg-fetch_>`_, and `Subversion (SVN) <svn-fetch_>`_.
+For some packages, source code is provided in a Version Control System
+(VCS) repository rather than in a tarball. Spack can fetch packages
+from VCS repositories. Currently, Spack supports fetching with `Git
+<git-fetch_>`_, `Mercurial (hg) <hg-fetch_>`_, and `Subversion (SVN)
+<svn-fetch_>`_.
To fetch a package from a source repository, you add a ``version()``
call to your package with parameters indicating the repository URL and
-any branch, tag, or revision to fetch. See below for the paramters
+any branch, tag, or revision to fetch. See below for the parameters
you'll need for each VCS system.
Repositories and versions
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The package author is responsible for coming up with a sensible name
-for each version. For example, if you're fetching from a tag like
-``v1.0``, you might call that ``1.0``. If you're fetching a nameless
-git commit or an older subversion revision, you might give the commit
-an intuitive name, like ``dev`` for a development version, or
-``some-fancy-new-feature`` if you want to be more specific.
+for each version to be fetched from a repository. For example, if
+you're fetching from a tag like ``v1.0``, you might call that ``1.0``.
+If you're fetching a nameless git commit or an older subversion
+revision, you might give the commit an intuitive name, like ``dev``
+for a development version, or ``some-fancy-new-feature`` if you want
+to be more specific.
In general, it's recommended to fetch tags or particular
commits/revisions, NOT branches or the repository mainline, as
branches move forward over time and you aren't guaranteed to get the
same thing every time you fetch a particular version. Life isn't
-simple, though, so this is not strictly enforced.
+always simple, though, so this is not strictly enforced.
In some future release, Spack may support extrapolating repository
versions as it does for tarball URLs, but currently this is not
@@ -623,7 +643,7 @@ Revisions
revisions, you can use ``revision`` for branches, tags, and commits
when you fetch with Mercurial.
-As wtih git, you can fetch these versions using the ``spack install
+As with git, you can fetch these versions using the ``spack install
example@<version>`` command-line syntax.
.. _svn-fetch:
@@ -633,7 +653,7 @@ Subversion
To fetch with subversion, use the ``svn`` and ``revision`` parameters:
-Head
+Fetching the head
Simply add an ``svn`` parameter to ``version``:
.. code-block:: python
@@ -642,7 +662,7 @@ Head
This is not recommended, as the head will move forward over time.
-Revisions
+Fetching a revision
To fetch a particular revision, add a ``revision`` to the
version call:
@@ -746,11 +766,105 @@ from the URL and then applied to your source code.
applies cleanly with ``-p1``, but if you're using a patch you didn't
create yourself, ``level`` can be handy.
+``patch()`` functions
+~~~~~~~~~~~~~~~~~~~~~~~~
+
+In addition to supplying patch files, you can write a custom function
+to patch a package's source. For example, the ``py-pyside`` package
+contains some custom code for tweaking the way the PySide build
+handles ``RPATH``:
+
+.. _pyside-patch:
-Finding Package Downloads
+.. code-block:: python
+ :linenos:
+
+ class PyPyside(Package):
+ ...
+
+ def patch(self):
+ """Undo PySide RPATH handling and add Spack RPATH."""
+ # Figure out the special RPATH
+ pypkg = self.spec['python'].package
+ rpath = self.rpath
+ rpath.append(os.path.join(self.prefix, pypkg.site_packages_dir, 'PySide'))
+
+ # Add Spack's standard CMake args to the sub-builds.
+ # They're called BY setup.py so we have to patch it.
+ filter_file(
+ r'OPTION_CMAKE,',
+ r'OPTION_CMAKE, ' + (
+ '"-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE", '
+ '"-DCMAKE_INSTALL_RPATH=%s",' % ':'.join(rpath)),
+ 'setup.py')
+
+ # PySide tries to patch ELF files to remove RPATHs
+ # Disable this and go with the one we set.
+ filter_file(
+ r'^\s*rpath_cmd\(pyside_path, srcpath\)',
+ r'#rpath_cmd(pyside_path, srcpath)',
+ 'pyside_postinstall.py')
+
+A ``patch`` function, if present, will be run after patch files are
+applied and before ``install()`` is run.
+
+You could put this logic in ``install()``, but putting it in a patch
+function gives you some benefits. First, spack ensures that the
+``patch()`` function is run once per code checkout. That means that
+if you run install, hit ctrl-C, and run install again, the code in the
+patch function is only run once. Also, you can tell Spack to run only
+the patching part of the build using the :ref:`spack-patch` command.
+
+Handling RPATHs
----------------------------
-We've already seen the ``homepage`` and ``url`` package attributes:
+Spack installs each package in a way that ensures that all of its
+dependencies are found when it runs. It does this using `RPATHs
+<http://en.wikipedia.org/wiki/Rpath>`_. An RPATH is a search
+path, stored in a binary (an executable or library), that tells the
+dynamic loader where to find its dependencies at runtime. You may be
+familiar with ```LD_LIBRARY_PATH``
+<http://tldp.org/HOWTO/Program-Library-HOWTO/shared-libraries.html>`_
+on Linux or ```DYLD_LIBRARY_PATH``
+<https://developer.apple.com/library/mac/documentation/Darwin/Reference/ManPages/man1/dyld.1.html>`_
+on Mac OS X. RPATH is similar to these paths, in that it tells
+the loader where to find libraries. Unlike them, it is embedded in
+the binary and not set in each user's environment.
+
+RPATHs in Spack are handled in one of three ways:
+
+ 1. For most packages, RPATHs are handled automatically using Spack's
+ :ref:`compiler wrappers <compiler-wrappers>`. These wrappers are
+ set in standard variables like ``CC``, ``CXX``, and ``FC``, so
+ most build systems (autotools and many gmake systems) pick them
+ up and use them.
+ 2. CMake also respects Spack's compiler wrappers, but many CMake
+ builds have logic to overwrite RPATHs when binaries are
+ installed. Spack provides the ``std_cmake_args`` variable, which
+ includes parameters necessary for a CMake build to use the right
+ installation RPATH. It can be used like this when ``cmake`` is
+ invoked:
+
+ .. code-block:: python
+
+ class MyPackage(Package):
+ ...
+ def install(self, spec, prefix):
+ cmake('..', *std_cmake_args)
+ make()
+ make('install')
+
+ 3. If you need to modify the build to add your own RPATHs, you can
+ use the ``self.rpath`` property of your package, which will
+ return a list of all the RPATHs that Spack will use when it
+ links. You can see how this is used in the :ref:`PySide
+ example <pyside-patch>` above.
+
+
+Finding new versions
+----------------------------
+
+You've already seen the ``homepage`` and ``url`` package attributes:
.. code-block:: python
:linenos:
@@ -786,7 +900,7 @@ url is:
url = "http://www.mr511.de/software/libelf-0.8.13.tar.gz"
-Spack spiders ``http://www.mr511.de/software/`` to find similar
+Here, Spack spiders ``http://www.mr511.de/software/`` to find similar
tarball links and ultimately to make a list of available versions of
``libelf``.
@@ -813,7 +927,7 @@ the ``list_url``, because that is where links to old versions are:
~~~~~~~~~~~~~~~~~~~~~
``libdwarf`` and many other packages have a listing of available
-verisons on a single webpage, but not all do. For example, ``mpich``
+versions on a single webpage, but not all do. For example, ``mpich``
has a tarball URL that looks like this:
url = "http://www.mpich.org/static/downloads/3.0.4/mpich-3.0.4.tar.gz"
@@ -840,7 +954,7 @@ when spidering the page.
.. _attribute_parallel:
-Parallel Builds
+Parallel builds
------------------
By default, Spack will invoke ``make()`` with a ``-j <njobs>``
@@ -969,13 +1083,210 @@ command line to find installed packages or to install packages with
particular constraints, and package authors can use specs to describe
relationships between packages.
+.. _setup-dependent-environment:
+
+``setup_dependent_environment()``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Spack provides a mechanism for dependencies to provide variables that
+can be used in their dependents' build. Any package can declare a
+``setup_dependent_environment()`` function, and this function will be
+called before the ``install()`` method of any dependent packages.
+This allows dependencies to set up environment variables and other
+properties to be used by dependents.
+
+The function declaration should look like this:
+
+.. code-block:: python
+
+ class Qt(Package):
+ ...
+ def setup_dependent_environment(self, module, spec, dep_spec):
+ """Dependencies of Qt find it using the QTDIR environment variable."""
+ os.environ['QTDIR'] = self.prefix
+
+Here, the Qt package sets the ``QTDIR`` environment variable so that
+packages that depend on a particular Qt installation will find it.
+
+The arguments to this function are:
+
+ * **module**: the module of the dependent package, where global
+ properties can be assigned.
+ * **spec**: the spec of the *dependency package* (the one the function is called on).
+ * **dep_spec**: the spec of the dependent package (i.e. dep_spec depends on spec).
+
+A good example of using these is in the Python package:
+
+.. code-block:: python
+
+ def setup_dependent_environment(self, module, spec, dep_spec):
+ # Python extension builds can have a global python executable function
+ module.python = Executable(join_path(spec.prefix.bin, 'python'))
+
+ # Add variables for lib/pythonX.Y and lib/pythonX.Y/site-packages dirs.
+ module.python_lib_dir = os.path.join(dep_spec.prefix, self.python_lib_dir)
+ module.python_include_dir = os.path.join(dep_spec.prefix, self.python_include_dir)
+ module.site_packages_dir = os.path.join(dep_spec.prefix, self.site_packages_dir)
+
+ # Make the site packages directory if it does not exist already.
+ mkdirp(module.site_packages_dir)
+
+ # Set PYTHONPATH to include site-packages dir for the
+ # extension and any other python extensions it depends on.
+ python_paths = []
+ for d in dep_spec.traverse():
+ if d.package.extends(self.spec):
+ python_paths.append(os.path.join(d.prefix, self.site_packages_dir))
+ os.environ['PYTHONPATH'] = ':'.join(python_paths)
+
+The first thing that happens here is that the ``python`` command is
+inserted into module scope of the dependent. This allows most python
+packages to have a very simple install method, like this:
+
+.. code-block:: python
+
+ def install(self, spec, prefix):
+ python('setup.py', 'install', '--prefix=%s' % prefix)
+
+Python's ``setup_dependent_environment`` method also sets up some
+other variables, creates a directory, and sets up the ``PYTHONPATH``
+so that dependent packages can find their dependencies at build time.
+
+
+.. _packaging_extensions:
+
+Extensions
+-------------------------
+
+Spack's support for package extensions is documented extensively in
+:ref:`extensions`. This section documents how to make your own
+extendable packages and extensions.
+
+To support extensions, a package needs to set its ``extendable``
+property to ``True``, e.g.:
+
+.. code-block:: python
+
+ class Python(Package):
+ ...
+ extendable = True
+ ...
+
+To make a package into an extension, simply add an
+``extends`` call in the package definition, and pass it the name of an
+extendable package:
+
+.. code-block:: python
+
+ class PyNumpy(Package):
+ ...
+ extends('python')
+ ...
+
+Now, the ``py-numpy`` package can be used as an argument to ``spack
+activate``. When it is activated, all the files in its prefix will be
+symbolically linked into the prefix of the python package.
+
+Sometimes, certain files in one package will conflict with those in
+another, which means they cannot both be activated (symlinked) at the
+same time. In this case, you can tell Spack to ignore those files
+when it does the activation:
+
+.. code-block:: python
+
+ class PyNose(Package):
+ ...
+ extends('python', ignore=r'bin/nosetests.*$')
+ ...
+
+The code above will prevent ``$prefix/bin/nosetests`` from being
+linked in at activation time.
+
+.. note::
+
+ You can call *either* ``depends_on`` or ``extends`` on any one
+ package, but not both. For example you cannot both
+ ``depends_on('python')`` and ``extends('python')`` in the same
+ package. ``extends`` implies ``depends_on``.
+
+
+
+Activation & deactivation
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Spack's ``Package`` class has default ``activate`` and ``deactivate``
+implementations that handle symbolically linking extensions' prefixes
+into the directory of the parent package. However, extendable
+packages can override these methods to add custom activate/deactivate
+logic of their own. For example, the ``activate`` and ``deactivate``
+methods in the Python class use the symbolic linking, but they also
+handle details surrounding Python's ``.pth`` files, and other aspects
+of Python packaging.
+
+Spack's extensions mechanism is designed to be extensible, so that
+other packages (like Ruby, R, Perl, etc.) can provide their own
+custom extension management logic, as they may not handle modules the
+same way that Python does.
+
+Let's look at Python's activate function:
+
+.. code-block:: python
+
+ def activate(self, ext_pkg, **kwargs):
+ kwargs.update(ignore=self.python_ignore(ext_pkg, kwargs))
+ super(Python, self).activate(ext_pkg, **kwargs)
+
+ exts = spack.install_layout.extension_map(self.spec)
+ exts[ext_pkg.name] = ext_pkg.spec
+ self.write_easy_install_pth(exts)
+
+This function is called on the *extendee* (Python). It first calls
+``activate`` in the superclass, which handles symlinking the
+extension package's prefix into this package's prefix. It then does
+some special handling of the ``easy-install.pth`` file, part of
+Python's setuptools.
+
+Deactivate behaves similarly to activate, but it unlinks files:
+
+.. code-block:: python
+
+ def deactivate(self, ext_pkg, **kwargs):
+ kwargs.update(ignore=self.python_ignore(ext_pkg, kwargs))
+ super(Python, self).deactivate(ext_pkg, **kwargs)
+
+ exts = spack.install_layout.extension_map(self.spec)
+ if ext_pkg.name in exts: # Make deactivate idempotent.
+ del exts[ext_pkg.name]
+ self.write_easy_install_pth(exts)
+
+Both of these methods call some custom functions in the Python
+package. See the source for Spack's Python package for details.
+
+
+Activation arguments
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+You may have noticed that the ``activate`` function defined above
+takes keyword arguments. These are the keyword arguments from
+``extends()``, and they are passed to both activate and deactivate.
+
+This capability allows an extension to customize its own activation by
+passing arguments to the extendee. Extendees can likewise implement
+custom ``activate()`` and ``deactivate()`` functions to suit their
+needs.
+
+The only keyword argument supported by default is the ``ignore``
+argument, which can take a regex, list of regexes, or a predicate to
+determine which files *not* to symlink during activation.
+
+
.. _virtual-dependencies:
Virtual dependencies
-----------------------------
In some cases, more than one package can satisfy another package's
-dependency. One way this can happen is if a pacakge depends on a
+dependency. One way this can happen is if a package depends on a
particular *interface*, but there are multiple *implementations* of
the interface, and the package could be built with any of them. A
*very* common interface in HPC is the `Message Passing Interface (MPI)
@@ -988,7 +1299,7 @@ MPI has several different implementations (e.g., `MPICH
applications can be built with any one of them. Complicating matters,
MPI does not have a standardized ABI, so a package built with one
implementation cannot simply be relinked with another implementation.
-Many pacakage managers handle interfaces like this by requiring many
+Many package managers handle interfaces like this by requiring many
similar package files, e.g., ``foo``, ``foo-mvapich``, ``foo-mpich``,
but Spack avoids this explosion of package files by providing support
for *virtual dependencies*.
@@ -1014,7 +1325,7 @@ supplying a ``depends_on`` call in the package definition. For example:
depends_on("adept-utils")
depends_on("callpath")
-Here, ``callpath`` and ``adept-utils`` are concrete pacakges, but
+Here, ``callpath`` and ``adept-utils`` are concrete packages, but
there is no actual package file for ``mpi``, so we say it is a
*virtual* package. The syntax of ``depends_on``, is the same for
both. If we look inside the package file of an MPI implementation,
@@ -1038,7 +1349,7 @@ to ``provides`` to add constraints. This allows Spack to support the
notion of *versioned interfaces*. The MPI standard has gone through
many revisions, each with new functions added, and each revision of
the standard has a version number. Some packages may require a recent
-implementation that supports MPI-3 fuctions, but some MPI versions may
+implementation that supports MPI-3 functions, but some MPI versions may
only provide up to MPI-2. Others may need MPI 2.1 or higher. You can
indicate this by adding a version constraint to the spec passed to
``provides``:
@@ -1070,7 +1381,7 @@ constraints on the *providing* package, or the *provider*. The
provider only provides the declared virtual spec when *it* matches
the constraints in the when clause. Here, when ``mpich`` is at
version 3 or higher, it provides MPI up to version 3. When ``mpich``
-is at version 1 or higher, it provides the MPI virtual pacakge at
+is at version 1 or higher, it provides the MPI virtual package at
version 1.
The ``when`` qualifier ensures that Spack selects a suitably high
@@ -1190,6 +1501,7 @@ explicitly. Concretization policies are discussed in more detail in
:ref:`site-configuration`. Sites using Spack can customize them to
match the preferences of their own users.
+.. _spack-spec:
``spack spec``
~~~~~~~~~~~~~~~~~~~~
@@ -1232,7 +1544,7 @@ software should be installed.
Spack provides wrapper functions for ``configure`` and ``make`` so
that you can call them in a similar way to how you'd call a shell
-comamnd. In reality, these are Python functions. Spack provides
+command. In reality, these are Python functions. Spack provides
these functions to make writing packages more natural. See the section
on :ref:`shell wrappers <shell-wrappers>`.
@@ -1287,11 +1599,11 @@ information.
.. _install-environment:
-The Install environment
+The install environment
--------------------------
In general, you should not have to do much differently in your install
-method than you would when installing a pacakge on the command line.
+method than you would when installing a package on the command line.
In fact, you may need to do *less* than you would on the command line.
Spack tries to set environment variables and modify compiler calls so
@@ -1314,7 +1626,7 @@ purposes:
#. Make build systems use Spack's compiler wrappers for their builds.
#. Allow build systems to find dependencies more easily
-The Compiler enviroment variables that Spack sets are:
+The Compiler environment variables that Spack sets are:
============ ===============================
Variable Purpose
@@ -1344,9 +1656,10 @@ entering ``install()`` so that packages can locate dependencies
easily:
======================= =============================
- ``PATH`` Set to point to ``/bin`` directories of dpeendencies
+ ``PATH`` Set to point to ``/bin`` directories of dependencies
``CMAKE_PREFIX_PATH`` Path to dependency prefixes for CMake
``PKG_CONFIG_PATH`` Path to any pkgconfig directories for dependencies
+ ``PYTHONPATH`` Path to site-packages dir of any python dependencies
======================= =============================
``PATH`` is set up to point to dependencies ``/bin`` directories so
@@ -1366,6 +1679,12 @@ dependencies using the GNU ``pkg-config`` tool. It is similar to
``CMAKE_PREFIX_PATH`` in that it allows a build to automatically
discover its dependencies.
+If you want to see the environment that a package will build with, or
+if you want to run commands in that environment to test them out, you
+can use the :ref:```spack env`` <spack-env>` command, documented
+below.
+
+.. _compiler-wrappers:
Compiler interceptors
~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -1423,7 +1742,7 @@ the command line.
Forking ``install()``
~~~~~~~~~~~~~~~~~~~~~
-To give packagers free reign over their install environemnt, Spack
+To give packagers free rein over their install environment, Spack
+To give packagers free reign over their install environment, Spack
forks a new process each time it invokes a package's ``install()``
method. This allows packages to have their own completely sandboxed
build environment, without impacting other jobs that the main Spack
@@ -1431,7 +1750,6 @@ process runs. Packages are free to change the environment or to
modify Spack internals, because each ``install()`` call has its own
dedicated process.
-
.. _prefix-objects:
Prefix objects
@@ -1552,7 +1870,7 @@ dependency version.
You can use ``satisfies()`` to test for particular dependencies,
e.g. ``foo.satisfies('^openmpi@1.2')`` or ``foo.satisfies('^mpich')``,
-or you can use Python's builtin ``in`` operator:
+or you can use Python's built-in ``in`` operator:
.. code-block:: python
@@ -1581,7 +1899,7 @@ Accessing Dependencies
~~~~~~~~~~~~~~~~~~~~~~~~~~
You may need to get at some file or binary that's in the prefix of one
-of your dependencies. You can do that by subscripting the spec:
+of your dependencies. You can do that by subscripting the spec:
.. code-block:: python
@@ -1901,9 +2219,9 @@ File functions
Create an empty file at ``path``.
-.. _pacakge-lifecycle:
+.. _package-lifecycle:
-Package Workflow Commands
+Packaging workflow commands
---------------------------------
When you are building packages, you will likely not get things
@@ -1930,7 +2248,9 @@ A typical package workflow might look like this:
... repeat clean/install until install works ...
Below are some commands that will allow you some finer-grained
-controll over the install process.
+control over the install process.
+
+.. _spack-fetch:
``spack fetch``
~~~~~~~~~~~~~~~~~
@@ -1944,6 +2264,8 @@ directory will be located under ``$SPACK_HOME/var/spack``.
When run after the archive has already been downloaded, ``spack
fetch`` is idempotent and will not download the archive again.
+.. _spack-stage:
+
``spack stage``
~~~~~~~~~~~~~~~~~
@@ -1952,6 +2274,8 @@ the downloaded archive in its temporary directory, where it will be
built by ``spack install``. Similar to ``fetch``, if the archive has
already been expanded, ``stage`` is idempotent.
+.. _spack-patch:
+
``spack patch``
~~~~~~~~~~~~~~~~~
@@ -1963,43 +2287,45 @@ this step if they have been. If Spack discovers that patches didn't
apply cleanly on some previous run, then it will restage the entire
package before patching.
+.. _spack-restage:
-``spack clean``
+``spack restage``
~~~~~~~~~~~~~~~~~
+Restores the source code to a pristine state, as it was before building.
-There are several variations of ``spack clean``. With no arguments,
-``spack clean`` runs ``make clean`` in the expanded archive directory.
-This is useful if an attempted build failed, and something needs to be
-changed to get a package to build. If a particular package does not
-have a ``make clean`` target, this will do nothing.
+Does this in one of two ways:
-``spack clean -w / --work``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Deletes the entire build directory and re-expands it from the downloaded
-archive. This is useful if a package does not support a proper ``make clean``
-target.
+ 1. If the source was fetched as a tarball, deletes the entire build
+ directory and re-expands the tarball.
+
+ 2. If the source was checked out from a repository, this deletes the
+ build directory and checks it out again.
-``spack clean -d / --dist``
+.. _spack-clean:
+
+``spack clean``
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Deletes the build directory *and* the downloaded archive. If
-``fetch``, ``stage``, or ``install`` are run again after this, the
-process will start from scratch, and the archive archive will be
-downloaded again. Useful if somehow a bad archive is downloaded
-accidentally and needs to be cleaned out of the staging area.
+Cleans up temporary files for a particular package, by deleting the
+expanded/checked out source code *and* any downloaded archive. If
+``fetch``, ``stage``, or ``install`` are run again after this, Spack's
+build process will start from scratch.
+
+
+.. _spack-purge:
``spack purge``
~~~~~~~~~~~~~~~~~
-
-Cleans up *everything* in the build directory. You can use this to
-recover disk space if temporary files from interrupted or failed
-installs accumulate in the staging area.
+Cleans up all of Spack's temporary files. Use this to recover disk
+space if temporary files from interrupted or failed installs
+accumulate in the staging area. This is equivalent to running ``spack
+clean`` for every package you have fetched or staged.
Keeping the stage directory on success
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
By default, ``spack install`` will delete the staging area once a
-pacakge has been successfully built and installed. Use
+package has been successfully built and installed. Use
``--keep-stage`` to leave the build directory intact:
.. code-block:: sh
@@ -2032,7 +2358,86 @@ to get rid of the install prefix before you build again:
spack uninstall -f <spec>
-Interactive Shell Support
+Graphing dependencies
+--------------------------
+
+.. _spack-graph:
+
+``spack graph``
+~~~~~~~~~~~~~~~~~~~
+
+Spack provides the ``spack graph`` command for graphing dependencies.
+The command by default generates an ASCII rendering of a spec's
+dependency graph. For example::
+
+ $ spack graph mpileaks
+ o mpileaks
+ |\
+ | |\
+ | o | callpath
+ |/| |
+ | |\|
+ | |\ \
+ | | |\ \
+ | | | | o adept-utils
+ | |_|_|/|
+ |/| | | |
+ o | | | | mpi
+ / / / /
+ | | o | dyninst
+ | |/| |
+ |/|/| |
+ | | |/
+ | o | libdwarf
+ |/ /
+ o | libelf
+ /
+ o boost
+
+At the top is the root package in the DAG, with dependency edges
+emerging from it. On a color terminal, the edges are colored by which
+dependency they lead to.
+
+You can also use ``spack graph`` to generate graphs in the widely used
+`Dot <http://www.graphviz.org/doc/info/lang.html>`_ format. For
+example::
+
+ $ spack graph --dot mpileaks
+ digraph G {
+ label = "Spack Dependencies"
+ labelloc = "b"
+ rankdir = "LR"
+ ranksep = "5"
+
+ "boost" [label="boost"]
+ "callpath" [label="callpath"]
+ "libdwarf" [label="libdwarf"]
+ "mpileaks" [label="mpileaks"]
+ "mpi" [label="mpi"]
+ "adept-utils" [label="adept-utils"]
+ "dyninst" [label="dyninst"]
+ "libelf" [label="libelf"]
+
+ "callpath" -> "dyninst"
+ "callpath" -> "adept-utils"
+ "callpath" -> "mpi"
+ "callpath" -> "libelf"
+ "callpath" -> "libdwarf"
+ "libdwarf" -> "libelf"
+ "mpileaks" -> "adept-utils"
+ "mpileaks" -> "callpath"
+ "mpileaks" -> "mpi"
+ "adept-utils" -> "boost"
+ "adept-utils" -> "mpi"
+ "dyninst" -> "boost"
+ "dyninst" -> "libelf"
+ "dyninst" -> "libdwarf"
+ }
+
+This graph can be provided as input to other graphing tools, such as
+those in `Graphviz <http://www.graphviz.org>`_.
+
+Interactive shell support
--------------------------
Spack provides some limited shell support to make life easier for
@@ -2048,6 +2453,7 @@ For ``csh`` and ``tcsh`` run:
``spack cd`` will then be available.
+.. _spack-cd:
``spack cd``
~~~~~~~~~~~~~~~~~
@@ -2068,7 +2474,7 @@ build it:
/Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3=linux-ppc64/libelf-0.8.13
``spack cd`` here changed the current working directory to the
-directory containing theexpanded ``libelf`` source code. There are a
+directory containing the expanded ``libelf`` source code. There are a
number of other places you can cd to in the spack directory hierarchy:
.. command-output:: spack cd -h
@@ -2078,6 +2484,35 @@ directory, install directory, package directory) and others change to
core spack locations. For example, ``spack cd -m`` will take you to
the main python source directory of your spack install.
+.. _spack-env:
+
+``spack env``
+~~~~~~~~~~~~~~~~~~~~~~
+
+``spack env`` functions much like the standard unix ``env`` command,
+but it takes a spec as an argument. You can use it to see the
+environment variables that will be set when a particular build runs,
+for example:
+
+.. code-block:: sh
+
+ $ spack env mpileaks@1.1%intel
+
+This will display the entire environment that will be set when the
+``mpileaks@1.1%intel`` build runs.
+
+To run commands in a package's build environment, you can simply provide them after the spec argument to ``spack env``:
+
+.. code-block:: sh
+
+ $ spack cd mpileaks@1.1%intel
+ $ spack env mpileaks@1.1%intel ./configure
+
+This will cd to the build directory and then run ``configure`` in the
+package's build environment.
+
+
+.. _spack-location:
``spack location``
~~~~~~~~~~~~~~~~~~~~~~
diff --git a/lib/spack/docs/site_configuration.rst b/lib/spack/docs/site_configuration.rst
index 4936e3052c..44071bbfc6 100644
--- a/lib/spack/docs/site_configuration.rst
+++ b/lib/spack/docs/site_configuration.rst
@@ -1,208 +1,15 @@
.. _site-configuration:
-Site-specific configuration
+Site configuration
===================================
-.. _mirrors:
-
-Mirrors
-----------------------------
-
-Some sites may not have access to the internet for fetching packages.
-These sites will need a local repository of tarballs from which they
-can get their files. Spack has support for this with *mirrors*. A
-mirror is a URL that points to a directory, either on the local
-filesystem or on some server, containing tarballs for all of Spack's
-packages.
-
-Here's an example of a mirror's directory structure::
-
- mirror/
- cmake/
- cmake-2.8.10.2.tar.gz
- dyninst/
- DyninstAPI-8.1.1.tgz
- DyninstAPI-8.1.2.tgz
- libdwarf/
- libdwarf-20130126.tar.gz
- libdwarf-20130207.tar.gz
- libdwarf-20130729.tar.gz
- libelf/
- libelf-0.8.12.tar.gz
- libelf-0.8.13.tar.gz
- libunwind/
- libunwind-1.1.tar.gz
- mpich/
- mpich-3.0.4.tar.gz
- mvapich2/
- mvapich2-1.9.tgz
-
-The structure is very simple. There is a top-level directory. The
-second level directories are named after packages, and the third level
-contains tarballs for each package, named as they were in the
-package's fetch URL.
-
-``spack mirror``
-~~~~~~~~~~~~~~~~~~~~~~~
-
-Mirrors are managed with the ``spack mirror`` command. The help for
-``spack mirror`` looks like this::
-
- $ spack mirror -h
- usage: spack mirror [-h] SUBCOMMAND ...
-
- positional arguments:
- SUBCOMMAND
- create Create a directory to be used as a spack mirror, and fill
- it with package archives.
- add Add a mirror to Spack.
- remove Remove a mirror by name.
- list Print out available mirrors to the console.
-
- optional arguments:
- -h, --help show this help message and exit
-
-The ``create`` command actually builds a mirror by fetching all of its
-packages from the internet and checksumming them.
-
-The other three commands are for managing mirror configuration. They
-control the URL(s) from which Spack downloads its packages.
-
-
-``spack mirror create``
-~~~~~~~~~~~~~~~~~~~~~~~
-
-You can create a mirror using the ``spack mirror create`` command, assuming
-you're on a machine where you can access the internet.
-
-The command will iterate through all of Spack's packages and download
-the safe ones into a directory structure like the one above. Here is
-what it looks like:
-
-
-.. code-block:: bash
-
- $ spack mirror create libelf libdwarf
- ==> Created new mirror in spack-mirror-2014-06-24
- ==> Trying to fetch from http://www.mr511.de/software/libelf-0.8.13.tar.gz
- ########################################################## 81.6%
- ==> Checksum passed for libelf@0.8.13
- ==> Added spack-mirror-2014-06-24/libelf/libelf-0.8.13.tar.gz to mirror
- ==> Trying to fetch from http://www.mr511.de/software/libelf-0.8.12.tar.gz
- ###################################################################### 98.6%
- ==> Checksum passed for libelf@0.8.12
- ==> Added spack-mirror-2014-06-24/libelf/libelf-0.8.12.tar.gz to mirror
- ==> Trying to fetch from http://www.prevanders.net/libdwarf-20130207.tar.gz
- ###################################################################### 97.3%
- ==> Checksum passed for libdwarf@20130207
- ==> Added spack-mirror-2014-06-24/libdwarf/libdwarf-20130207.tar.gz to mirror
- ==> Trying to fetch from http://www.prevanders.net/libdwarf-20130126.tar.gz
- ######################################################## 78.9%
- ==> Checksum passed for libdwarf@20130126
- ==> Added spack-mirror-2014-06-24/libdwarf/libdwarf-20130126.tar.gz to mirror
- ==> Trying to fetch from http://www.prevanders.net/libdwarf-20130729.tar.gz
- ############################################################# 84.7%
- ==> Checksum passed for libdwarf@20130729
- ==> Added spack-mirror-2014-06-24/libdwarf/libdwarf-20130729.tar.gz to mirror
-
-Once this is done, you can tar up the ``spack-mirror-2014-06-24`` directory and
-copy it over to the machine you want it hosted on.
-
-Custom package sets
-^^^^^^^^^^^^^^^^^^^^^^^^
-
-Normally, ``spack mirror create`` downloads all the archives it has
-checksums for. If you want to only create a mirror for a subset of
-packages, you can do that by supplying a list of package specs on the
-command line after ``spack mirror create``. For example, this
-command::
-
- $ spack mirror create libelf@0.8.12: boost@1.44:
-
-Will create a mirror for libelf versions greater than or equal to
-0.8.12 and boost versions greater than or equal to 1.44.
-
-Mirror files
-^^^^^^^^^^^^^^^^^^^^^^^^
-
-If you have a *very* large number of packages you want to mirror, you
-can supply a file with specs in it, one per line::
-
- $ cat specs.txt
- libdwarf
- libelf@0.8.12:
- boost@1.44:
- boost@1.39.0
- ...
- $ spack mirror create -f specs.txt
- ...
-
-This is useful if there is a specific suite of software managed by
-your site.
-
-
-``spack mirror add``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Once you have a mirrror, you need to let spack know about it. This is
-relatively simple. First, figure out the URL for the mirror. If it's
-a file, you can use a file URL like this one::
-
- file:///Users/gamblin2/spack-mirror-2014-06-24
-
-That points to the directory on the local filesystem. If it were on a
-web server, you could use a URL like this one:
-
- https://example.com/some/web-hosted/directory/spack-mirror-2014-06-24
-
-Spack will use the URL as the root for all of the packages it fetches.
-You can tell your Spack installation to use that mirror like this:
-
-.. code-block:: bash
-
- $ spack mirror add local_filesystem file:///Users/gamblin2/spack-mirror-2014-06-24
-
-Each mirror has a name so that you can refer to it again later.
-
-``spack mirror list``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-If you want to see all the mirrors Spack knows about you can run ``spack mirror list``::
-
- $ spack mirror list
- local_filesystem file:///Users/gamblin2/spack-mirror-2014-06-24
-
-``spack mirror remove``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-And, if you want to remove a mirror, just remove it by name::
-
- $ spack mirror remove local_filesystem
- $ spack mirror list
- ==> No mirrors configured.
-
-Mirror precedence
-~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Adding a mirror really just adds a section in ``~/.spackconfig``::
-
- [mirror "local_filesystem"]
- url = file:///Users/gamblin2/spack-mirror-2014-06-24
- [mirror "remote_server"]
- url = https://example.com/some/web-hosted/directory/spack-mirror-2014-06-24
-
-If you want to change the order in which mirrors are searched for
-packages, you can edit this file and reorder the sections. Spack will
-search the topmost mirror first and the bottom-most mirror last.
-
-
.. _temp-space:
Temporary space
----------------------------
.. warning:: Temporary space configuration will be moved to configuration files.
- The intructions here are old and refer to ``__init__.py``
+ The instructions here are old and refer to ``__init__.py``
By default, Spack will try to do all of its building in temporary
space. There are two main reasons for this. First, Spack is designed
@@ -286,7 +93,7 @@ the virtual spec to specs for possible implementations, and
later, so there is no need to fully concretize the spec when returning
it.
-The ``DefaultConcretizer`` is intendend to provide sensible defaults
+The ``DefaultConcretizer`` is intended to provide sensible defaults
for each policy, but there are certain choices that it can't know
about. For example, one site might prefer ``OpenMPI`` over ``MPICH``,
or another might prefer an old version of some packages. These types
@@ -327,3 +134,53 @@ Set concretizer to *your own* class instead of the default:
concretizer = MyConcretizer()
The next time you run Spack, your changes should take effect.
+
+
+Profiling
+~~~~~~~~~~~~~~~~~~~~~
+
+Spack has some limited built-in support for profiling, and can report
+statistics using standard Python timing tools. To use this feature,
+supply ``-p`` to Spack on the command line, before any subcommands.
+
+.. _spack-p:
+
+``spack -p``
+^^^^^^^^^^^^^^^^^^
+
+``spack -p`` output looks like this:
+
+.. code-block:: sh
+
+ $ spack -p graph dyninst
+ o dyninst
+ |\
+ | |\
+ | o | libdwarf
+ |/ /
+ o | libelf
+ /
+ o boost
+
+ 307670 function calls (305943 primitive calls) in 0.127 seconds
+
+ Ordered by: internal time
+
+ ncalls tottime percall cumtime percall filename:lineno(function)
+ 853 0.021 0.000 0.066 0.000 inspect.py:472(getmodule)
+ 51197 0.011 0.000 0.018 0.000 inspect.py:51(ismodule)
+ 73961 0.010 0.000 0.010 0.000 {isinstance}
+ 1762 0.006 0.000 0.053 0.000 inspect.py:440(getsourcefile)
+ 32075 0.006 0.000 0.006 0.000 {hasattr}
+ 1760 0.004 0.000 0.004 0.000 {posix.stat}
+ 2240 0.004 0.000 0.004 0.000 {posix.lstat}
+ 2602 0.004 0.000 0.011 0.000 inspect.py:398(getfile)
+ 771 0.004 0.000 0.077 0.000 inspect.py:518(findsource)
+ 2656 0.004 0.000 0.004 0.000 {method 'match' of '_sre.SRE_Pattern' objects}
+ 30772 0.003 0.000 0.003 0.000 {method 'get' of 'dict' objects}
+ ...
+
+The bottom of the output shows the most time-consuming functions,
+slowest on top.  The profiling support is from Python's built-in tool,
+`cProfile
+<https://docs.python.org/2/library/profile.html#module-cProfile>`_.
diff --git a/lib/spack/env/cc b/lib/spack/env/cc
index 266e41cb48..19ca31cace 100755
--- a/lib/spack/env/cc
+++ b/lib/spack/env/cc
@@ -1,140 +1,332 @@
-#!/usr/bin/env python
-import sys
-if not sys.version_info[:2] >= (2,6):
- sys.exit("Spack requires Python 2.6. Version was %s." % sys.version_info)
-
-import os
-import re
-import subprocess
-from contextlib import closing
-
-# Import spack parameters through the build environment.
-spack_lib = os.environ.get("SPACK_LIB")
-if not spack_lib:
- print "Spack compiler must be run from spack!"
- sys.exit(1)
-
-# Grab a minimal set of spack packages
-sys.path.append(spack_lib)
-from spack.compilation import *
-from external import argparse
-import llnl.util.tty as tty
-
-spack_prefix = get_env_var("SPACK_PREFIX")
-spack_debug = get_env_flag("SPACK_DEBUG")
-spack_deps = get_path("SPACK_DEPENDENCIES")
-spack_env_path = get_path("SPACK_ENV_PATH")
-spack_debug_log_dir = get_env_var("SPACK_DEBUG_LOG_DIR")
-spack_spec = get_env_var("SPACK_SPEC")
-
-compiler_spec = get_env_var("SPACK_COMPILER_SPEC")
-spack_cc = get_env_var("SPACK_CC", required=False)
-spack_cxx = get_env_var("SPACK_CXX", required=False)
-spack_f77 = get_env_var("SPACK_F77", required=False)
-spack_fc = get_env_var("SPACK_FC", required=False)
-
-# Figure out what type of operation we're doing
-command = os.path.basename(sys.argv[0])
-
-cpp, cc, ccld, ld, version_check = range(5)
-
-if command == 'cpp':
- mode = cpp
-elif command == 'ld':
- mode = ld
-elif '-E' in sys.argv:
- mode = cpp
-elif '-c' in sys.argv:
- mode = cc
-else:
- mode = ccld
-
-
-if command in ('cc', 'gcc', 'c89', 'c99', 'clang'):
- command = spack_cc
- language = "C"
-elif command in ('c++', 'CC', 'g++', 'clang++'):
- command = spack_cxx
- language = "C++"
-elif command in ('f77'):
- command = spack_f77
- language = "Fortran 77"
-elif command in ('fc', 'f90', 'f95'):
- command = spack_fc
- language = "Fortran 90"
-elif command in ('ld', 'cpp'):
- pass # leave it the same. TODO: what's the right thing?
-else:
- raise Exception("Unknown compiler: %s" % command)
-
-if command is None:
- print "ERROR: Compiler '%s' does not support compiling %s programs." % (
- compiler_spec, language)
- sys.exit(1)
-
-version_args = ['-V', '-v', '--version', '-dumpversion']
-if any(arg in sys.argv for arg in version_args):
- mode = version_check
-
-# Parse out the includes, libs, etc. so we can adjust them if need be.
-parser = argparse.ArgumentParser(add_help=False)
-parser.add_argument("-I", action='append', default=[], dest='include_path')
-parser.add_argument("-L", action='append', default=[], dest='lib_path')
-parser.add_argument("-l", action='append', default=[], dest='libs')
-
-options, other_args = parser.parse_known_args()
-rpaths, other_args = parse_rpaths(other_args)
-
-# Add dependencies' include and lib paths to our compiler flags.
-def add_if_dir(path_list, directory, index=None):
- if os.path.isdir(directory):
- if index is None:
- path_list.append(directory)
- else:
- path_list.insert(index, directory)
-
-for dep_dir in spack_deps:
- add_if_dir(options.include_path, os.path.join(dep_dir, "include"))
- add_if_dir(options.lib_path, os.path.join(dep_dir, "lib"))
- add_if_dir(options.lib_path, os.path.join(dep_dir, "lib64"))
-
-# Add our modified arguments to it.
-arguments = ['-I%s' % path for path in options.include_path]
-arguments += other_args
-arguments += ['-L%s' % path for path in options.lib_path]
-arguments += ['-l%s' % path for path in options.libs]
-
-# Add rpaths to install dir and its dependencies. We add both lib and lib64
-# here because we don't know which will be created.
-rpaths.extend(options.lib_path)
-rpaths.append('%s/lib' % spack_prefix)
-rpaths.append('%s/lib64' % spack_prefix)
-if mode == ccld:
- arguments += ['-Wl,-rpath,%s' % p for p in rpaths]
-elif mode == ld:
- pairs = [('-rpath', '%s' % p) for p in rpaths]
- arguments += [item for sublist in pairs for item in sublist]
-
-# Unset some pesky environment variables
-for var in ["LD_LIBRARY_PATH", "LD_RUN_PATH", "DYLD_LIBRARY_PATH"]:
- if var in os.environ:
- os.environ.pop(var)
-
-# Ensure that the delegated command doesn't just call this script again.
-remove_paths = ['.'] + spack_env_path
-path = [p for p in get_path("PATH") if p not in remove_paths]
-os.environ["PATH"] = ":".join(path)
-
-full_command = [command] + arguments
-
-if spack_debug:
- input_log = os.path.join(spack_debug_log_dir, 'spack-cc-%s.in.log' % spack_spec)
- output_log = os.path.join(spack_debug_log_dir, 'spack-cc-%s.out.log' % spack_spec)
- with closing(open(input_log, 'a')) as log:
- args = [os.path.basename(sys.argv[0])] + sys.argv[1:]
- log.write("%s\n" % " ".join(arg.replace(' ', r'\ ') for arg in args))
- with closing(open(output_log, 'a')) as log:
- log.write("%s\n" % " ".join(full_command))
-
-rcode = subprocess.call(full_command)
-sys.exit(rcode)
+#!/bin/bash
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+#
+# Spack compiler wrapper script.
+#
+# Compiler commands go through this compiler wrapper in Spack builds.
+# The compiler wrapper is a thin layer around the standard compilers.
+# It enables several key pieces of functionality:
+#
+# 1. It allows Spack to swap compilers into and out of builds easily.
+# 2. It adds several options to the compile line so that spack
+# packages can find their dependencies at build time and run time:
+# -I arguments for dependency /include directories.
+# -L arguments for dependency /lib directories.
+# -Wl,-rpath arguments for dependency /lib directories.
+#
+
+# This is the list of environment variables that need to be set before
+# the script runs. They are set by routines in spack.build_environment
+# as part of spack.package.Package.do_install().
+parameters="
+SPACK_PREFIX
+SPACK_ENV_PATH
+SPACK_DEBUG_LOG_DIR
+SPACK_COMPILER_SPEC
+SPACK_SHORT_SPEC"
+
+# The compiler input variables are checked for sanity later:
+# SPACK_CC, SPACK_CXX, SPACK_F77, SPACK_FC
+# Debug flag is optional; set to true for debug logging:
+# SPACK_DEBUG
+# Test command is used to unit test the compiler script.
+# SPACK_TEST_COMMAND
+# Dependencies can be empty for pkgs with no deps:
+# SPACK_DEPENDENCIES
+
+# die()
+# Prints a message and exits with error 1.
+function die {
+ echo "$@"
+ exit 1
+}
+
+for param in $parameters; do
+ if [ -z "${!param}" ]; then
+ die "Spack compiler must be run from spack! Input $param was missing!"
+ fi
+done
+
+#
+# Figure out the type of compiler, the language, and the mode so that
+# the compiler script knows what to do.
+#
+# Possible languages are C, C++, Fortran 77, and Fortran 90.
+# 'command' is set based on the input command to $SPACK_[CC|CXX|F77|F90]
+#
+# 'mode' is set to one of:
+# cc compile
+# ld link
+# ccld compile & link
+# cpp preprocessor
+# vcheck version check
+#
+command=$(basename "$0")
+case "$command" in
+ cc|gcc|c89|c99|clang)
+ command="$SPACK_CC"
+ language="C"
+ ;;
+ c++|CC|g++|clang++)
+ command="$SPACK_CXX"
+ language="C++"
+ ;;
+ f77)
+ command="$SPACK_F77"
+ language="Fortran 77"
+ ;;
+ fc|f90|f95)
+ command="$SPACK_FC"
+ language="Fortran 90"
+ ;;
+ cpp)
+ mode=cpp
+ ;;
+ ld)
+ mode=ld
+ ;;
+ *)
+ die "Unkown compiler: $command"
+ ;;
+esac
+
+# Finish setting up the mode.
+if [ -z "$mode" ]; then
+ mode=ccld
+ for arg in "$@"; do
+ if [ "$arg" = -v -o "$arg" = -V -o "$arg" = --version -o "$arg" = -dumpversion ]; then
+ mode=vcheck
+ break
+ elif [ "$arg" = -E ]; then
+ mode=cpp
+ break
+ elif [ "$arg" = -c ]; then
+ mode=cc
+ break
+ fi
+ done
+fi
+
+# Dump the version and exit if we're in testing mode.
+if [ "$SPACK_TEST_COMMAND" = "dump-mode" ]; then
+ echo "$mode"
+ exit
+fi
+
+# Check that at least one of the real commands was actually selected,
+# otherwise we don't know what to execute.
+if [ -z "$command" ]; then
+ die "ERROR: Compiler '$SPACK_COMPILER_SPEC' does not support compiling $language programs."
+fi
+
+# Save original command for debug logging
+input_command="$@"
+
+#
+# Now do real parsing of the command line args, trying hard to keep
+# non-rpath linker arguments in the proper order w.r.t. other command
+# line arguments. This is important for things like groups.
+#
+includes=()
+libraries=()
+libs=()
+rpaths=()
+other_args=()
+
+while [ -n "$1" ]; do
+ case "$1" in
+ -I*)
+ arg="${1#-I}"
+ if [ -z "$arg" ]; then shift; arg="$1"; fi
+ includes+=("$arg")
+ ;;
+ -L*)
+ arg="${1#-L}"
+ if [ -z "$arg" ]; then shift; arg="$1"; fi
+ libraries+=("$arg")
+ ;;
+ -l*)
+ arg="${1#-l}"
+ if [ -z "$arg" ]; then shift; arg="$1"; fi
+ libs+=("$arg")
+ ;;
+ -Wl,*)
+ arg="${1#-Wl,}"
+ if [ -z "$arg" ]; then shift; arg="$1"; fi
+ if [[ "$arg" = -rpath=* ]]; then
+ rpaths+=("${arg#-rpath=}")
+ elif [[ "$arg" = -rpath ]]; then
+ shift; arg="$1"
+ if [[ "$arg" != -Wl,* ]]; then
+ die "-Wl,-rpath was not followed by -Wl,*"
+ fi
+ rpaths+=("${arg#-Wl,}")
+ else
+ other_args+=("-Wl,$arg")
+ fi
+ ;;
+ -Xlinker,*)
+ arg="${1#-Xlinker,}"
+ if [ -z "$arg" ]; then shift; arg="$1"; fi
+ if [[ "$arg" = -rpath=* ]]; then
+ rpaths+=("${arg#-rpath=}")
+ elif [[ "$arg" = -rpath ]]; then
+ shift; arg="$1"
+ if [[ "$arg" != -Xlinker,* ]]; then
+ die "-Xlinker,-rpath was not followed by -Xlinker,*"
+ fi
+ rpaths+=("${arg#-Xlinker,}")
+ else
+ other_args+=("-Xlinker,$arg")
+ fi
+ ;;
+ *)
+ other_args+=("$1")
+ ;;
+ esac
+ shift
+done
+
+# Dump parsed values for unit testing if asked for
+if [ -n "$SPACK_TEST_COMMAND" ]; then
+ IFS=$'\n'
+ case "$SPACK_TEST_COMMAND" in
+ dump-includes) echo "${includes[*]}";;
+ dump-libraries) echo "${libraries[*]}";;
+ dump-libs) echo "${libs[*]}";;
+ dump-rpaths) echo "${rpaths[*]}";;
+ dump-other-args) echo "${other_args[*]}";;
+ dump-all)
+ echo "INCLUDES:"
+ echo "${includes[*]}"
+ echo
+ echo "LIBRARIES:"
+ echo "${libraries[*]}"
+ echo
+ echo "LIBS:"
+ echo "${libs[*]}"
+ echo
+ echo "RPATHS:"
+ echo "${rpaths[*]}"
+ echo
+ echo "ARGS:"
+ echo "${other_args[*]}"
+ ;;
+ *)
+ echo "ERROR: Unknown test command"
+ exit 1 ;;
+ esac
+ exit
+fi
+
+# Read spack dependencies from the path environment variable
+IFS=':' read -ra deps <<< "$SPACK_DEPENDENCIES"
+for dep in "${deps[@]}"; do
+ if [ -d "$dep/include" ]; then
+ includes+=("$dep/include")
+ fi
+
+ if [ -d "$dep/lib" ]; then
+ libraries+=("$dep/lib")
+ rpaths+=("$dep/lib")
+ fi
+
+ if [ -d "$dep/lib64" ]; then
+ libraries+=("$dep/lib64")
+ rpaths+=("$dep/lib64")
+ fi
+done
+
+# Include all -L's and prefix/whatever dirs in rpath
+for dir in "${libraries[@]}"; do
+ [ "$dir" != "." ] && rpaths+=("$dir")
+done
+rpaths+=("$SPACK_PREFIX/lib")
+rpaths+=("$SPACK_PREFIX/lib64")
+
+# Put the arguments together
+args=()
+for dir in "${includes[@]}"; do args+=("-I$dir"); done
+args+=("${other_args[@]}")
+for dir in "${libraries[@]}"; do args+=("-L$dir"); done
+for lib in "${libs[@]}"; do args+=("-l$lib"); done
+
+if [ "$mode" = ccld ]; then
+ for dir in "${rpaths[@]}"; do
+ args+=("-Wl,-rpath")
+ args+=("-Wl,$dir");
+ done
+elif [ "$mode" = ld ]; then
+ for dir in "${rpaths[@]}"; do
+ args+=("-rpath")
+ args+=("$dir");
+ done
+fi
+
+#
+# Unset pesky environment variables that could affect build sanity.
+#
+unset LD_LIBRARY_PATH
+unset LD_RUN_PATH
+unset DYLD_LIBRARY_PATH
+
+#
+# Filter '.' and Spack environment directories out of PATH so that
+# this script doesn't just call itself
+#
+IFS=':' read -ra env_path <<< "$PATH"
+IFS=':' read -ra spack_env_dirs <<< "$SPACK_ENV_PATH"
+spack_env_dirs+=(".")
+PATH=""
+for dir in "${env_path[@]}"; do
+ remove=""
+ for rm_dir in "${spack_env_dirs[@]}"; do
+ if [ "$dir" = "$rm_dir" ]; then remove=True; fi
+ done
+ if [ -z "$remove" ]; then
+ if [ -z "$PATH" ]; then
+ PATH="$dir"
+ else
+ PATH="$PATH:$dir"
+ fi
+ fi
+done
+export PATH
+
+full_command=("$command")
+full_command+=("${args[@]}")
+
+#
+# Write the input and output commands to debug logs if it's asked for.
+#
+if [ "$SPACK_DEBUG" = "TRUE" ]; then
+ input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_SHORT_SPEC.in.log"
+ output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_SHORT_SPEC.out.log"
+ echo "$input_command" >> $input_log
+ echo "$mode ${full_command[@]}" >> $output_log
+fi
+
+exec "${full_command[@]}"
diff --git a/lib/spack/external/argparse.py b/lib/spack/external/argparse.py
index 42b64ee7be..394e5da152 100644
--- a/lib/spack/external/argparse.py
+++ b/lib/spack/external/argparse.py
@@ -1708,6 +1708,21 @@ class ArgumentParser(_AttributeHolder, _ActionsContainer):
self._positionals._add_action(action)
return action
+
+ def get_subparser(self, name):
+ """Gets a subparser added with the supplied name.
+ This is an extension to the standard argparse API.
+ """
+ subpasrsers_actions = [
+ action for action in self._actions
+ if isinstance(action, _SubParsersAction)]
+ for action in subpasrsers_actions:
+ for choice, subparser in action.choices.items():
+ if choice == name:
+ return subparser
+ return None
+
+
def _get_optional_actions(self):
return [action
for action in self._actions
diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py
index dc722297ec..576aeb16bd 100644
--- a/lib/spack/llnl/util/filesystem.py
+++ b/lib/spack/llnl/util/filesystem.py
@@ -22,13 +22,15 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-__all__ = ['install', 'expand_user', 'working_dir', 'touch', 'mkdirp',
- 'join_path', 'ancestor', 'can_access', 'filter_file', 'change_sed_delimiter']
+__all__ = ['set_install_permissions', 'install', 'expand_user', 'working_dir',
+ 'touch', 'touchp', 'mkdirp', 'force_remove', 'join_path', 'ancestor',
+ 'can_access', 'filter_file', 'change_sed_delimiter', 'is_exe']
import os
import sys
import re
import shutil
+import stat
import errno
import getpass
from contextlib import contextmanager, closing
@@ -62,8 +64,11 @@ def filter_file(regex, repl, *filenames, **kwargs):
# Allow strings to use \1, \2, etc. for replacement, like sed
if not callable(repl):
unescaped = repl.replace(r'\\', '\\')
- repl = lambda m: re.sub(
- r'\\([0-9])', lambda x: m.group(int(x.group(1))), unescaped)
+ def replace_groups_with_groupid(m):
+ def groupid_to_group(x):
+ return m.group(int(x.group(1)))
+ return re.sub(r'\\([1-9])', groupid_to_group, unescaped)
+ repl = replace_groups_with_groupid
if string:
regex = re.escape(regex)
@@ -127,10 +132,31 @@ def change_sed_delimiter(old_delim, new_delim, *filenames):
filter_file(double_quoted, '"%s"' % repl, f)
+def set_install_permissions(path):
+ """Set appropriate permissions on the installed file."""
+ if os.path.isdir(path):
+ os.chmod(path, 0755)
+ else:
+ os.chmod(path, 0644)
+
+
def install(src, dest):
"""Manually install a file to a particular location."""
tty.info("Installing %s to %s" % (src, dest))
shutil.copy(src, dest)
+ set_install_permissions(dest)
+
+ src_mode = os.stat(src).st_mode
+ dest_mode = os.stat(dest).st_mode
+ if src_mode | stat.S_IXUSR: dest_mode |= stat.S_IXUSR
+ if src_mode | stat.S_IXGRP: dest_mode |= stat.S_IXGRP
+ if src_mode | stat.S_IXOTH: dest_mode |= stat.S_IXOTH
+ os.chmod(dest, dest_mode)
+
+
+def is_exe(path):
+ """True if path is an executable file."""
+ return os.path.isfile(path) and os.access(path, os.X_OK)
def expand_user(path):
@@ -152,6 +178,15 @@ def mkdirp(*paths):
raise OSError(errno.EEXIST, "File alredy exists", path)
+def force_remove(*paths):
+ """Remove files without printing errors. Like rm -f, does NOT
+ remove directories."""
+ for path in paths:
+ try:
+ os.remove(path)
+ except OSError, e:
+ pass
+
@contextmanager
def working_dir(dirname, **kwargs):
if kwargs.get('create', False):
@@ -169,6 +204,12 @@ def touch(path):
os.utime(path, None)
+def touchp(path):
+ """Like touch, but creates any parent directories needed for the file."""
+ mkdirp(os.path.dirname(path))
+ touch(path)
+
+
def join_path(prefix, *args):
path = str(prefix)
for elt in args:
diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py
index ce7d0197f0..332367f537 100644
--- a/lib/spack/llnl/util/lang.py
+++ b/lib/spack/llnl/util/lang.py
@@ -68,6 +68,12 @@ def index_by(objects, *funcs):
index1 = index_by(list_of_specs, 'arch', 'compiler')
index2 = index_by(list_of_specs, 'compiler')
+
+ You can also index by tuples by passing tuples:
+
+ index1 = index_by(list_of_specs, ('arch', 'compiler'))
+
+ Keys in the resulting dict will look like ('gcc', 'bgqos_0').
"""
if not funcs:
return objects
@@ -75,6 +81,8 @@ def index_by(objects, *funcs):
f = funcs[0]
if isinstance(f, basestring):
f = lambda x: getattr(x, funcs[0])
+ elif isinstance(f, tuple):
+ f = lambda x: tuple(getattr(x, p) for p in funcs[0])
result = {}
for o in objects:
@@ -261,6 +269,59 @@ def in_function(function_name):
del stack
+def check_kwargs(kwargs, fun):
+ """Helper for making functions with kwargs. Checks whether the kwargs
+ are empty after all of them have been popped off. If they're
+ not, raises an error describing which kwargs are invalid.
+
+ Example::
+
+ def foo(self, **kwargs):
+ x = kwargs.pop('x', None)
+ y = kwargs.pop('y', None)
+ z = kwargs.pop('z', None)
+ check_kwargs(kwargs, self.foo)
+
+ # This raises a TypeError:
+ foo(w='bad kwarg')
+ """
+ if kwargs:
+ raise TypeError(
+ "'%s' is an invalid keyword argument for function %s()."
+ % (next(kwargs.iterkeys()), fun.__name__))
+
+
+def match_predicate(*args):
+ """Utility function for making string matching predicates.
+
+ Each arg can be a:
+ - regex
+ - list or tuple of regexes
+ - predicate that takes a string.
+
+ This returns a predicate that is true if:
+ - any arg regex matches
+ - any regex in a list or tuple of regexes matches.
+ - any predicate in args matches.
+ """
+ def match(string):
+ for arg in args:
+ if isinstance(arg, basestring):
+ if re.search(arg, string):
+ return True
+ elif isinstance(arg, list) or isinstance(arg, tuple):
+ if any(re.search(i, string) for i in arg):
+ return True
+ elif callable(arg):
+ if arg(string):
+ return True
+ else:
+ raise ValueError("args to match_predicate must be regex, "
+ "list of regexes, or callable.")
+ return False
+ return match
+
+
class RequiredAttributeError(ValueError):
def __init__(self, message):
super(RequiredAttributeError, self).__init__(message)
diff --git a/lib/spack/llnl/util/link_tree.py b/lib/spack/llnl/util/link_tree.py
new file mode 100644
index 0000000000..4d778eca1e
--- /dev/null
+++ b/lib/spack/llnl/util/link_tree.py
@@ -0,0 +1,197 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""LinkTree class for setting up trees of symbolic links."""
+__all__ = ['LinkTree']
+
+import os
+import shutil
+from llnl.util.filesystem import *
+
+empty_file_name = '.spack-empty'
+
+
+def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
+ """Traverse two filesystem trees simultaneously.
+
+ Walks the LinkTree directory in pre or post order. Yields each
+ file in the source directory with a matching path from the dest
+ directory, along with whether the file is a directory.
+ e.g., for this tree::
+
+ root/
+ a/
+ file1
+ file2
+ b/
+ file3
+
+ When called on dest, this yields::
+
+ ('root', 'dest')
+ ('root/a', 'dest/a')
+ ('root/a/file1', 'dest/a/file1')
+ ('root/a/file2', 'dest/a/file2')
+ ('root/b', 'dest/b')
+ ('root/b/file3', 'dest/b/file3')
+
+ Optional args:
+
+ order=[pre|post] -- Whether to do pre- or post-order traversal.
+
+ ignore=<predicate> -- Predicate indicating which files to ignore.
+
+ follow_nonexisting -- Whether to descend into directories in
+ src that do not exist in dest. Default True.
+
+ follow_links -- Whether to descend into symlinks in src.
+
+ """
+ follow_nonexisting = kwargs.get('follow_nonexisting', True)
+ follow_links = kwargs.get('follow_link', False)
+
+ # Yield in pre or post order?
+ order = kwargs.get('order', 'pre')
+ if order not in ('pre', 'post'):
+ raise ValueError("Order must be 'pre' or 'post'.")
+
+ # List of relative paths to ignore under the src root.
+ ignore = kwargs.get('ignore', lambda filename: False)
+
+ # Don't descend into ignored directories
+ if ignore(rel_path):
+ return
+
+ source_path = os.path.join(source_root, rel_path)
+ dest_path = os.path.join(dest_root, rel_path)
+
+ # preorder yields directories before children
+ if order == 'pre':
+ yield (source_path, dest_path)
+
+ for f in os.listdir(source_path):
+ source_child = os.path.join(source_path, f)
+ dest_child = os.path.join(dest_path, f)
+ rel_child = os.path.join(rel_path, f)
+
+ # Treat as a directory
+ if os.path.isdir(source_child) and (
+ follow_links or not os.path.islink(source_child)):
+
+ # When follow_nonexisting isn't set, don't descend into dirs
+ # in source that do not exist in dest
+ if follow_nonexisting or os.path.exists(dest_child):
+ tuples = traverse_tree(source_root, dest_root, rel_child, **kwargs)
+ for t in tuples: yield t
+
+ # Treat as a file.
+ elif not ignore(os.path.join(rel_path, f)):
+ yield (source_child, dest_child)
+
+ if order == 'post':
+ yield (source_path, dest_path)
+
+
+
+class LinkTree(object):
+ """Class to create trees of symbolic links from a source directory.
+
+ LinkTree objects are constructed with a source root. Their
+ methods allow you to create and delete trees of symbolic links
+ back to the source tree in specific destination directories.
+ Trees comprise symlinks only to files; directories are never
+ symlinked to, to prevent the source directory from ever being
+ modified.
+
+ """
+ def __init__(self, source_root):
+ if not os.path.exists(source_root):
+ raise IOError("No such file or directory: '%s'", source_root)
+
+ self._root = source_root
+
+
+ def find_conflict(self, dest_root, **kwargs):
+ """Returns the first file in dest that conflicts with src"""
+ kwargs['follow_nonexisting'] = False
+ for src, dest in traverse_tree(self._root, dest_root, **kwargs):
+ if os.path.isdir(src):
+ if os.path.exists(dest) and not os.path.isdir(dest):
+ return dest
+ elif os.path.exists(dest):
+ return dest
+ return None
+
+
+ def merge(self, dest_root, **kwargs):
+ """Link all files in src into dest, creating directories if necessary."""
+ kwargs['order'] = 'pre'
+ for src, dest in traverse_tree(self._root, dest_root, **kwargs):
+ if os.path.isdir(src):
+ if not os.path.exists(dest):
+ mkdirp(dest)
+ continue
+
+ if not os.path.isdir(dest):
+ raise ValueError("File blocks directory: %s" % dest)
+
+ # mark empty directories so they aren't removed on unmerge.
+ if not os.listdir(dest):
+ marker = os.path.join(dest, empty_file_name)
+ touch(marker)
+
+ else:
+ assert(not os.path.exists(dest))
+ os.symlink(src, dest)
+
+
+ def unmerge(self, dest_root, **kwargs):
+ """Unlink all files in dest that exist in src.
+
+ Unlinks directories in dest if they are empty.
+
+ """
+ kwargs['order'] = 'post'
+ for src, dest in traverse_tree(self._root, dest_root, **kwargs):
+ if os.path.isdir(src):
+ # Skip non-existing links.
+ if not os.path.exists(dest):
+ continue
+
+ if not os.path.isdir(dest):
+ raise ValueError("File blocks directory: %s" % dest)
+
+ # remove directory if it is empty.
+ if not os.listdir(dest):
+ shutil.rmtree(dest, ignore_errors=True)
+
+ # remove empty dir marker if present.
+ marker = os.path.join(dest, empty_file_name)
+ if os.path.exists(marker):
+ os.remove(marker)
+
+ elif os.path.exists(dest):
+ if not os.path.islink(dest):
+ raise ValueError("%s is not a link tree!" % dest)
+ os.remove(dest)
diff --git a/lib/spack/llnl/util/tty/__init__.py b/lib/spack/llnl/util/tty/__init__.py
index 40cae11200..aba9e61f4f 100644
--- a/lib/spack/llnl/util/tty/__init__.py
+++ b/lib/spack/llnl/util/tty/__init__.py
@@ -25,6 +25,9 @@
import sys
import os
import textwrap
+import fcntl
+import termios
+import struct
from StringIO import StringIO
from llnl.util.tty.color import *
@@ -114,21 +117,46 @@ def get_number(prompt, **kwargs):
return number
+def get_yes_or_no(prompt, **kwargs):
+ default_value = kwargs.get('default', None)
+
+ if default_value is None:
+ prompt += ' [y/n] '
+ elif default_value is True:
+ prompt += ' [Y/n] '
+ elif default_value is False:
+ prompt += ' [y/N] '
+ else:
+ raise ValueError("default for get_yes_no() must be True, False, or None.")
+
+ result = None
+ while result is None:
+ ans = raw_input(prompt).lower()
+ if not ans:
+ result = default_value
+ if result is None:
+ print "Please enter yes or no."
+ else:
+ if ans == 'y' or ans == 'yes':
+ result = True
+ elif ans == 'n' or ans == 'no':
+ result = False
+ return result
+
+
def hline(label=None, **kwargs):
- """Draw an optionally colored or labeled horizontal line.
+ """Draw a labeled horizontal line.
Options:
-
char Char to draw the line with. Default '-'
- color Color of the label. Default is no color.
max_width Maximum width of the line. Default is 64 chars.
-
- See tty.color for possible color formats.
"""
- char = kwargs.get('char', '-')
- color = kwargs.get('color', '')
- max_width = kwargs.get('max_width', 64)
+ char = kwargs.pop('char', '-')
+ max_width = kwargs.pop('max_width', 64)
+ if kwargs:
+ raise TypeError("'%s' is an invalid keyword argument for this function."
+ % next(kwargs.iterkeys()))
- cols, rows = terminal_size()
+ rows, cols = terminal_size()
if not cols:
cols = max_width
else:
@@ -136,37 +164,34 @@ def hline(label=None, **kwargs):
cols = min(max_width, cols)
label = str(label)
- prefix = char * 2 + " " + label + " "
- suffix = (cols - len(prefix)) * char
+ prefix = char * 2 + " "
+ suffix = " " + (cols - len(prefix) - clen(label)) * char
out = StringIO()
- if color:
- prefix = char * 2 + " " + color + cescape(label) + "@. "
- cwrite(prefix, stream=out, color=True)
- else:
- out.write(prefix)
+ out.write(prefix)
+ out.write(label)
out.write(suffix)
print out.getvalue()
def terminal_size():
- """Gets the dimensions of the console: cols, rows."""
+ """Gets the dimensions of the console: (rows, cols)."""
def ioctl_GWINSZ(fd):
try:
- cr = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'))
+ rc = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'))
except:
return
- return cr
- cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
- if not cr:
+ return rc
+ rc = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
+ if not rc:
try:
fd = os.open(os.ctermid(), os.O_RDONLY)
- cr = ioctl_GWINSZ(fd)
+ rc = ioctl_GWINSZ(fd)
os.close(fd)
except:
pass
- if not cr:
- cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
+ if not rc:
+ rc = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
- return int(cr[1]), int(cr[0])
+ return int(rc[0]), int(rc[1])
diff --git a/lib/spack/llnl/util/tty/colify.py b/lib/spack/llnl/util/tty/colify.py
index ff06241937..66c52c3968 100644
--- a/lib/spack/llnl/util/tty/colify.py
+++ b/lib/spack/llnl/util/tty/colify.py
@@ -22,16 +22,9 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-# colify
-# By Todd Gamblin, tgamblin@llnl.gov
-#
-# Takes a list of items as input and finds a good columnization of them,
-# similar to how gnu ls does. You can pipe output to this script and
-# get a tight display for it. This supports both uniform-width and
-# variable-width (tighter) columns.
-#
-# Run colify -h for more information.
-#
+"""
+Routines for printing columnar output. See colify() for more information.
+"""
import os
import sys
import fcntl
@@ -40,6 +33,7 @@ import struct
from StringIO import StringIO
from llnl.util.tty import terminal_size
+from llnl.util.tty.color import clen
class ColumnConfig:
@@ -47,32 +41,52 @@ class ColumnConfig:
self.cols = cols
self.line_length = 0
self.valid = True
- self.widths = [0] * cols
+ self.widths = [0] * cols # does not include ansi colors
+ self.cwidths = [0] * cols # includes ansi colors
def __repr__(self):
attrs = [(a,getattr(self, a)) for a in dir(self) if not a.startswith("__")]
return "<Config: %s>" % ", ".join("%s: %r" % a for a in attrs)
-def config_variable_cols(elts, console_cols, padding):
+def config_variable_cols(elts, console_width, padding, cols=0):
+ """Variable-width column fitting algorithm.
+
+ This function determines the most columns that can fit in the
+ screen width. Unlike uniform fitting, where all columns take
+ the width of the longest element in the list, each column takes
+ the width of its own longest element. This packs elements more
+ efficiently on screen.
+
+ If cols is nonzero, force the fit to use exactly that many columns.
+ """
+ if cols < 0:
+ raise ValueError("cols must be non-negative.")
+
# Get a bound on the most columns we could possibly have.
- lengths = [len(elt) for elt in elts]
- max_cols = max(1, console_cols / (min(lengths) + padding))
+ # 'clen' ignores length of ansi color sequences.
+ lengths = [clen(e) for e in elts]
+ clengths = [len(e) for e in elts]
+
+ max_cols = max(1, console_width / (min(lengths) + padding))
max_cols = min(len(elts), max_cols)
- configs = [ColumnConfig(c) for c in xrange(1, max_cols+1)]
- for elt, length in enumerate(lengths):
- for i, conf in enumerate(configs):
+ # Range of column counts to try. If forced, use the supplied value.
+ col_range = [cols] if cols else xrange(1, max_cols+1)
+
+ # Determine the most columns possible for the console width.
+ configs = [ColumnConfig(c) for c in col_range]
+ for i, length in enumerate(lengths):
+ for conf in configs:
if conf.valid:
- col = elt / ((len(elts) + i) / (i + 1))
- padded = length
- if col < i:
- padded += padding
+ col = i / ((len(elts) + conf.cols - 1) / conf.cols)
+ p = padding if col < (conf.cols - 1) else 0
- if conf.widths[col] < padded:
- conf.line_length += padded - conf.widths[col]
- conf.widths[col] = padded
- conf.valid = (conf.line_length < console_cols)
+ if conf.widths[col] < (length + p):
+ conf.line_length += length + p - conf.widths[col]
+ conf.widths[col] = length + p
+ conf.cwidths[col] = clengths[i] + p
+ conf.valid = (conf.line_length < console_width)
try:
config = next(conf for conf in reversed(configs) if conf.valid)
@@ -85,57 +99,107 @@ def config_variable_cols(elts, console_cols, padding):
return config
-def config_uniform_cols(elts, console_cols, padding):
- max_len = max(len(elt) for elt in elts) + padding
- cols = max(1, console_cols / max_len)
- cols = min(len(elts), cols)
+def config_uniform_cols(elts, console_width, padding, cols=0):
+ """Uniform-width column fitting algorithm.
+
+ Determines the longest element in the list, and determines how
+ many columns of that width will fit on screen. Returns a
+ corresponding column config.
+ """
+ if cols < 0:
+ raise ValueError("cols must be non-negative.")
+
+ # 'clen' ignores length of ansi color sequences.
+ max_len = max(clen(e) for e in elts) + padding
+ max_clen = max(len(e) for e in elts) + padding
+ if cols == 0:
+ cols = max(1, console_width / max_len)
+ cols = min(len(elts), cols)
+
config = ColumnConfig(cols)
config.widths = [max_len] * cols
+ config.cwidths = [max_clen] * cols
+
return config
-def isatty(ostream):
- force = os.environ.get('COLIFY_TTY', 'false').lower() != 'false'
- return force or ostream.isatty()
+def colify(elts, **options):
+ """Takes a list of elements as input and finds a good columnization
+ of them, similar to how gnu ls does. This supports both
+ uniform-width and variable-width (tighter) columns.
+ If elts is not a list of strings, each element is first converted
+ using str().
-def colify(elts, **options):
+ Keyword arguments:
+
+ output=<stream> A file object to write to. Default is sys.stdout.
+ indent=<int> Optionally indent all columns by some number of spaces.
+ padding=<int> Spaces between columns. Default is 2.
+ width=<int> Width of the output. Default is 80 if tty is not detected.
+
+ cols=<int> Force number of columns. Default is to size to terminal,
+ or single-column if no tty
+
+ tty=<bool> Whether to attempt to write to a tty. Default is to
+ autodetect a tty. Set to False to force single-column output.
+
+ method=<string> Method to use to fit columns. Options are variable or uniform.
+ Variable-width columns are tighter, uniform columns are all the
+ same width and fit less data on the screen.
+
+ len=<func> Function to use for calculating string length.
+ Useful for ignoring ansi color. Default is 'len'.
+ """
# Get keyword arguments or set defaults
- output = options.get("output", sys.stdout)
- indent = options.get("indent", 0)
- padding = options.get("padding", 2)
- tty = options.get('tty', None)
+ cols = options.pop("cols", 0)
+ output = options.pop("output", sys.stdout)
+ indent = options.pop("indent", 0)
+ padding = options.pop("padding", 2)
+ tty = options.pop('tty', None)
+ method = options.pop("method", "variable")
+ console_cols = options.pop("width", None)
+
+ if options:
+ raise TypeError("'%s' is an invalid keyword argument for this function."
+ % next(options.iterkeys()))
# elts needs to be an array of strings so we can count the elements
elts = [str(elt) for elt in elts]
if not elts:
return (0, ())
- if not tty:
- if tty is False or not isatty(output):
- for elt in elts:
- output.write("%s\n" % elt)
+ # environment size is of the form "<rows>x<cols>"
+ env_size = os.environ.get('COLIFY_SIZE')
+ if env_size:
+ try:
+ r, c = env_size.split('x')
+ console_rows, console_cols = int(r), int(c)
+ tty = True
+ except: pass
- maxlen = max(len(str(s)) for s in elts)
- return (1, (maxlen,))
+ # Use only one column if not a tty.
+ if not tty:
+ if tty is False or not output.isatty():
+ cols = 1
- console_cols = options.get("cols", None)
+ # Specify the number of character columns to use.
if not console_cols:
- console_cols, console_rows = terminal_size()
+ console_rows, console_cols = terminal_size()
elif type(console_cols) != int:
raise ValueError("Number of columns must be an int")
console_cols = max(1, console_cols - indent)
- method = options.get("method", "variable")
+ # Choose a method. Variable-width colums vs uniform-width.
if method == "variable":
- config = config_variable_cols(elts, console_cols, padding)
+ config = config_variable_cols(elts, console_cols, padding, cols)
elif method == "uniform":
- config = config_uniform_cols(elts, console_cols, padding)
+ config = config_uniform_cols(elts, console_cols, padding, cols)
else:
raise ValueError("method must be one of: " + allowed_methods)
cols = config.cols
- formats = ["%%-%ds" % width for width in config.widths[:-1]]
+ formats = ["%%-%ds" % width for width in config.cwidths[:-1]]
formats.append("%s") # last column has no trailing space
rows = (len(elts) + cols - 1) / cols
@@ -155,6 +219,25 @@ def colify(elts, **options):
return (config.cols, tuple(config.widths))
+def colify_table(table, **options):
+ if table is None:
+ raise TypeError("Can't call colify_table on NoneType")
+ elif not table or not table[0]:
+ raise ValueError("Table is empty in colify_table!")
+
+ columns = len(table[0])
+ def transpose():
+ for i in xrange(columns):
+ for row in table:
+ yield row[i]
+
+ if 'cols' in options:
+ raise ValueError("Cannot override columsn in colify_table.")
+ options['cols'] = columns
+
+ colify(transpose(), **options)
+
+
def colified(elts, **options):
"""Invokes the colify() function but returns the result as a string
instead of writing it to an output string."""
@@ -162,29 +245,3 @@ def colified(elts, **options):
options['output'] = sio
colify(elts, **options)
return sio.getvalue()
-
-
-if __name__ == "__main__":
- import optparse
-
- cols, rows = terminal_size()
- parser = optparse.OptionParser()
- parser.add_option("-u", "--uniform", action="store_true", default=False,
- help="Use uniformly sized columns instead of variable-size.")
- parser.add_option("-p", "--padding", metavar="PADDING", action="store",
- type=int, default=2, help="Spaces to add between columns. Default is 2.")
- parser.add_option("-i", "--indent", metavar="SPACES", action="store",
- type=int, default=0, help="Indent the output by SPACES. Default is 0.")
- parser.add_option("-w", "--width", metavar="COLS", action="store",
- type=int, default=cols, help="Indent the output by SPACES. Default is 0.")
- options, args = parser.parse_args()
-
- method = "variable"
- if options.uniform:
- method = "uniform"
-
- if sys.stdin.isatty():
- parser.print_help()
- sys.exit(1)
- else:
- colify([line.strip() for line in sys.stdin], method=method, **options.__dict__)
diff --git a/lib/spack/llnl/util/tty/color.py b/lib/spack/llnl/util/tty/color.py
index 14974a1014..81688d7f14 100644
--- a/lib/spack/llnl/util/tty/color.py
+++ b/lib/spack/llnl/util/tty/color.py
@@ -149,6 +149,11 @@ def colorize(string, **kwargs):
return re.sub(color_re, match_to_ansi(color), string)
+def clen(string):
+ """Return the length of a string, excluding ansi color sequences."""
+ return len(re.sub(r'\033[^m]*m', '', string))
+
+
def cwrite(string, stream=sys.stdout, color=None):
"""Replace all color expressions in string with ANSI control
codes and write the result to the stream. If color is
@@ -172,17 +177,20 @@ def cescape(string):
class ColorStream(object):
def __init__(self, stream, color=None):
- self.__class__ = type(stream.__class__.__name__,
- (self.__class__, stream.__class__), {})
- self.__dict__ = stream.__dict__
- self.color = color
- self.stream = stream
+ self._stream = stream
+ self._color = color
def write(self, string, **kwargs):
- if kwargs.get('raw', False):
- super(ColorStream, self).write(string)
- else:
- cwrite(string, self.stream, self.color)
+ raw = kwargs.get('raw', False)
+ raw_write = getattr(self._stream, 'write')
+
+ color = self._color
+ if self._color is None:
+ if raw:
+ color=True
+ else:
+ color = self._stream.isatty()
+ raw_write(colorize(string, color=color))
def writelines(self, sequence, **kwargs):
raw = kwargs.get('raw', False)
diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py
index da7088640f..eb891e3d57 100644
--- a/lib/spack/spack/__init__.py
+++ b/lib/spack/spack/__init__.py
@@ -26,7 +26,7 @@ import os
import tempfile
from llnl.util.filesystem import *
-# This lives in $prefix/lib/spac/spack/__file__
+# This lives in $prefix/lib/spack/spack/__file__
prefix = ancestor(__file__, 4)
# The spack script itself
@@ -78,7 +78,7 @@ concretizer = DefaultConcretizer()
# Version information
from spack.version import Version
-spack_version = Version("0.8")
+spack_version = Version("0.8.15")
#
# Executables used by Spack
@@ -138,7 +138,7 @@ sys_type = None
# should live. This file is overloaded for spack core vs. for packages.
#
__all__ = ['Package', 'Version', 'when', 'ver']
-from spack.package import Package
+from spack.package import Package, ExtensionConflictError
from spack.version import Version, ver
from spack.multimethod import when
diff --git a/lib/spack/spack/architecture.py b/lib/spack/spack/architecture.py
index 9b4c6e576d..0c4b605e91 100644
--- a/lib/spack/spack/architecture.py
+++ b/lib/spack/spack/architecture.py
@@ -65,7 +65,7 @@ def get_mac_sys_type():
if not mac_ver:
return None
- return "macosx_{}_{}".format(
+ return "macosx_%s_%s" % (
Version(mac_ver).up_to(2), py_platform.machine())
diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py
index b71c543e5d..59b25d96e7 100644
--- a/lib/spack/spack/build_environment.py
+++ b/lib/spack/spack/build_environment.py
@@ -28,6 +28,7 @@ Skimming this module is a nice way to get acquainted with the types of
calls you can make from within the install() function.
"""
import os
+import sys
import shutil
import multiprocessing
import platform
@@ -48,12 +49,11 @@ SPACK_NO_PARALLEL_MAKE = 'SPACK_NO_PARALLEL_MAKE'
# set_build_environment_variables and used to pass parameters to
# Spack's compiler wrappers.
#
-SPACK_LIB = 'SPACK_LIB'
SPACK_ENV_PATH = 'SPACK_ENV_PATH'
SPACK_DEPENDENCIES = 'SPACK_DEPENDENCIES'
SPACK_PREFIX = 'SPACK_PREFIX'
SPACK_DEBUG = 'SPACK_DEBUG'
-SPACK_SPEC = 'SPACK_SPEC'
+SPACK_SHORT_SPEC = 'SPACK_SHORT_SPEC'
SPACK_DEBUG_LOG_DIR = 'SPACK_DEBUG_LOG_DIR'
@@ -108,9 +108,6 @@ def set_compiler_environment_variables(pkg):
def set_build_environment_variables(pkg):
"""This ensures a clean install environment when we build packages.
"""
- # This tells the compiler script where to find the Spack installation.
- os.environ[SPACK_LIB] = spack.lib_path
-
# Add spack build environment path with compiler wrappers first in
# the path. We handle case sensitivity conflicts like "CC" and
# "cc" by putting one in the <build_env_path>/case-insensitive
@@ -140,7 +137,7 @@ def set_build_environment_variables(pkg):
# Working directory for the spack command itself, for debug logs.
if spack.debug:
os.environ[SPACK_DEBUG] = "TRUE"
- os.environ[SPACK_SPEC] = str(pkg.spec)
+ os.environ[SPACK_SHORT_SPEC] = pkg.spec.short_spec
os.environ[SPACK_DEBUG_LOG_DIR] = spack.spack_working_dir
# Add dependencies to CMAKE_PREFIX_PATH
@@ -187,6 +184,10 @@ def set_module_variables_for_package(pkg):
if platform.mac_ver()[0]:
m.std_cmake_args.append('-DCMAKE_FIND_FRAMEWORK=LAST')
+ # Set up CMake rpath
+ m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE')
+ m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH=%s' % ":".join(get_rpaths(pkg)))
+
# Emulate some shell commands for convenience
m.pwd = os.getcwd
m.cd = os.chdir
@@ -194,6 +195,7 @@ def set_module_variables_for_package(pkg):
m.makedirs = os.makedirs
m.remove = os.remove
m.removedirs = os.removedirs
+ m.symlink = os.symlink
m.mkdirp = mkdirp
m.install = install
@@ -203,3 +205,80 @@ def set_module_variables_for_package(pkg):
# Useful directories within the prefix are encapsulated in
# a Prefix object.
m.prefix = pkg.prefix
+
+
+def get_rpaths(pkg):
+ """Get a list of all the rpaths for a package."""
+ rpaths = [pkg.prefix.lib, pkg.prefix.lib64]
+ rpaths.extend(d.prefix.lib for d in pkg.spec.traverse(root=False)
+ if os.path.isdir(d.prefix.lib))
+ rpaths.extend(d.prefix.lib64 for d in pkg.spec.traverse(root=False)
+ if os.path.isdir(d.prefix.lib64))
+ return rpaths
+
+
+def setup_package(pkg):
+ """Execute all environment setup routines."""
+ set_compiler_environment_variables(pkg)
+ set_build_environment_variables(pkg)
+ set_module_variables_for_package(pkg)
+
+ # Allow dependencies to set up environment as well.
+ for dep_spec in pkg.spec.traverse(root=False):
+ dep_spec.package.setup_dependent_environment(
+ pkg.module, dep_spec, pkg.spec)
+
+
+def fork(pkg, function):
+ """Fork a child process to do part of a spack build.
+
+ Arguments:
+
+ pkg -- pkg whose environment we should set up the
+ forked process for.
+ function -- arg-less function to run in the child process.
+
+ Usage:
+ def child_fun():
+ # do stuff
+ build_env.fork(pkg, child_fun)
+
+ Forked processes are run with the build environment set up by
+ spack.build_environment. This allows package authors to have
+ full control over the environment, etc. without affecting
+ other builds that might be executed in the same spack call.
+
+ If something goes wrong, the child process is expected to print
+ the error and the parent process will exit with error as
+ well. If things go well, the child exits and the parent
+ carries on.
+ """
+ try:
+ pid = os.fork()
+ except OSError, e:
+ raise InstallError("Unable to fork build process: %s" % e)
+
+ if pid == 0:
+ # Give the child process the package's build environment.
+ setup_package(pkg)
+
+ try:
+ # call the forked function.
+ function()
+
+ # Use os._exit here to avoid raising a SystemExit exception,
+ # which interferes with unit tests.
+ os._exit(0)
+ except:
+ # Child doesn't raise or return to main spack code.
+ # Just runs default exception handler and exits.
+ sys.excepthook(*sys.exc_info())
+ os._exit(1)
+
+ else:
+ # Parent process just waits for the child to complete. If the
+ # child exited badly, assume it already printed an appropriate
+ # message. Just make the parent exit with an error code.
+ pid, returncode = os.waitpid(pid, 0)
+ if returncode != 0:
+ sys.exit(1)
diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py
index 537db536dd..b96ac5af51 100644
--- a/lib/spack/spack/cmd/__init__.py
+++ b/lib/spack/spack/cmd/__init__.py
@@ -121,3 +121,18 @@ def elide_list(line_list, max_num=10):
return line_list[:max_num-1] + ['...'] + line_list[-1:]
else:
return line_list
+
+
+def disambiguate_spec(spec):
+ matching_specs = spack.db.get_installed(spec)
+ if not matching_specs:
+ tty.die("Spec '%s' matches no installed packages." % spec)
+
+ elif len(matching_specs) > 1:
+ args = ["%s matches multiple packages." % spec,
+ "Matching packages:"]
+ args += [" " + str(s) for s in matching_specs]
+ args += ["Use a more specific spec."]
+ tty.die(*args)
+
+ return matching_specs[0]
diff --git a/lib/spack/spack/cmd/activate.py b/lib/spack/spack/cmd/activate.py
new file mode 100644
index 0000000000..4070baaa70
--- /dev/null
+++ b/lib/spack/spack/cmd/activate.py
@@ -0,0 +1,58 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from external import argparse
+import llnl.util.tty as tty
+import spack
+import spack.cmd
+
+description = "Activate a package extension."
+
+def setup_parser(subparser):
+ subparser.add_argument(
+ '-f', '--force', action='store_true',
+ help="Activate without first activating dependencies.")
+ subparser.add_argument(
+ 'spec', nargs=argparse.REMAINDER, help="spec of package extension to activate.")
+
+
+def activate(parser, args):
+ # TODO: shouldn't have to concretize here. Fix DAG issues.
+ specs = spack.cmd.parse_specs(args.spec, concretize=True)
+ if len(specs) != 1:
+ tty.die("activate requires one spec. %d given." % len(specs))
+
+ # TODO: remove this hack when DAG info is stored in dir layout.
+ # This ensures the ext spec is always normalized properly.
+ spack.db.get(specs[0])
+
+ spec = spack.cmd.disambiguate_spec(specs[0])
+
+ if not spec.package.is_extension:
+ tty.die("%s is not an extension." % spec.name)
+
+ if spec.package.activated:
+ tty.die("Package %s is already activated." % specs[0].short_spec)
+
+ spec.package.do_activate()
diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py
index f9218b9df1..8a448450c2 100644
--- a/lib/spack/spack/cmd/checksum.py
+++ b/lib/spack/spack/cmd/checksum.py
@@ -38,7 +38,7 @@ import spack.util.crypto
from spack.stage import Stage, FailedDownloadError
from spack.version import *
-description ="Checksum available versions of a package to update a package file."
+description ="Checksum available versions of a package."
def setup_parser(subparser):
subparser.add_argument(
@@ -56,7 +56,6 @@ def get_checksums(versions, urls, **kwargs):
first_stage_function = kwargs.get('first_stage_function', None)
keep_stage = kwargs.get('keep_stage', False)
-
tty.msg("Downloading...")
hashes = []
for i, (url, version) in enumerate(zip(urls, versions)):
@@ -85,24 +84,24 @@ def checksum(parser, args):
pkg = spack.db.get(args.package)
# If the user asked for specific versions, use those.
- versions = [ver(v) for v in args.versions]
-
- if not all(type(v) == Version for v in versions):
- tty.die("Cannot generate checksums for version lists or " +
- "version ranges. Use unambiguous versions.")
-
- if not versions:
- versions = pkg.fetch_available_versions()
+ if args.versions:
+ versions = {}
+ for v in args.versions:
+ v = ver(v)
+ if not isinstance(v, Version):
+ tty.die("Cannot generate checksums for version lists or " +
+ "version ranges. Use unambiguous versions.")
+ versions[v] = pkg.url_for_version(v)
+ else:
+ versions = pkg.fetch_remote_versions()
if not versions:
- tty.die("Could not fetch any available versions for %s." % pkg.name)
-
- versions = list(reversed(sorted(versions)))
- urls = [pkg.url_for_version(v) for v in versions]
+ tty.die("Could not fetch any versions for %s." % pkg.name)
+ sorted_versions = sorted(versions, reverse=True)
- tty.msg("Found %s versions of %s." % (len(urls), pkg.name),
+ tty.msg("Found %s versions of %s." % (len(versions), pkg.name),
*spack.cmd.elide_list(
- ["%-10s%s" % (v,u) for v, u in zip(versions, urls)]))
+ ["%-10s%s" % (v, versions[v]) for v in sorted_versions]))
print
archives_to_fetch = tty.get_number(
"How many would you like to checksum?", default=5, abort='q')
@@ -112,10 +111,12 @@ def checksum(parser, args):
return
version_hashes = get_checksums(
- versions[:archives_to_fetch], urls[:archives_to_fetch], keep_stage=args.keep_stage)
+ sorted_versions[:archives_to_fetch],
+ [versions[v] for v in sorted_versions[:archives_to_fetch]],
+ keep_stage=args.keep_stage)
if not version_hashes:
- tty.die("Could not fetch any available versions for %s." % pkg.name)
+ tty.die("Could not fetch any versions for %s." % pkg.name)
version_lines = [" version('%s', '%s')" % (v, h) for v, h in version_hashes]
tty.msg("Checksummed new versions of %s:" % pkg.name, *version_lines)
diff --git a/lib/spack/spack/cmd/clean.py b/lib/spack/spack/cmd/clean.py
index 79dd91c5bf..c20136ebe5 100644
--- a/lib/spack/spack/cmd/clean.py
+++ b/lib/spack/spack/cmd/clean.py
@@ -1,5 +1,5 @@
##############################################################################
-# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Copyright (c) 2013-2014, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
@@ -28,39 +28,19 @@ import llnl.util.tty as tty
import spack
import spack.cmd
-import spack.stage as stage
-description = "Remove staged files for packages"
+description = "Remove build stage and source tarball for packages."
def setup_parser(subparser):
- subparser.add_argument('-c', "--clean", action="store_true", dest='clean',
- help="run make clean in the build directory (default)")
- subparser.add_argument('-w', "--work", action="store_true", dest='work',
- help="delete the build directory and re-expand it from its archive.")
- subparser.add_argument('-d', "--dist", action="store_true", dest='dist',
- help="delete the downloaded archive.")
subparser.add_argument('packages', nargs=argparse.REMAINDER,
help="specs of packages to clean")
def clean(parser, args):
if not args.packages:
- tty.die("spack clean requires at least one package argument")
+ tty.die("spack clean requires at least one package spec.")
specs = spack.cmd.parse_specs(args.packages, concretize=True)
for spec in specs:
package = spack.db.get(spec)
- if args.dist:
- package.do_clean_dist()
- tty.msg("Cleaned %s" % package.name)
-
- elif args.work:
- package.do_clean_work()
- tty.msg("Restaged %s" % package.name)
-
- else:
- try:
- package.do_clean()
- except subprocess.CalledProcessError, e:
- tty.warn("Warning: 'make clean' didn't work. Consider 'spack clean --work'.")
- tty.msg("Made clean for %s" % package.name)
+ package.do_clean()
diff --git a/lib/spack/spack/cmd/compiler.py b/lib/spack/spack/cmd/compiler.py
index ac9c844a4c..e37f44b3b7 100644
--- a/lib/spack/spack/cmd/compiler.py
+++ b/lib/spack/spack/cmd/compiler.py
@@ -25,13 +25,14 @@
from external import argparse
import llnl.util.tty as tty
+from llnl.util.tty.color import colorize
from llnl.util.tty.colify import colify
from llnl.util.lang import index_by
import spack.compilers
import spack.spec
import spack.config
-from spack.compilation import get_path
+from spack.util.environment import get_path
from spack.spec import CompilerSpec
description = "Manage compilers"
@@ -96,9 +97,12 @@ def compiler_info(args):
def compiler_list(args):
tty.msg("Available compilers")
index = index_by(spack.compilers.all_compilers(), 'name')
- for name, compilers in index.items():
- tty.hline(name, char='-', color=spack.spec.compiler_color)
- colify(reversed(sorted(compilers)), indent=4)
+ for i, (name, compilers) in enumerate(index.items()):
+ if i >= 1: print
+
+ cname = "%s{%s}" % (spack.spec.compiler_color, name)
+ tty.hline(colorize(cname), char='-')
+ colify(reversed(sorted(compilers)))
def compiler(parser, args):
diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py
index 7ac10285a4..c09b9a3e36 100644
--- a/lib/spack/spack/cmd/create.py
+++ b/lib/spack/spack/cmd/create.py
@@ -28,6 +28,7 @@ import hashlib
import re
from contextlib import closing
+from external.ordereddict import OrderedDict
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp
@@ -159,32 +160,33 @@ def create(parser, args):
else:
mkdirp(os.path.dirname(pkg_path))
- versions = list(reversed(spack.package.find_versions_of_archive(url)))
+ versions = spack.package.find_versions_of_archive(url)
+ rkeys = sorted(versions.keys(), reverse=True)
+ versions = OrderedDict(zip(rkeys, (versions[v] for v in rkeys)))
archives_to_fetch = 1
if not versions:
# If the fetch failed for some reason, revert to what the user provided
- versions = [version]
- urls = [url]
- else:
- urls = [spack.url.substitute_version(url, v) for v in versions]
- if len(urls) > 1:
- tty.msg("Found %s versions of %s:" % (len(urls), name),
- *spack.cmd.elide_list(
- ["%-10s%s" % (v,u) for v, u in zip(versions, urls)]))
- print
- archives_to_fetch = tty.get_number(
- "Include how many checksums in the package file?",
- default=5, abort='q')
-
- if not archives_to_fetch:
- tty.msg("Aborted.")
- return
+ versions = { version : url }
+ elif len(versions) > 1:
+ tty.msg("Found %s versions of %s:" % (len(versions), name),
+ *spack.cmd.elide_list(
+ ["%-10s%s" % (v,u) for v, u in versions.iteritems()]))
+ print
+ archives_to_fetch = tty.get_number(
+ "Include how many checksums in the package file?",
+ default=5, abort='q')
+
+ if not archives_to_fetch:
+ tty.msg("Aborted.")
+ return
guesser = ConfigureGuesser()
ver_hash_tuples = spack.cmd.checksum.get_checksums(
- versions[:archives_to_fetch], urls[:archives_to_fetch],
- first_stage_function=guesser, keep_stage=args.keep_stage)
+ versions.keys()[:archives_to_fetch],
+ [versions[v] for v in versions.keys()[:archives_to_fetch]],
+ first_stage_function=guesser,
+ keep_stage=args.keep_stage)
if not ver_hash_tuples:
tty.die("Could not fetch any tarballs for %s." % name)
diff --git a/lib/spack/spack/cmd/deactivate.py b/lib/spack/spack/cmd/deactivate.py
new file mode 100644
index 0000000000..c9a4d4b2f6
--- /dev/null
+++ b/lib/spack/spack/cmd/deactivate.py
@@ -0,0 +1,104 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from external import argparse
+import llnl.util.tty as tty
+
+import spack
+import spack.cmd
+from spack.graph import topological_sort
+
+description = "Deactivate a package extension."
+
+def setup_parser(subparser):
+ subparser.add_argument(
+ '-f', '--force', action='store_true',
+ help="Run deactivation even if spec is NOT currently activated.")
+ subparser.add_argument(
+ '-a', '--all', action='store_true',
+        help="Deactivate all extensions of an extendable package, or "
+ "deactivate an extension AND its dependencies.")
+ subparser.add_argument(
+ 'spec', nargs=argparse.REMAINDER, help="spec of package extension to deactivate.")
+
+
+def deactivate(parser, args):
+ # TODO: shouldn't have to concretize here. Fix DAG issues.
+ specs = spack.cmd.parse_specs(args.spec, concretize=True)
+ if len(specs) != 1:
+ tty.die("deactivate requires one spec. %d given." % len(specs))
+
+ # TODO: remove this hack when DAG info is stored properly.
+ # This ensures the ext spec is always normalized properly.
+ spack.db.get(specs[0])
+
+ spec = spack.cmd.disambiguate_spec(specs[0])
+ pkg = spec.package
+
+ if args.all:
+ if pkg.extendable:
+ tty.msg("Deactivating all extensions of %s" % pkg.spec.short_spec)
+ ext_pkgs = spack.db.installed_extensions_for(spec)
+
+ for ext_pkg in ext_pkgs:
+ ext_pkg.spec.normalize()
+ if ext_pkg.activated:
+ ext_pkg.do_deactivate(force=True)
+
+ elif pkg.is_extension:
+ # TODO: store DAG info properly (see above)
+ spec.normalize()
+
+ if not args.force and not spec.package.activated:
+ tty.die("%s is not activated." % pkg.spec.short_spec)
+
+ tty.msg("Deactivating %s and all dependencies." % pkg.spec.short_spec)
+
+ topo_order = topological_sort(spec)
+ index = spec.index()
+
+ for name in topo_order:
+ espec = index[name]
+ epkg = espec.package
+
+ # TODO: store DAG info properly (see above)
+ epkg.spec.normalize()
+
+ if epkg.extends(pkg.extendee_spec):
+ if epkg.activated or args.force:
+
+ epkg.do_deactivate(force=args.force)
+
+ else:
+ tty.die("spack deactivate --all requires an extendable package or an extension.")
+
+ else:
+ if not pkg.is_extension:
+ tty.die("spack deactivate requires an extension.",
+ "Did you mean 'spack deactivate --all'?")
+
+ if not args.force and not spec.package.activated:
+ tty.die("Package %s is not activated." % specs[0].short_spec)
+
+ spec.package.do_deactivate(force=args.force)
diff --git a/lib/spack/spack/cmd/env.py b/lib/spack/spack/cmd/env.py
new file mode 100644
index 0000000000..bde76b5daf
--- /dev/null
+++ b/lib/spack/spack/cmd/env.py
@@ -0,0 +1,69 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import os
+from external import argparse
+import llnl.util.tty as tty
+import spack.cmd
+import spack.build_environment as build_env
+
+description = "Run a command with the environment for a particular spec's install."
+
+def setup_parser(subparser):
+ subparser.add_argument(
+ 'spec', nargs=argparse.REMAINDER, help="specs of package environment to emulate.")
+
+
+def env(parser, args):
+ if not args.spec:
+ tty.die("spack env requires a spec.")
+
+ # Specs may have spaces in them, so if they do, require that the
+ # caller put a '--' between the spec and the command to be
+ # executed. If there is no '--', assume that the spec is the
+ # first argument.
+ sep = '--'
+ if sep in args.spec:
+ s = args.spec.index(sep)
+ spec = args.spec[:s]
+ cmd = args.spec[s+1:]
+ else:
+ spec = args.spec[0]
+ cmd = args.spec[1:]
+
+ specs = spack.cmd.parse_specs(spec, concretize=True)
+ if len(specs) > 1:
+ tty.die("spack env only takes one spec.")
+ spec = specs[0]
+
+ build_env.setup_package(spec.package)
+
+ if not cmd:
+ # If no command act like the "env" command and print out env vars.
+ for key, val in os.environ.items():
+ print "%s=%s" % (key, val)
+
+ else:
+ # Otherwise execute the command with the new environment
+ os.execvp(cmd[0], cmd)
diff --git a/lib/spack/spack/cmd/extensions.py b/lib/spack/spack/cmd/extensions.py
new file mode 100644
index 0000000000..fc8e6842c3
--- /dev/null
+++ b/lib/spack/spack/cmd/extensions.py
@@ -0,0 +1,98 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import sys
+from external import argparse
+
+import llnl.util.tty as tty
+from llnl.util.tty.colify import colify
+
+import spack
+import spack.cmd
+import spack.cmd.find
+
+description = "List extensions for package."
+
+def setup_parser(subparser):
+ format_group = subparser.add_mutually_exclusive_group()
+ format_group.add_argument(
+ '-l', '--long', action='store_const', dest='mode', const='long',
+ help='Show dependency hashes as well as versions.')
+ format_group.add_argument(
+ '-p', '--paths', action='store_const', dest='mode', const='paths',
+ help='Show paths to extension install directories')
+ format_group.add_argument(
+ '-d', '--deps', action='store_const', dest='mode', const='deps',
+ help='Show full dependency DAG of extensions')
+
+ subparser.add_argument(
+ 'spec', nargs=argparse.REMAINDER, help='Spec of package to list extensions for')
+
+
+def extensions(parser, args):
+ if not args.spec:
+ tty.die("extensions requires a package spec.")
+
+ # Checks
+ spec = spack.cmd.parse_specs(args.spec)
+ if len(spec) > 1:
+ tty.die("Can only list extensions for one package.")
+
+ if not spec[0].package.extendable:
+ tty.die("%s is not an extendable package." % spec[0].name)
+
+ spec = spack.cmd.disambiguate_spec(spec[0])
+
+ if not spec.package.extendable:
+ tty.die("%s does not have extensions." % spec.short_spec)
+
+ if not args.mode:
+ args.mode = 'short'
+
+ # List package names of extensions
+ extensions = spack.db.extensions_for(spec)
+ if not extensions:
+ tty.msg("%s has no extensions." % spec.cshort_spec)
+ return
+ tty.msg(spec.cshort_spec)
+ tty.msg("%d extensions:" % len(extensions))
+ colify(ext.name for ext in extensions)
+
+ # List specs of installed extensions.
+ installed = [s.spec for s in spack.db.installed_extensions_for(spec)]
+ print
+ if not installed:
+ tty.msg("None installed.")
+ return
+ tty.msg("%d installed:" % len(installed))
+ spack.cmd.find.display_specs(installed, mode=args.mode)
+
+ # List specs of activated extensions.
+ activated = spack.install_layout.extension_map(spec)
+ print
+ if not activated:
+ tty.msg("None activated.")
+ return
+ tty.msg("%d currently activated:" % len(activated))
+ spack.cmd.find.display_specs(activated.values(), mode=args.mode)
diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py
index 2238484a21..70b10edb4e 100644
--- a/lib/spack/spack/cmd/find.py
+++ b/lib/spack/spack/cmd/find.py
@@ -24,13 +24,14 @@
##############################################################################
import sys
import collections
+import itertools
from external import argparse
from StringIO import StringIO
import llnl.util.tty as tty
-from llnl.util.tty.colify import colify
+from llnl.util.tty.colify import *
from llnl.util.tty.color import *
-from llnl.util.lang import partition_list, index_by
+from llnl.util.lang import *
import spack
import spack.spec
@@ -40,17 +41,64 @@ description ="Find installed spack packages"
def setup_parser(subparser):
format_group = subparser.add_mutually_exclusive_group()
format_group.add_argument(
- '-p', '--paths', action='store_true', dest='paths',
+ '-l', '--long', action='store_const', dest='mode', const='long',
+ help='Show dependency hashes as well as versions.')
+ format_group.add_argument(
+ '-p', '--paths', action='store_const', dest='mode', const='paths',
help='Show paths to package install directories')
format_group.add_argument(
- '-l', '--long', action='store_true', dest='full_specs',
- help='Show full-length specs of installed packages')
+ '-d', '--deps', action='store_const', dest='mode', const='deps',
+ help='Show full dependency DAG of installed packages')
subparser.add_argument(
'query_specs', nargs=argparse.REMAINDER,
help='optional specs to filter results')
+def display_specs(specs, **kwargs):
+ mode = kwargs.get('mode', 'short')
+
+ # Make a dict with specs keyed by architecture and compiler.
+ index = index_by(specs, ('architecture', 'compiler'))
+
+ # Traverse the index and print out each package
+ for i, (architecture, compiler) in enumerate(sorted(index)):
+ if i > 0: print
+
+ header = "%s{%s} / %s{%s}" % (
+ spack.spec.architecture_color, architecture,
+ spack.spec.compiler_color, compiler)
+ tty.hline(colorize(header), char='-')
+
+ specs = index[(architecture,compiler)]
+ specs.sort()
+
+ abbreviated = [s.format('$_$@$+', color=True) for s in specs]
+ if mode == 'paths':
+ # Print one spec per line along with prefix path
+ width = max(len(s) for s in abbreviated)
+ width += 2
+ format = " %-{}s%s".format(width)
+
+ for abbrv, spec in zip(abbreviated, specs):
+ print format % (abbrv, spec.prefix)
+
+ elif mode == 'deps':
+ for spec in specs:
+ print spec.tree(indent=4, format='$_$@$+$#', color=True),
+
+ elif mode in ('short', 'long'):
+ fmt = '$-_$@$+'
+ if mode == 'long':
+ fmt += '$#'
+ colify(s.format(fmt, color=True) for s in specs)
+
+ else:
+ raise ValueError(
+ "Invalid mode for display_specs: %s. Must be one of (paths, deps, short)." % mode)
+
+
+
def find(parser, args):
# Filter out specs that don't exist.
query_specs = spack.cmd.parse_specs(args.query_specs)
@@ -65,39 +113,17 @@ def find(parser, args):
if not query_specs:
return
- specs = [s for s in spack.db.installed_package_specs()
- if not query_specs or any(s.satisfies(q) for q in query_specs)]
+ # Get all the specs the user asked for
+ if not query_specs:
+ specs = set(spack.db.installed_package_specs())
+ else:
+ results = [set(spack.db.get_installed(qs)) for qs in query_specs]
+ specs = set.union(*results)
- # Make a dict with specs keyed by architecture and compiler.
- index = index_by(specs, 'architecture', 'compiler')
+ if not args.mode:
+ args.mode = 'short'
+
+ if sys.stdout.isatty():
+ tty.msg("%d installed packages." % len(specs))
+ display_specs(specs, mode=args.mode)
- # Traverse the index and print out each package
- for architecture in index:
- tty.hline(architecture, char='=', color=spack.spec.architecture_color)
- for compiler in index[architecture]:
- tty.hline(compiler, char='-', color=spack.spec.compiler_color)
-
- specs = index[architecture][compiler]
- specs.sort()
-
- abbreviated = [s.format('$_$@$+$#', color=True) for s in specs]
-
- if args.paths:
- # Print one spec per line along with prefix path
- width = max(len(s) for s in abbreviated)
- width += 2
- format = " %-{}s%s".format(width)
-
- for abbrv, spec in zip(abbreviated, specs):
- print format % (abbrv, spec.prefix)
-
- elif args.full_specs:
- for spec in specs:
- print spec.tree(indent=4, format='$_$@$+', color=True),
- else:
- max_len = max([len(s.name) for s in specs])
- max_len += 4
-
- for spec in specs:
- format = '$-' + str(max_len) + '_$@$+$#'
- print " " + spec.format(format, color=True)
diff --git a/lib/spack/spack/cmd/graph.py b/lib/spack/spack/cmd/graph.py
index 39dbfbb150..cb93a1b543 100644
--- a/lib/spack/spack/cmd/graph.py
+++ b/lib/spack/spack/cmd/graph.py
@@ -22,9 +22,45 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+from external import argparse
+
import spack
+import spack.cmd
+from spack.graph import *
+
+description = "Generate graphs of package dependency relationships."
+
+def setup_parser(subparser):
+ setup_parser.parser = subparser
+
+ method = subparser.add_mutually_exclusive_group()
+ method.add_argument(
+ '--ascii', action='store_true',
+ help="Draw graph as ascii to stdout (default).")
+ method.add_argument(
+ '--dot', action='store_true',
+ help="Generate graph in dot format and print to stdout.")
+
+ subparser.add_argument(
+ '--concretize', action='store_true', help="Concretize specs before graphing.")
+
+ subparser.add_argument(
+ 'specs', nargs=argparse.REMAINDER, help="specs of packages to graph.")
-description = "Write out inter-package dependencies in dot graph format"
def graph(parser, args):
- spack.db.graph_dependencies()
+ specs = spack.cmd.parse_specs(
+ args.specs, normalize=True, concretize=args.concretize)
+
+ if not specs:
+ setup_parser.parser.print_help()
+ return 1
+
+ if args.dot: # Dot graph only if asked for.
+ graph_dot(*specs)
+
+ elif specs: # ascii is default: user doesn't need to provide it explicitly
+ graph_ascii(specs[0], debug=spack.debug)
+ for spec in specs[1:]:
+            print # extra line between independent graphs
+ graph_ascii(spec, debug=spack.debug)
diff --git a/lib/spack/spack/cmd/info.py b/lib/spack/spack/cmd/info.py
index 29568b8c5d..eafafc601a 100644
--- a/lib/spack/spack/cmd/info.py
+++ b/lib/spack/spack/cmd/info.py
@@ -22,94 +22,18 @@
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import re
-import textwrap
-from StringIO import StringIO
from llnl.util.tty.colify import *
import spack
import spack.fetch_strategy as fs
description = "Get detailed information on a particular package"
-def setup_parser(subparser):
- subparser.add_argument('-r', '--rst', action='store_true',
- help="List all packages in reStructured text, for docs.")
- subparser.add_argument('name', metavar="PACKAGE", nargs='?', help="name of packages to get info on")
-
-
-def format_doc(pkg, **kwargs):
- """Wrap doc string at 72 characters and format nicely"""
- indent = kwargs.get('indent', 0)
-
- if not pkg.__doc__:
- return ""
-
- doc = re.sub(r'\s+', ' ', pkg.__doc__)
- lines = textwrap.wrap(doc, 72)
- results = StringIO()
- for line in lines:
- results.write((" " * indent) + line + "\n")
- return results.getvalue()
-
-
-def github_url(pkg):
- """Link to a package file on github."""
- return ("https://github.com/scalability-llnl/spack/blob/master/var/spack/packages/%s/package.py" %
- pkg.name)
-
-
-def rst_table(elts):
- """Print out a RST-style table."""
- cols = StringIO()
- ncol, widths = colify(elts, output=cols, tty=True)
- header = " ".join("=" * (w-1) for w in widths)
- return "%s\n%s%s" % (header, cols.getvalue(), header)
-
-def info_rst():
- """Print out information on all packages in restructured text."""
- pkgs = sorted(spack.db.all_packages(), key=lambda s:s.name.lower())
-
- print "Package List"
- print "=================="
-
- print "This is a list of things you can install using Spack. It is"
- print "automatically generated based on the packages in the latest Spack"
- print "release."
- print
-
- print "Spack currently has %d mainline packages:" % len(pkgs)
- print
- print rst_table("`%s`_" % p.name for p in pkgs)
- print
- print "-----"
-
- # Output some text for each package.
- for pkg in pkgs:
- print
- print ".. _%s:" % pkg.name
- print
- print pkg.name
- print "-" * len(pkg.name)
- print "Links"
- print " * `Homepage <%s>`__" % pkg.homepage
- print " * `%s/package.py <%s>`__" % (pkg.name, github_url(pkg))
- print
- if pkg.versions:
- print "Versions:"
- print " " + ", ".join(str(v) for v in reversed(sorted(pkg.versions)))
- if pkg.dependencies:
- print "Dependencies"
- print " " + ", ".join("`%s`_" % d if d != "mpi" else d
- for d in pkg.dependencies)
- print
- print "Description"
- print format_doc(pkg, indent=2)
- print
- print "-----"
+def setup_parser(subparser):
+ subparser.add_argument('name', metavar="PACKAGE", help="Name of package to get info for.")
-def info_text(pkg):
+def print_text_info(pkg):
"""Print out a plain text description of a package."""
print "Package: ", pkg.name
print "Homepage: ", pkg.homepage
@@ -144,17 +68,11 @@ def info_text(pkg):
print
print "Description:"
if pkg.__doc__:
- print format_doc(pkg, indent=4)
+ print pkg.format_doc(indent=4)
else:
print " None"
def info(parser, args):
- if args.rst:
- info_rst()
-
- else:
- if not args.name:
- tty.die("You must supply a package name.")
- pkg = spack.db.get(args.name)
- info_text(pkg)
+ pkg = spack.db.get(args.name)
+ print_text_info(pkg)
diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py
index 2374d02feb..2c2deb2803 100644
--- a/lib/spack/spack/cmd/install.py
+++ b/lib/spack/spack/cmd/install.py
@@ -44,6 +44,9 @@ def setup_parser(subparser):
'-n', '--no-checksum', action='store_true', dest='no_checksum',
help="Do not check packages against checksum")
subparser.add_argument(
+ '--fake', action='store_true', dest='fake',
+ help="Fake install. Just remove the prefix and touch a fake file in it.")
+ subparser.add_argument(
'packages', nargs=argparse.REMAINDER, help="specs of packages to install")
@@ -59,4 +62,5 @@ def install(parser, args):
package = spack.db.get(spec)
package.do_install(keep_prefix=args.keep_prefix,
keep_stage=args.keep_stage,
- ignore_deps=args.ignore_deps)
+ ignore_deps=args.ignore_deps,
+ fake=args.fake)
diff --git a/lib/spack/spack/cmd/list.py b/lib/spack/spack/cmd/list.py
index 5c7051d6a9..1f0978a18e 100644
--- a/lib/spack/spack/cmd/list.py
+++ b/lib/spack/spack/cmd/list.py
@@ -61,5 +61,4 @@ def list(parser, args):
indent=0
if sys.stdout.isatty():
tty.msg("%d packages." % len(sorted_packages))
- indent=2
colify(sorted_packages, indent=indent)
diff --git a/lib/spack/spack/cmd/location.py b/lib/spack/spack/cmd/location.py
index 3fc05d471d..709e894b7f 100644
--- a/lib/spack/spack/cmd/location.py
+++ b/lib/spack/spack/cmd/location.py
@@ -23,6 +23,7 @@
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
+import sys
from external import argparse
import llnl.util.tty as tty
@@ -77,38 +78,30 @@ def location(parser, args):
tty.die("You must supply a spec.")
if len(specs) != 1:
tty.die("Too many specs. Supply only one.")
- spec = specs[0]
if args.install_dir:
# install_dir command matches against installed specs.
- matching_specs = spack.db.get_installed(spec)
- if not matching_specs:
- tty.die("Spec '%s' matches no installed packages." % spec)
+ spec = spack.cmd.disambiguate_spec(specs[0])
+ print spec.prefix
- elif len(matching_specs) > 1:
- args = ["%s matches multiple packages." % spec,
- "Matching packages:"]
- args += [" " + str(s) for s in matching_specs]
- args += ["Use a more specific spec."]
- tty.die(*args)
+ else:
+ spec = specs[0]
- print matching_specs[0].prefix
+ if args.package_dir:
+ # This one just needs the spec name.
+ print join_path(spack.db.root, spec.name)
- elif args.package_dir:
- # This one just needs the spec name.
- print join_path(spack.db.root, spec.name)
+ else:
+ # These versions need concretized specs.
+ spec.concretize()
+ pkg = spack.db.get(spec)
- else:
- # These versions need concretized specs.
- spec.concretize()
- pkg = spack.db.get(spec)
-
- if args.stage_dir:
- print pkg.stage.path
-
- else: # args.build_dir is the default.
- if not pkg.stage.source_path:
- tty.die("Build directory does not exist yet. Run this to create it:",
- "spack stage " + " ".join(args.spec))
- print pkg.stage.source_path
+ if args.stage_dir:
+ print pkg.stage.path
+
+ else: # args.build_dir is the default.
+ if not pkg.stage.source_path:
+ tty.die("Build directory does not exist yet. Run this to create it:",
+ "spack stage " + " ".join(args.spec))
+ print pkg.stage.source_path
diff --git a/lib/spack/spack/cmd/md5.py b/lib/spack/spack/cmd/md5.py
index 496835c64b..dfa1be412b 100644
--- a/lib/spack/spack/cmd/md5.py
+++ b/lib/spack/spack/cmd/md5.py
@@ -41,6 +41,7 @@ def setup_parser(subparser):
def md5(parser, args):
if not args.files:
setup_parser.parser.print_help()
+ return 1
for f in args.files:
if not os.path.isfile(f):
diff --git a/lib/spack/spack/cmd/package-list.py b/lib/spack/spack/cmd/package-list.py
new file mode 100644
index 0000000000..f048482845
--- /dev/null
+++ b/lib/spack/spack/cmd/package-list.py
@@ -0,0 +1,95 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import re
+import cgi
+from StringIO import StringIO
+import llnl.util.tty as tty
+from llnl.util.tty.colify import *
+import spack
+
+description = "Print a list of all packages in reStructuredText."
+
+
+def github_url(pkg):
+ """Link to a package file on github."""
+ return ("https://github.com/scalability-llnl/spack/blob/master/var/spack/packages/%s/package.py" %
+ pkg.name)
+
+
+def rst_table(elts):
+ """Print out a RST-style table."""
+ cols = StringIO()
+ ncol, widths = colify(elts, output=cols, tty=True)
+ header = " ".join("=" * (w-1) for w in widths)
+ return "%s\n%s%s" % (header, cols.getvalue(), header)
+
+
+def print_rst_package_list():
+ """Print out information on all packages in restructured text."""
+ pkgs = sorted(spack.db.all_packages(), key=lambda s:s.name.lower())
+
+ print ".. _package-list:"
+ print
+ print "Package List"
+ print "=================="
+
+ print "This is a list of things you can install using Spack. It is"
+ print "automatically generated based on the packages in the latest Spack"
+ print "release."
+ print
+
+ print "Spack currently has %d mainline packages:" % len(pkgs)
+ print
+ print rst_table("`%s`_" % p.name for p in pkgs)
+ print
+ print "-----"
+
+ # Output some text for each package.
+ for pkg in pkgs:
+ print
+ print ".. _%s:" % pkg.name
+ print
+ print pkg.name
+ print "-" * len(pkg.name)
+ print "Links:"
+ print " * `%s <%s>`__" % (cgi.escape(pkg.homepage), pkg.homepage)
+ print " * `%s/package.py <%s>`__" % (pkg.name, github_url(pkg))
+ print
+ if pkg.versions:
+ print "Versions:"
+ print " " + ", ".join(str(v) for v in reversed(sorted(pkg.versions)))
+ if pkg.dependencies:
+ print "Dependencies"
+ print " " + ", ".join("`%s`_" % d if d != "mpi" else d
+ for d in pkg.dependencies)
+ print
+ print "Description:"
+ print pkg.format_doc(indent=2)
+ print
+ print "-----"
+
+
+def package_list(parser, args):
+ print_rst_package_list()
diff --git a/lib/spack/spack/cmd/pkg.py b/lib/spack/spack/cmd/pkg.py
index 82ebd13ff9..055b7c2062 100644
--- a/lib/spack/spack/cmd/pkg.py
+++ b/lib/spack/spack/cmd/pkg.py
@@ -31,12 +31,16 @@ from llnl.util.tty.colify import colify
import spack
from spack.util.executable import *
-description = "Query packages associated with particular git revisions in spack."
+description = "Query packages associated with particular git revisions."
def setup_parser(subparser):
sp = subparser.add_subparsers(
metavar='SUBCOMMAND', dest='pkg_command')
+ add_parser = sp.add_parser('add', help=pkg_add.__doc__)
+ add_parser.add_argument('packages', nargs=argparse.REMAINDER,
+ help="Names of packages to add to git repo.")
+
list_parser = sp.add_parser('list', help=pkg_list.__doc__)
list_parser.add_argument('rev', default='HEAD', nargs='?',
help="Revision to list packages for.")
@@ -79,6 +83,16 @@ def list_packages(rev):
return sorted(line[len(relpath):] for line in output.split('\n') if line)
+def pkg_add(args):
+ for pkg_name in args.packages:
+ filename = spack.db.filename_for_package_name(pkg_name)
+ if not os.path.isfile(filename):
+ tty.die("No such package: %s. Path does not exist:" % pkg_name, filename)
+
+ git = get_git()
+ git('-C', spack.packages_path, 'add', filename)
+
+
def pkg_list(args):
"""List packages associated with a particular spack git revision."""
colify(list_packages(args.rev))
@@ -117,7 +131,8 @@ def pkg_added(args):
def pkg(parser, args):
- action = { 'diff' : pkg_diff,
+ action = { 'add' : pkg_add,
+ 'diff' : pkg_diff,
'list' : pkg_list,
'removed' : pkg_removed,
'added' : pkg_added }
diff --git a/lib/spack/spack/cmd/restage.py b/lib/spack/spack/cmd/restage.py
new file mode 100644
index 0000000000..e735a12c32
--- /dev/null
+++ b/lib/spack/spack/cmd/restage.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2014, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from external import argparse
+
+import llnl.util.tty as tty
+
+import spack
+import spack.cmd
+
+description = "Revert checked out package source code."
+
+def setup_parser(subparser):
+ subparser.add_argument('packages', nargs=argparse.REMAINDER,
+ help="specs of packages to restage")
+
+
+def restage(parser, args):
+ if not args.packages:
+ tty.die("spack restage requires at least one package spec.")
+
+ specs = spack.cmd.parse_specs(args.packages, concretize=True)
+ for spec in specs:
+ package = spack.db.get(spec)
+ package.do_restage()
diff --git a/lib/spack/spack/cmd/spec.py b/lib/spack/spack/cmd/spec.py
index 5fcb0a9b5a..e2cb5689c0 100644
--- a/lib/spack/spack/cmd/spec.py
+++ b/lib/spack/spack/cmd/spec.py
@@ -27,8 +27,8 @@ import spack.cmd
import llnl.util.tty as tty
-import spack.url as url
import spack
+import spack.url as url
description = "print out abstract and concrete versions of a spec."
diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py
index 84eb4703a6..6ded455390 100644
--- a/lib/spack/spack/cmd/uninstall.py
+++ b/lib/spack/spack/cmd/uninstall.py
@@ -65,20 +65,19 @@ def uninstall(parser, args):
" b) use a more specific spec."]
tty.die(*args)
-
if len(matching_specs) == 0:
+ if args.force: continue
tty.die("%s does not match any installed packages." % spec)
for s in matching_specs:
try:
# should work if package is known to spack
- pkgs.append(spack.db.get(s))
+ pkgs.append(s.package)
except spack.packages.UnknownPackageError, e:
# The package.py file has gone away -- but still want to uninstall.
spack.Package(s).do_uninstall(force=True)
-
# Sort packages to be uninstalled by the number of installed dependents
# This ensures we do things in the right order
def num_installed_deps(pkg):
diff --git a/lib/spack/spack/cmd/urls.py b/lib/spack/spack/cmd/urls.py
new file mode 100644
index 0000000000..417ce3ab68
--- /dev/null
+++ b/lib/spack/spack/cmd/urls.py
@@ -0,0 +1,58 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import sys
+import spack
+import spack.url
+
+description = "Inspect urls used by packages in spack."
+
+def setup_parser(subparser):
+ subparser.add_argument(
+ '-c', '--color', action='store_true',
+ help="Color the parsed version and name in the urls shown. "
+ "Version will be cyan, name red.")
+ subparser.add_argument(
+ '-e', '--extrapolation', action='store_true',
+ help="Color the versions used for extrapolation as well."
+ "Additional versions are green, names magenta.")
+
+
+def urls(parser, args):
+ urls = set()
+ for pkg in spack.db.all_packages():
+ url = getattr(pkg.__class__, 'url', None)
+ if url:
+ urls.add(url)
+
+ for params in pkg.versions.values():
+ url = params.get('url', None)
+ if url:
+ urls.add(url)
+
+ for url in sorted(urls):
+ if args.color or args.extrapolation:
+ print spack.url.color_url(url, subs=args.extrapolation, errors=True)
+ else:
+ print url
diff --git a/lib/spack/spack/cmd/versions.py b/lib/spack/spack/cmd/versions.py
index c545035279..ed16728261 100644
--- a/lib/spack/spack/cmd/versions.py
+++ b/lib/spack/spack/cmd/versions.py
@@ -24,6 +24,7 @@
##############################################################################
import os
from llnl.util.tty.colify import colify
+import llnl.util.tty as tty
import spack
description ="List available versions of a package"
@@ -34,4 +35,21 @@ def setup_parser(subparser):
def versions(parser, args):
pkg = spack.db.get(args.package)
- colify(reversed(pkg.fetch_available_versions()))
+
+ safe_versions = pkg.versions
+ fetched_versions = pkg.fetch_remote_versions()
+ remote_versions = set(fetched_versions).difference(safe_versions)
+
+ tty.msg("Safe versions (already checksummed):")
+ colify(sorted(safe_versions, reverse=True), indent=2)
+
+ tty.msg("Remote versions (not yet checksummed):")
+ if not remote_versions:
+ if not fetched_versions:
+ print " Found no versions for %s" % pkg.name
+ tty.debug("Check the list_url and list_depth attribute on the "
+ "package to help Spack find versions.")
+ else:
+ print " Found no unckecksummed versions for %s" % pkg.name
+ else:
+ colify(sorted(remote_versions, reverse=True), indent=2)
diff --git a/lib/spack/spack/compilation.py b/lib/spack/spack/compilation.py
deleted file mode 100644
index 3a469376a8..0000000000
--- a/lib/spack/spack/compilation.py
+++ /dev/null
@@ -1,117 +0,0 @@
-##############################################################################
-# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
-# Produced at the Lawrence Livermore National Laboratory.
-#
-# This file is part of Spack.
-# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
-# LLNL-CODE-647188
-#
-# For details, see https://scalability-llnl.github.io/spack
-# Please also see the LICENSE file for our notice and the LGPL.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License (as published by
-# the Free Software Foundation) version 2.1 dated February 1999.
-#
-# This program is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
-# conditions of the GNU General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-##############################################################################
-"""\
-The ``compilation`` module contains utility functions used by the compiler
-wrapper script.
-
-.. todo::
-
- Think about moving this into the script to increase compilation
- speed.
-
-"""
-import os
-import sys
-
-
-def get_env_var(name, required=True):
- value = os.environ.get(name)
- if required and value is None:
- print "%s must be run from spack." % os.path.abspath(sys.argv[0])
- sys.exit(1)
- return value
-
-
-def get_env_flag(name, required=False):
- value = get_env_var(name, required)
- if value:
- return value.lower() == "true"
- return False
-
-
-def get_path(name):
- path = os.environ.get(name, "").strip()
- if path:
- return path.split(":")
- else:
- return []
-
-
-def parse_rpaths(arguments):
- """argparse, for all its features, cannot understand most compilers'
- rpath arguments. This handles '-Wl,', '-Xlinker', and '-R'"""
- def get_next(arg, args):
- """Get an expected next value of an iterator, or die if it's not there"""
- try:
- return next(args)
- except StopIteration:
- # quietly ignore -rpath and -Xlinker without args.
- return None
-
- other_args = []
- def linker_args():
- """This generator function allows us to parse the linker args separately
- from the compiler args, so that we can handle them more naturally.
- """
- args = iter(arguments)
- for arg in args:
- if arg.startswith('-Wl,'):
- sub_args = [sub for sub in arg.replace('-Wl,', '', 1).split(',')]
- for arg in sub_args:
- yield arg
- elif arg == '-Xlinker':
- target = get_next(arg, args)
- if target is not None:
- yield target
- else:
- other_args.append(arg)
-
- # Extract all the possible ways rpath can appear in linker args, then
- # append non-rpaths to other_args. This happens in-line as the linker
- # args are extracted, so we preserve the original order of arguments.
- # This is important for args like --whole-archive, --no-whole-archive,
- # and others that tell the linker how to handle the next few libraries
- # it encounters on the command line.
- rpaths = []
- largs = linker_args()
- for arg in largs:
- if arg == '-rpath':
- target = get_next(arg, largs)
- if target is not None:
- rpaths.append(target)
-
- elif arg.startswith('-R'):
- target = arg.replace('-R', '', 1)
- if not target:
- target = get_next(arg, largs)
- if target is None: break
-
- if os.path.isdir(target):
- rpaths.append(target)
- else:
- other_args.extend(['-Wl,' + arg, '-Wl,' + target])
- else:
- other_args.append('-Wl,' + arg)
- return rpaths, other_args
diff --git a/lib/spack/spack/compiler.py b/lib/spack/spack/compiler.py
index 35e3b898ec..646050d267 100644
--- a/lib/spack/spack/compiler.py
+++ b/lib/spack/spack/compiler.py
@@ -35,8 +35,8 @@ import spack.error
import spack.spec
from spack.util.multiproc import parmap
from spack.util.executable import *
+from spack.util.environment import get_path
from spack.version import Version
-from spack.compilation import get_path
__all__ = ['Compiler', 'get_compiler_version']
@@ -169,6 +169,10 @@ class Compiler(object):
checks = []
for directory in path:
+ if not (os.path.isdir(directory) and
+ os.access(directory, os.R_OK | os.X_OK)):
+ continue
+
files = os.listdir(directory)
for exe in files:
full_path = join_path(directory, exe)
diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py
index 467472cced..e572cd89b6 100644
--- a/lib/spack/spack/compilers/__init__.py
+++ b/lib/spack/spack/compilers/__init__.py
@@ -40,7 +40,7 @@ from spack.util.multiproc import parmap
from spack.compiler import Compiler
from spack.util.executable import which
from spack.util.naming import mod_to_class
-from spack.compilation import get_path
+from spack.util.environment import get_path
_imported_compilers_module = 'spack.compilers'
_required_instance_vars = ['cc', 'cxx', 'f77', 'fc']
diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py
index eee8cb7fde..3f569f9dce 100644
--- a/lib/spack/spack/concretize.py
+++ b/lib/spack/spack/concretize.py
@@ -68,8 +68,9 @@ class DefaultConcretizer(object):
# If there are known avaialble versions, return the most recent
# version that satisfies the spec
pkg = spec.package
- valid_versions = [v for v in pkg.available_versions
- if any(v.satisfies(sv) for sv in spec.versions)]
+ valid_versions = sorted(
+ [v for v in pkg.versions
+ if any(v.satisfies(sv) for sv in spec.versions)])
if valid_versions:
spec.versions = ver([valid_versions[-1]])
diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py
index 9b31aad5fe..b2cf5dc801 100644
--- a/lib/spack/spack/directory_layout.py
+++ b/lib/spack/spack/directory_layout.py
@@ -27,9 +27,11 @@ import os
import exceptions
import hashlib
import shutil
+import tempfile
from contextlib import closing
import llnl.util.tty as tty
+from llnl.util.lang import memoized
from llnl.util.filesystem import join_path, mkdirp
import spack
@@ -53,6 +55,19 @@ class DirectoryLayout(object):
self.root = root
+ @property
+ def hidden_file_paths(self):
+ """Return a list of hidden files used by the directory layout.
+
+ Paths are relative to the root of an install directory.
+
+ If the directory layout uses no hidden files to maintain
+ state, this should return an empty container, e.g. [] or ().
+
+ """
+ raise NotImplementedError()
+
+
def all_specs(self):
"""To be implemented by subclasses to traverse all specs for which there is
a directory within the root.
@@ -71,6 +86,42 @@ class DirectoryLayout(object):
raise NotImplementedError()
+ def extension_map(self, spec):
+ """Get a dict of currently installed extension packages for a spec.
+
+ Dict maps { name : extension_spec }
+ Modifying dict does not affect internals of this layout.
+ """
+ raise NotImplementedError()
+
+
+ def check_extension_conflict(self, spec, ext_spec):
+ """Ensure that ext_spec can be activated in spec.
+
+ If not, raise ExtensionAlreadyInstalledError or
+ ExtensionConflictError.
+ """
+ raise NotImplementedError()
+
+
+ def check_activated(self, spec, ext_spec):
+ """Ensure that ext_spec can be removed from spec.
+
+ If not, raise NoSuchExtensionError.
+ """
+ raise NotImplementedError()
+
+
+ def add_extension(self, spec, ext_spec):
+ """Add to the list of currently installed extensions."""
+ raise NotImplementedError()
+
+
+ def remove_extension(self, spec, ext_spec):
+ """Remove from the list of currently installed extensions."""
+ raise NotImplementedError()
+
+
def path_for_spec(self, spec):
"""Return an absolute path from the root to a directory for the spec."""
_check_concrete(spec)
@@ -81,12 +132,17 @@ class DirectoryLayout(object):
def remove_path_for_spec(self, spec):
- """Removes a prefix and any empty parent directories from the root."""
+ """Removes a prefix and any empty parent directories from the root.
+ Raises RemoveFailedError if something goes wrong.
+ """
path = self.path_for_spec(spec)
assert(path.startswith(self.root))
if os.path.exists(path):
- shutil.rmtree(path, True)
+ try:
+ shutil.rmtree(path)
+ except exceptions.OSError, e:
+ raise RemoveFailedError(spec, path, e)
path = os.path.dirname(path)
while path != self.root:
@@ -134,9 +190,18 @@ class SpecHashDirectoryLayout(DirectoryLayout):
"""Prefix size is number of characters in the SHA-1 prefix to use
to make each hash unique.
"""
- spec_file_name = kwargs.get('spec_file_name', '.spec')
+ spec_file_name = kwargs.get('spec_file_name', '.spec')
+ extension_file_name = kwargs.get('extension_file_name', '.extensions')
super(SpecHashDirectoryLayout, self).__init__(root)
self.spec_file_name = spec_file_name
+ self.extension_file_name = extension_file_name
+
+ # Cache of already written/read extension maps.
+ self._extension_maps = {}
+
+ @property
+ def hidden_file_paths(self):
+ return ('.spec', '.extensions')
def relative_path_for_spec(self, spec):
@@ -157,19 +222,27 @@ class SpecHashDirectoryLayout(DirectoryLayout):
# Specs from files are assumed normal and concrete
spec = Spec(spec_file.read().replace('\n', ''))
- # If we do not have a package on hand for this spec, we know
- # it is concrete, and we *assume* that it is normal. This
- # prevents us from trying to fetch a non-existing package, and
- # allows best effort for commands like spack find.
- if not spack.db.exists(spec.name):
- spec._normal = True
- spec._concrete = True
- else:
- spec.normalize()
- if not spec.concrete:
- tty.warn("Spec read from installed package is not concrete:",
- path, spec)
-
+ if all(spack.db.exists(s.name) for s in spec.traverse()):
+ copy = spec.copy()
+
+ # TODO: It takes a lot of time to normalize every spec on read.
+ # TODO: Storing graph info with spec files would fix this.
+ copy.normalize()
+ if copy.concrete:
+ return copy # These are specs spack still understands.
+
+ # If we get here, either the spec is no longer in spack, or
+ # something about its dependencies has changed. So we need to
+ # just assume the read spec is correct. We'll lose graph
+ # information if we do this, but this is just for best effort
+ # for commands like uninstall and find. Currently Spack
+ # doesn't do anything that needs the graph info after install.
+
+ # TODO: store specs with full connectivity information, so
+ # that we don't have to normalize or reconstruct based on
+ # changing dependencies in the Spack tree.
+ spec._normal = True
+ spec._concrete = True
return spec
@@ -207,17 +280,116 @@ class SpecHashDirectoryLayout(DirectoryLayout):
self.write_spec(spec, spec_file_path)
+ @memoized
def all_specs(self):
if not os.path.isdir(self.root):
- return
+ return []
+ specs = []
for path in traverse_dirs_at_depth(self.root, 3):
arch, compiler, last_dir = path
spec_file_path = join_path(
self.root, arch, compiler, last_dir, self.spec_file_name)
if os.path.exists(spec_file_path):
spec = self.read_spec(spec_file_path)
- yield spec
+ specs.append(spec)
+ return specs
+
+
+ def extension_file_path(self, spec):
+ """Gets full path to an installed package's extension file"""
+ _check_concrete(spec)
+ return join_path(self.path_for_spec(spec), self.extension_file_name)
+
+
+ def _extension_map(self, spec):
+ """Get a dict<name -> spec> for all extensions currnetly
+ installed for this package."""
+ _check_concrete(spec)
+
+ if not spec in self._extension_maps:
+ path = self.extension_file_path(spec)
+ if not os.path.exists(path):
+ self._extension_maps[spec] = {}
+
+ else:
+ exts = {}
+ with closing(open(path)) as ext_file:
+ for line in ext_file:
+ try:
+ spec = Spec(line.strip())
+ exts[spec.name] = spec
+ except spack.error.SpackError, e:
+ # TODO: do something better here -- should be
+ # resilient to corrupt files.
+ raise InvalidExtensionSpecError(str(e))
+ self._extension_maps[spec] = exts
+
+ return self._extension_maps[spec]
+
+
+ def extension_map(self, spec):
+ """Defensive copying version of _extension_map() for external API."""
+ return self._extension_map(spec).copy()
+
+
+ def check_extension_conflict(self, spec, ext_spec):
+ exts = self._extension_map(spec)
+ if ext_spec.name in exts:
+ installed_spec = exts[ext_spec.name]
+ if ext_spec == installed_spec:
+ raise ExtensionAlreadyInstalledError(spec, ext_spec)
+ else:
+ raise ExtensionConflictError(spec, ext_spec, installed_spec)
+
+
+ def check_activated(self, spec, ext_spec):
+ exts = self._extension_map(spec)
+ if (not ext_spec.name in exts) or (ext_spec != exts[ext_spec.name]):
+ raise NoSuchExtensionError(spec, ext_spec)
+
+
+ def _write_extensions(self, spec, extensions):
+ path = self.extension_file_path(spec)
+
+ # Create a temp file in the same directory as the actual file.
+ dirname, basename = os.path.split(path)
+ tmp = tempfile.NamedTemporaryFile(
+ prefix=basename, dir=dirname, delete=False)
+
+ # Write temp file.
+ with closing(tmp):
+ for extension in sorted(extensions.values()):
+ tmp.write("%s\n" % extension)
+
+ # Atomic update by moving tmpfile on top of old one.
+ os.rename(tmp.name, path)
+
+
+ def add_extension(self, spec, ext_spec):
+ _check_concrete(spec)
+ _check_concrete(ext_spec)
+
+ # Check whether it's already installed or if it's a conflict.
+ exts = self._extension_map(spec)
+ self.check_extension_conflict(spec, ext_spec)
+
+ # do the actual adding.
+ exts[ext_spec.name] = ext_spec
+ self._write_extensions(spec, exts)
+
+
+ def remove_extension(self, spec, ext_spec):
+ _check_concrete(spec)
+ _check_concrete(ext_spec)
+
+ # Make sure it's installed before removing.
+ exts = self._extension_map(spec)
+ self.check_activated(spec, ext_spec)
+
+ # do the actual removing.
+ del exts[ext_spec.name]
+ self._write_extensions(spec, exts)
class DirectoryLayoutError(SpackError):
@@ -234,6 +406,15 @@ class SpecHashCollisionError(DirectoryLayoutError):
% installed_spec, new_spec)
+class RemoveFailedError(DirectoryLayoutError):
+ """Raised when a DirectoryLayout cannot remove an install prefix."""
+ def __init__(self, installed_spec, prefix, error):
+ super(RemoveFailedError, self).__init__(
+ 'Could not remove prefix %s for %s : %s'
+ % (prefix, installed_spec.short_spec, error))
+ self.cause = error
+
+
class InconsistentInstallDirectoryError(DirectoryLayoutError):
"""Raised when a package seems to be installed to the wrong place."""
def __init__(self, message):
@@ -245,3 +426,34 @@ class InstallDirectoryAlreadyExistsError(DirectoryLayoutError):
def __init__(self, path):
super(InstallDirectoryAlreadyExistsError, self).__init__(
"Install path %s already exists!")
+
+
+class InvalidExtensionSpecError(DirectoryLayoutError):
+ """Raised when an extension file has a bad spec in it."""
+ def __init__(self, message):
+ super(InvalidExtensionSpecError, self).__init__(message)
+
+
+class ExtensionAlreadyInstalledError(DirectoryLayoutError):
+ """Raised when an extension is added to a package that already has it."""
+ def __init__(self, spec, ext_spec):
+ super(ExtensionAlreadyInstalledError, self).__init__(
+ "%s is already installed in %s" % (ext_spec.short_spec, spec.short_spec))
+
+
+class ExtensionConflictError(DirectoryLayoutError):
+ """Raised when an extension is added to a package that already has it."""
+ def __init__(self, spec, ext_spec, conflict):
+ super(ExtensionConflictError, self).__init__(
+ "%s cannot be installed in %s because it conflicts with %s."% (
+ ext_spec.short_spec, spec.short_spec, conflict.short_spec))
+
+
+class NoSuchExtensionError(DirectoryLayoutError):
+ """Raised when an extension isn't there on deactivate."""
+ def __init__(self, spec, ext_spec):
+ super(NoSuchExtensionError, self).__init__(
+ "%s cannot be removed from %s because it's not activated."% (
+ ext_spec.short_spec, spec.short_spec))
+
+
diff --git a/lib/spack/spack/error.py b/lib/spack/spack/error.py
index 8bcbd83c28..e483ea613b 100644
--- a/lib/spack/spack/error.py
+++ b/lib/spack/spack/error.py
@@ -33,6 +33,12 @@ class SpackError(Exception):
self.long_message = long_message
+ def __str__(self):
+ msg = self.message
+ if self.long_message:
+ msg += "\n %s" % self.long_message
+ return msg
+
class UnsupportedPlatformError(SpackError):
"""Raised by packages when a platform is not supported"""
def __init__(self, message):
diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py
index 98c78c2e08..48313e2b37 100644
--- a/lib/spack/spack/fetch_strategy.py
+++ b/lib/spack/spack/fetch_strategy.py
@@ -41,11 +41,12 @@ in order to build it. They need to define the following methods:
Archive a source directory, e.g. for creating a mirror.
"""
import os
+import sys
import re
import shutil
from functools import wraps
import llnl.util.tty as tty
-
+from llnl.util.filesystem import *
import spack
import spack.error
import spack.util.crypto as crypto
@@ -141,13 +142,19 @@ class URLFetchStrategy(FetchStrategy):
tty.msg("Trying to fetch from %s" % self.url)
+ curl_args = ['-O', # save file to disk
+ '-f', # fail on >400 errors
+ '-D', '-', # print out HTML headers
+ '-L', self.url,]
+
+ if sys.stdout.isatty():
+ curl_args.append('-#') # status bar when using a tty
+ else:
+ curl_args.append('-sS') # just errors when not.
+
# Run curl but grab the mime type from the http headers
- headers = spack.curl('-#', # status bar
- '-O', # save file to disk
- '-f', # fail on >400 errors
- '-D', '-', # print out HTML headers
- '-L', self.url,
- return_output=True, fail_on_error=False)
+ headers = spack.curl(
+ *curl_args, return_output=True, fail_on_error=False)
if spack.curl.returncode != 0:
# clean up archive on failure.
@@ -156,9 +163,10 @@ class URLFetchStrategy(FetchStrategy):
if spack.curl.returncode == 22:
# This is a 404. Curl will print the error.
- raise FailedDownloadError(url)
+ raise FailedDownloadError(
+ self.url, "URL %s was not found!" % self.url)
- if spack.curl.returncode == 60:
+ elif spack.curl.returncode == 60:
# This is a certificate error. Suggest spack -k
raise FailedDownloadError(
self.url,
@@ -168,6 +176,13 @@ class URLFetchStrategy(FetchStrategy):
"can try running spack -k, which will not check SSL certificates."
"Use this at your own risk.")
+ else:
+ # This is some other curl error. Curl will print the
+ # error, but print a spack message too
+ raise FailedDownloadError(
+ self.url, "Curl failed with error %d" % spack.curl.returncode)
+
+
# Check if we somehow got an HTML file rather than the archive we
# asked for. We only look at the last content type, to handle
# redirects properly.
@@ -197,8 +212,26 @@ class URLFetchStrategy(FetchStrategy):
"Failed on expand() for URL %s" % self.url)
decompress = decompressor_for(self.archive_file)
+
+ # Expand all tarballs in their own directory to contain
+ # exploding tarballs.
+ tarball_container = os.path.join(self.stage.path, "spack-expanded-archive")
+ mkdirp(tarball_container)
+ os.chdir(tarball_container)
decompress(self.archive_file)
+ # If the tarball *didn't* explode, move
+ # the expanded directory up & remove the protector directory.
+ files = os.listdir(tarball_container)
+ if len(files) == 1:
+ expanded_dir = os.path.join(tarball_container, files[0])
+ if os.path.isdir(expanded_dir):
+ shutil.move(expanded_dir, self.stage.path)
+ os.rmdir(tarball_container)
+
+ # Set the wd back to the stage when done.
+ self.stage.chdir()
+
def archive(self, destination):
"""Just moves this archive to the destination."""
@@ -337,8 +370,7 @@ class GitFetchStrategy(VCSFetchStrategy):
@property
def git_version(self):
- git = which('git', required=True)
- vstring = git('--version', return_output=True).lstrip('git version ')
+ vstring = self.git('--version', return_output=True).lstrip('git version ')
return Version(vstring)
@@ -348,6 +380,7 @@ class GitFetchStrategy(VCSFetchStrategy):
self._git = which('git', required=True)
return self._git
+
@_needs_stage
def fetch(self):
self.stage.chdir()
@@ -580,7 +613,7 @@ def for_package_version(pkg, version):
version() in the package description."""
# If it's not a known version, extrapolate one.
if not version in pkg.versions:
- url = pkg.url_for_verison(version)
+ url = pkg.url_for_version(version)
if not url:
raise InvalidArgsError(pkg, version)
return URLFetchStrategy(url)
diff --git a/lib/spack/spack/graph.py b/lib/spack/spack/graph.py
new file mode 100644
index 0000000000..5fb6a9cd23
--- /dev/null
+++ b/lib/spack/spack/graph.py
@@ -0,0 +1,553 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""Functions for graphing DAGs of dependencies.
+
+This file contains code for graphing DAGs of software packages
+(i.e. Spack specs). There are two main functions you probably care
+about:
+
+graph_ascii() will output a colored graph of a spec in ascii format,
+kind of like the graph git shows with "git log --graph", e.g.::
+
+ o mpileaks
+ |\
+ | |\
+ | o | callpath
+ |/| |
+ | |\|
+ | |\ \
+ | | |\ \
+ | | | | o adept-utils
+ | |_|_|/|
+ |/| | | |
+ o | | | | mpi
+ / / / /
+ | | o | dyninst
+ | |/| |
+ |/|/| |
+ | | |/
+ | o | libdwarf
+ |/ /
+ o | libelf
+ /
+ o boost
+
+graph_dot() will output a graph of a spec (or multiple specs) in dot
+format.
+
+Note that ``graph_ascii`` assumes a single spec while ``graph_dot``
+can take a number of specs as input.
+
+"""
+__all__ = ['topological_sort', 'graph_ascii', 'AsciiGraph', 'graph_dot']
+
+from heapq import *
+
+from llnl.util.lang import *
+from llnl.util.tty.color import *
+
+import spack
+from spack.spec import Spec
+
+
+def topological_sort(spec, **kwargs):
+ """Topological sort for specs.
+
+ Return a list of dependency specs sorted topologically. The spec
+ argument is not modified in the process.
+
+ """
+ reverse = kwargs.get('reverse', False)
+ if not reverse:
+ parents = lambda s: s.dependents
+ children = lambda s: s.dependencies
+ else:
+ parents = lambda s: s.dependencies
+ children = lambda s: s.dependents
+
+ # Work on a copy so this is nondestructive.
+ spec = spec.copy()
+ nodes = spec.index()
+
+ topo_order = []
+ remaining = [name for name in nodes.keys() if not parents(nodes[name])]
+ heapify(remaining)
+
+ while remaining:
+ name = heappop(remaining)
+ topo_order.append(name)
+
+ node = nodes[name]
+ for dep in children(node).values():
+ del parents(dep)[node.name]
+ if not parents(dep):
+ heappush(remaining, dep.name)
+
+ if any(parents(s) for s in spec.traverse()):
+ raise ValueError("Spec has cycles!")
+ else:
+ return topo_order
+
+
+def find(seq, predicate):
+ """Find index in seq for which predicate is True.
+
+ Searches the sequence and returns the index of the element for
+ which the predicate evaluates to True. Returns -1 if the
+ predicate does not evaluate to True for any element in seq.
+
+ """
+ for i, elt in enumerate(seq):
+ if predicate(elt):
+ return i
+ return -1
+
+
+# Names of different graph line states. We record previous line
+# states so that we can easily determine what to do when connecting.
+states = ('node', 'collapse', 'merge-right', 'expand-right', 'back-edge')
+NODE, COLLAPSE, MERGE_RIGHT, EXPAND_RIGHT, BACK_EDGE = states
+
+class AsciiGraph(object):
+ def __init__(self):
+ # These can be set after initialization or after a call to
+ # graph() to change behavior.
+ self.node_character = '*'
+ self.debug = False
+ self.indent = 0
+
+ # These are colors in the order they'll be used for edges.
+ # See llnl.util.tty.color for details on color characters.
+ self.colors = 'rgbmcyRGBMCY'
+
+ # Internal vars are used in the graph() function and are
+ # properly initialized there.
+ self._name_to_color = None # Node name to color
+ self._out = None # Output stream
+ self._frontier = None # frontier
+ self._nodes = None # dict from name -> node
+ self._prev_state = None # State of previous line
+ self._prev_index = None # Index of expansion point of prev line
+
+
+ def _indent(self):
+ self._out.write(self.indent * ' ')
+
+
+ def _write_edge(self, string, index, sub=0):
+ """Write a colored edge to the output stream."""
+ name = self._frontier[index][sub]
+ edge = "@%s{%s}" % (self._name_to_color[name], string)
+ self._out.write(edge)
+
+
+ def _connect_deps(self, i, deps, label=None):
+ """Connect dependencies to existing edges in the frontier.
+
+ ``deps`` are to be inserted at position i in the
+ frontier. This routine determines whether other open edges
+ should be merged with <deps> (if there are other open edges
+ pointing to the same place) or whether they should just be
+ inserted as a completely new open edge.
+
+ Open edges that are not fully expanded (i.e. those that point
+ at multiple places) are left intact.
+
+ Parameters:
+
+ label -- optional debug label for the connection.
+
+ Returns: True if the deps were connected to another edge
+ (i.e. the frontier did not grow) and False if the deps were
+ NOT already in the frontier (i.e. they were inserted and the
+ frontier grew).
+
+ """
+ if len(deps) == 1 and deps in self._frontier:
+ j = self._frontier.index(deps)
+
+ # convert a right connection into a left connection
+ if i < j:
+ self._frontier.pop(j)
+ self._frontier.insert(i, deps)
+ return self._connect_deps(j, deps, label)
+
+ collapse = True
+ if self._prev_state == EXPAND_RIGHT:
+ # Special case where previous line expanded and i is off by 1.
+ self._back_edge_line([], j, i+1, True, label + "-1.5 " + str((i+1,j)))
+ collapse = False
+
+ else:
+ # Previous node also expanded here, so i is off by one.
+ if self._prev_state == NODE and self._prev_index < i:
+ i += 1
+
+ if i-j > 1:
+ # We need two lines to connect if distance > 1
+ self._back_edge_line([], j, i, True, label + "-1 " + str((i,j)))
+ collapse = False
+
+ self._back_edge_line([j], -1, -1, collapse, label + "-2 " + str((i,j)))
+ return True
+
+ elif deps:
+ self._frontier.insert(i, deps)
+ return False
+
+
+ def _set_state(self, state, index, label=None):
+ if state not in states:
+ raise ValueError("Invalid graph state!")
+ self._prev_state = state
+ self._prev_index = index
+
+ if self.debug:
+ self._out.write(" " * 20)
+ self._out.write("%-20s" % (
+ str(self._prev_state) if self._prev_state else ''))
+ self._out.write("%-20s" % (str(label) if label else ''))
+ self._out.write("%s" % self._frontier)
+
+
+ def _back_edge_line(self, prev_ends, end, start, collapse, label=None):
+ """Write part of a backwards edge in the graph.
+
+ Writes single- or multi-line backward edges in an ascii graph.
+ For example, a single line edge::
+
+ | | | | o |
+ | | | |/ / <-- single-line edge connects two nodes.
+ | | | o |
+
+ Or a multi-line edge (requires two calls to back_edge)::
+
+ | | | | o |
+ | |_|_|/ / <-- multi-line edge crosses vertical edges.
+ |/| | | |
+ o | | | |
+
+ Also handles "pipelined" edges, where the same line contains
+ parts of multiple edges::
+
+ o start
+ | |_|_|_|/|
+ |/| | |_|/| <-- this line has parts of 2 edges.
+ | | |/| | |
+ o o
+
+ Arguments:
+
+ prev_ends -- indices in frontier of previous edges that need
+ to be finished on this line.
+
+ end -- end of the current edge on this line.
+
+ start -- start index of the current edge.
+
+ collapse -- whether the graph will be collapsing (i.e. whether
+ to slant the end of the line or keep it straight)
+
+ label -- optional debug label to print after the line.
+
+ """
+ def advance(to_pos, edges):
+ """Write edges up to <to_pos>."""
+ for i in range(self._pos, to_pos):
+ for e in edges():
+ self._write_edge(*e)
+ self._pos += 1
+
+ flen = len(self._frontier)
+ self._pos = 0
+ self._indent()
+
+ for p in prev_ends:
+ advance(p, lambda: [("| ", self._pos)] )
+ advance(p+1, lambda: [("|/", self._pos)] )
+
+ if end >= 0:
+ advance(end + 1, lambda: [("| ", self._pos)] )
+ advance(start - 1, lambda: [("|", self._pos), ("_", end)] )
+ else:
+ advance(start - 1, lambda: [("| ", self._pos)] )
+
+ if start >= 0:
+ advance(start, lambda: [("|", self._pos), ("/", end)] )
+
+ if collapse:
+ advance(flen, lambda: [(" /", self._pos)] )
+ else:
+ advance(flen, lambda: [("| ", self._pos)] )
+
+ self._set_state(BACK_EDGE, end, label)
+ self._out.write("\n")
+
+
+ def _node_line(self, index, name):
+ """Writes a line with a node at index."""
+ self._indent()
+ for c in range(index):
+ self._write_edge("| ", c)
+
+ self._out.write("%s " % self.node_character)
+
+ for c in range(index+1, len(self._frontier)):
+ self._write_edge("| ", c)
+
+ self._out.write(" %s" % name)
+ self._set_state(NODE, index)
+ self._out.write("\n")
+
+
+ def _collapse_line(self, index):
+ """Write a collapsing line after a node was added at index."""
+ self._indent()
+ for c in range(index):
+ self._write_edge("| ", c)
+ for c in range(index, len(self._frontier)):
+ self._write_edge(" /", c)
+
+ self._set_state(COLLAPSE, index)
+ self._out.write("\n")
+
+
+ def _merge_right_line(self, index):
+ """Edge at index is same as edge to right. Merge directly with '\'"""
+ self._indent()
+ for c in range(index):
+ self._write_edge("| ", c)
+ self._write_edge("|", index)
+ self._write_edge("\\", index+1)
+ for c in range(index+1, len(self._frontier)):
+ self._write_edge("| ", c )
+
+ self._set_state(MERGE_RIGHT, index)
+ self._out.write("\n")
+
+
+ def _expand_right_line(self, index):
+ self._indent()
+ for c in range(index):
+ self._write_edge("| ", c)
+
+ self._write_edge("|", index)
+ self._write_edge("\\", index+1)
+
+ for c in range(index+2, len(self._frontier)):
+ self._write_edge(" \\", c)
+
+ self._set_state(EXPAND_RIGHT, index)
+ self._out.write("\n")
+
+
+ def write(self, spec, **kwargs):
+ """Write out an ascii graph of the provided spec.
+
+ Arguments:
+ spec -- spec to graph. This only handles one spec at a time.
+
+ Optional arguments:
+
+ out -- file object to write out to (default is sys.stdout)
+
+ color -- whether to write in color. Default is to autodetect
+ based on output file.
+
+ """
+ out = kwargs.get('out', None)
+ if not out:
+ out = sys.stdout
+
+ color = kwargs.get('color', None)
+ if not color:
+ color = out.isatty()
+ self._out = ColorStream(sys.stdout, color=color)
+
+ # We'll traverse the spec in topo order as we graph it.
+ topo_order = topological_sort(spec, reverse=True)
+
+ # Work on a copy to be nondestructive
+ spec = spec.copy()
+ self._nodes = spec.index()
+
+ # Colors associated with each node in the DAG.
+ # Edges are colored by the node they point to.
+ self._name_to_color = dict((name, self.colors[i % len(self.colors)])
+ for i, name in enumerate(topo_order))
+
+ # Frontier tracks open edges of the graph as it's written out.
+ self._frontier = [[spec.name]]
+ while self._frontier:
+ # Find an unexpanded part of frontier
+ i = find(self._frontier, lambda f: len(f) > 1)
+
+ if i >= 0:
+ # Expand frontier until there are enough columns for all children.
+
+ # Figure out how many back connections there are and
+ # sort them so we do them in order
+ back = []
+ for d in self._frontier[i]:
+ b = find(self._frontier[:i], lambda f: f == [d])
+ if b != -1:
+ back.append((b, d))
+
+ # Do all back connections in sorted order so we can
+ # pipeline them and save space.
+ if back:
+ back.sort()
+ prev_ends = []
+ for j, (b, d) in enumerate(back):
+ self._frontier[i].remove(d)
+ if i-b > 1:
+ self._back_edge_line(prev_ends, b, i, False, 'left-1')
+ del prev_ends[:]
+ prev_ends.append(b)
+
+ # Check whether we did ALL the deps as back edges,
+ # in which case we're done.
+ collapse = not self._frontier[i]
+ if collapse:
+ self._frontier.pop(i)
+ self._back_edge_line(prev_ends, -1, -1, collapse, 'left-2')
+
+ elif len(self._frontier[i]) > 1:
+ # Expand forward after doing all back connections
+
+ if (i+1 < len(self._frontier) and len(self._frontier[i+1]) == 1
+ and self._frontier[i+1][0] in self._frontier[i]):
+ # We need to connect to the element to the right.
+ # Keep lines straight by connecting directly and
+ # avoiding unnecessary expand/contract.
+ name = self._frontier[i+1][0]
+ self._frontier[i].remove(name)
+ self._merge_right_line(i)
+
+ else:
+ # Just allow the expansion here.
+ name = self._frontier[i].pop(0)
+ deps = [name]
+ self._frontier.insert(i, deps)
+ self._expand_right_line(i)
+
+ self._frontier.pop(i)
+ self._connect_deps(i, deps, "post-expand")
+
+
+ # Handle any remaining back edges to the right
+ j = i+1
+ while j < len(self._frontier):
+ deps = self._frontier.pop(j)
+ if not self._connect_deps(j, deps, "back-from-right"):
+ j += 1
+
+ else:
+ # Nothing to expand; add dependencies for a node.
+ name = topo_order.pop()
+ node = self._nodes[name]
+
+ # Find the named node in the frontier and draw it.
+ i = find(self._frontier, lambda f: name in f)
+ self._node_line(i, name)
+
+ # Replace node with its dependencies
+ self._frontier.pop(i)
+ if node.dependencies:
+ deps = sorted((d for d in node.dependencies), reverse=True)
+ self._connect_deps(i, deps, "new-deps") # anywhere.
+
+ elif self._frontier:
+ self._collapse_line(i)
+
+
+def graph_ascii(spec, **kwargs):
+ node_character = kwargs.get('node', 'o')
+ out = kwargs.pop('out', None)
+ debug = kwargs.pop('debug', False)
+ indent = kwargs.pop('indent', 0)
+ color = kwargs.pop('color', None)
+ check_kwargs(kwargs, graph_ascii)
+
+ graph = AsciiGraph()
+ graph.debug = debug
+ graph.indent = indent
+ graph.node_character = node_character
+
+ graph.write(spec, color=color, out=out)
+
+
+
+def graph_dot(*specs, **kwargs):
+ """Generate a graph in dot format of all provided specs.
+
+ Print out a dot formatted graph of all the dependencies between
+ package. Output can be passed to graphviz, e.g.:
+
+ spack graph --dot qt | dot -Tpdf > spack-graph.pdf
+
+ """
+ out = kwargs.pop('out', sys.stdout)
+ check_kwargs(kwargs, graph_dot)
+
+ out.write('digraph G {\n')
+ out.write(' label = "Spack Dependencies"\n')
+ out.write(' labelloc = "b"\n')
+ out.write(' rankdir = "LR"\n')
+ out.write(' ranksep = "5"\n')
+ out.write('\n')
+
+ def quote(string):
+ return '"%s"' % string
+
+ if not specs:
+ specs = [p.name for p in spack.db.all_packages()]
+ else:
+ roots = specs
+ specs = set()
+ for spec in roots:
+ specs.update(Spec(s.name) for s in spec.normalized().traverse())
+
+ deps = []
+ for spec in specs:
+ out.write(' %-30s [label="%s"]\n' % (quote(spec.name), spec.name))
+
+ # Skip virtual specs (we'll find out about them from concrete ones).
+ if spec.virtual:
+ continue
+
+ # Add edges for each depends_on in the package.
+ for dep_name, dep in spec.package.dependencies.iteritems():
+ deps.append((spec.name, dep_name))
+
+ # If the package provides something, add an edge for that.
+ for provider in set(s.name for s in spec.package.provided):
+ deps.append((provider, spec.name))
+
+ out.write('\n')
+
+ for pair in deps:
+ out.write(' "%s" -> "%s"\n' % pair)
+ out.write('}\n')
diff --git a/lib/spack/spack/hooks/__init__.py b/lib/spack/spack/hooks/__init__.py
index 4f0d574e49..1c44e8abaa 100644
--- a/lib/spack/spack/hooks/__init__.py
+++ b/lib/spack/spack/hooks/__init__.py
@@ -31,7 +31,9 @@
Currently the following hooks are supported:
+ * pre_install()
* post_install()
+ * pre_uninstall()
* post_uninstall()
This can be used to implement support for things like module
@@ -47,8 +49,11 @@ import spack
def all_hook_modules():
modules = []
for name in list_modules(spack.hooks_path):
+ mod_name = __name__ + '.' + name
path = join_path(spack.hooks_path, name) + ".py"
- modules.append(imp.load_source('spack.hooks', path))
+ mod = imp.load_source(mod_name, path)
+ modules.append(mod)
+
return modules
@@ -67,5 +72,8 @@ class HookRunner(object):
#
# Define some functions that can be called to fire off hooks.
#
-post_install = HookRunner('post_install')
+pre_install = HookRunner('pre_install')
+post_install = HookRunner('post_install')
+
+pre_uninstall = HookRunner('pre_uninstall')
post_uninstall = HookRunner('post_uninstall')
diff --git a/lib/spack/spack/hooks/extensions.py b/lib/spack/spack/hooks/extensions.py
new file mode 100644
index 0000000000..cf87a78c8c
--- /dev/null
+++ b/lib/spack/spack/hooks/extensions.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+import spack
+
+
+def pre_uninstall(pkg):
+ # Need to do this b/c uninstall does not automatically do it.
+ # TODO: store full graph info in stored .spec file.
+ pkg.spec.normalize()
+
+ if pkg.is_extension:
+ if pkg.activated:
+ pkg.do_deactivate(force=True)
diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py
index 9c700cd551..306c8085aa 100644
--- a/lib/spack/spack/mirror.py
+++ b/lib/spack/spack/mirror.py
@@ -37,6 +37,7 @@ from llnl.util.filesystem import *
import spack
import spack.error
+import spack.url as url
import spack.fetch_strategy as fs
from spack.spec import Spec
from spack.stage import Stage
@@ -45,14 +46,14 @@ from spack.util.compression import extension
def mirror_archive_filename(spec):
- """Get the path that this spec will live at within a mirror."""
+ """Get the name of the spec's archive in the mirror."""
if not spec.version.concrete:
raise ValueError("mirror.path requires spec with concrete version.")
fetcher = spec.package.fetcher
if isinstance(fetcher, fs.URLFetchStrategy):
# If we fetch this version with a URLFetchStrategy, use URL's archive type
- ext = extension(fetcher.url)
+ ext = url.downloaded_file_extension(fetcher.url)
else:
# Otherwise we'll make a .tar.gz ourselves
ext = 'tar.gz'
@@ -60,6 +61,11 @@ def mirror_archive_filename(spec):
return "%s-%s.%s" % (spec.package.name, spec.version, ext)
+def mirror_archive_path(spec):
+ """Get the relative path to the spec's archive within a mirror."""
+ return join_path(spec.name, mirror_archive_filename(spec))
+
+
def get_matching_versions(specs, **kwargs):
"""Get a spec for EACH known version matching any spec in the list."""
matching = []
@@ -140,12 +146,10 @@ def create(path, specs, **kwargs):
stage = None
try:
# create a subdirectory for the current package@version
- subdir = join_path(mirror_root, pkg.name)
+ archive_path = os.path.abspath(join_path(path, mirror_archive_path(spec)))
+ subdir = os.path.dirname(archive_path)
mkdirp(subdir)
- archive_file = mirror_archive_filename(spec)
- archive_path = join_path(subdir, archive_file)
-
if os.path.exists(archive_path):
tty.msg("Already added %s" % spec.format("$_$@"))
present.append(spec)
diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py
index 755e9ea900..7d2ca97a62 100644
--- a/lib/spack/spack/modules.py
+++ b/lib/spack/spack/modules.py
@@ -49,6 +49,7 @@ import os
import re
import textwrap
import shutil
+from glob import glob
from contextlib import closing
import llnl.util.tty as tty
@@ -123,6 +124,13 @@ class EnvModule(object):
if os.path.isdir(directory):
add_path(var, directory)
+ # Add python path unless it's an actual python installation
+ # TODO: is there a better way to do this?
+ if self.spec.name != 'python':
+ site_packages = glob(join_path(self.spec.prefix.lib, "python*/site-packages"))
+ if site_packages:
+ add_path('PYTHONPATH', site_packages[0])
+
# short description is just the package + version
# TODO: maybe packages can optionally provide it.
self.short_description = self.spec.format("$_ $@")
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index 649e772a10..75e6142a9d 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -35,13 +35,17 @@ README.
"""
import os
import re
+import time
import inspect
import subprocess
import platform as py_platform
import multiprocessing
-from urlparse import urlparse
+from urlparse import urlparse, urljoin
+import textwrap
+from StringIO import StringIO
import llnl.util.tty as tty
+from llnl.util.link_tree import LinkTree
from llnl.util.filesystem import *
from llnl.util.lang import *
@@ -49,6 +53,7 @@ import spack
import spack.spec
import spack.error
import spack.compilers
+import spack.mirror
import spack.hooks
import spack.build_environment as build_env
import spack.url as url
@@ -282,10 +287,9 @@ class Package(object):
.. code-block:: python
- p.do_clean() # runs make clean
- p.do_clean_work() # removes the build directory and
+ p.do_clean() # removes the stage directory entirely
+ p.do_restage() # removes the build directory and
# re-expands the archive.
- p.do_clean_dist() # removes the stage directory entirely
The convention used here is that a do_* function is intended to be called
internally by Spack commands (in spack.cmd). These aren't for package
@@ -316,12 +320,21 @@ class Package(object):
"""Patches to apply to newly expanded source, if any."""
patches = {}
+ """Specs of packages this one extends, or None.
+
+ Currently, packages can extend at most one other package.
+ """
+ extendees = {}
+
#
# These are default values for instance variables.
#
"""By default we build in parallel. Subclasses can override this."""
parallel = True
+ """Most packages are NOT extendable. Set to True if you want extensions."""
+ extendable = False
+
def __init__(self, spec):
# this determines how the package should be built.
@@ -333,9 +346,6 @@ class Package(object):
if '.' in self.name:
self.name = self.name[self.name.rindex('.') + 1:]
- # This is set by scraping a web page.
- self._available_versions = None
-
# Sanity check some required variables that could be
# overridden by package authors.
def ensure_has_dict(attr_name):
@@ -368,12 +378,20 @@ class Package(object):
# stage used to build this package.
self._stage = None
- # If there's no default URL provided, set this package's url to None
- if not hasattr(self, 'url'):
- self.url = None
-
- # Init fetch strategy to None
+ # Init fetch strategy and url to None
self._fetcher = None
+ self.url = getattr(self.__class__, 'url', None)
+
+ # Fix up self.url if this package fetches with a URLFetchStrategy.
+ # This makes self.url behave sanely.
+ if self.spec.versions.concrete:
+ # TODO: this is a really roundabout way of determining the type
+ # TODO: of fetch to do. figure out a more sane fetch strategy/package
+ # TODO: init order (right now it's conflated with stage, package, and
+ # TODO: the tests make assumptions)
+ f = fs.for_package_version(self, self.version)
+ if isinstance(f, fs.URLFetchStrategy):
+ self.url = self.url_for_version(self.spec.version)
# Set a default list URL (place to find available versions)
if not hasattr(self, 'list_url'):
@@ -382,6 +400,13 @@ class Package(object):
if not hasattr(self, 'list_depth'):
self.list_depth = 1
+ # Set up some internal variables for timing.
+ self._fetch_time = 0.0
+ self._total_time = 0.0
+
+ if self.is_extension:
+ spack.db.get(self.extendee_spec)._check_extendable()
+
@property
def version(self):
@@ -410,7 +435,7 @@ class Package(object):
*higher* URL, and if that isn't there raises an error.
"""
version_urls = self.version_urls()
- url = self.url
+ url = getattr(self.__class__, 'url', None)
for v in version_urls:
if v > version and url:
@@ -420,21 +445,15 @@ class Package(object):
return url
- def has_url(self):
- """Returns whether there is a URL available for this package.
- If there isn't, it's probably fetched some other way (version
- control, etc.)"""
- return self.url or self.version_urls()
-
-
# TODO: move this out of here and into some URL extrapolation module?
def url_for_version(self, version):
"""Returns a URL that you can download a new version of this package from."""
if not isinstance(version, Version):
version = Version(version)
- if not self.has_url():
- raise NoURLError(self.__class__)
+ cls = self.__class__
+ if not (hasattr(cls, 'url') or self.version_urls()):
+ raise NoURLError(cls)
# If we have a specific URL for this version, don't extrapolate.
version_urls = self.version_urls()
@@ -452,9 +471,9 @@ class Package(object):
raise ValueError("Can only get a stage for a concrete package.")
if self._stage is None:
- self._stage = Stage(self.fetcher,
- mirror_path=self.mirror_path(),
- name=self.spec.short_spec)
+ mp = spack.mirror.mirror_archive_path(self.spec)
+ self._stage = Stage(
+ self.fetcher, mirror_path=mp, name=self.spec.short_spec)
return self._stage
@@ -474,11 +493,45 @@ class Package(object):
self._fetcher = f
- def mirror_path(self):
- """Get path to this package's archive in a mirror."""
- filename = "%s-%s." % (self.name, self.version)
- filename += extension(self.url) if self.has_url() else "tar.gz"
- return "%s/%s" % (self.name, filename)
+ @property
+ def extendee_spec(self):
+ """Spec of the extendee of this package, or None if it is not an extension."""
+ if not self.extendees:
+ return None
+ name = next(iter(self.extendees))
+ if not name in self.spec:
+ spec, kwargs = self.extendees[name]
+ return spec
+
+ # Need to do this to get the concrete version of the spec
+ return self.spec[name]
+
+
+ @property
+ def extendee_args(self):
+ """Keyword arguments given to extends() for this package's extendee, or None if it is not an extension."""
+ if not self.extendees:
+ return None
+ name = next(iter(self.extendees))
+ return self.extendees[name][1]
+
+
+ @property
+ def is_extension(self):
+ return len(self.extendees) > 0
+
+
+ def extends(self, spec):
+ return (spec.name in self.extendees and
+ spec.satisfies(self.extendees[spec.name][0]))
+
+
+ @property
+ def activated(self):
+ if not self.is_extension:
+ raise ValueError("is_extension called on package that is not an extension.")
+ exts = spack.install_layout.extension_map(self.extendee_spec)
+ return (self.name in exts) and (exts[self.name] == self.spec)
def preorder_traversal(self, visited=None, **kwargs):
@@ -611,14 +664,26 @@ class Package(object):
if not self.spec.concrete:
raise ValueError("Can only fetch concrete packages.")
+ start_time = time.time()
if spack.do_checksum and not self.version in self.versions:
- raise FetchError(
- "Cannot fetch %s safely; there is no checksum on file for version %s."
- % (self.name, self.version),
- "Add a checksum to the package file, or use --no-checksum to "
- "skip this check.")
+ tty.warn("There is no checksum on file to fetch %s safely."
+ % self.spec.format('$_$@'))
+
+ # Ask the user whether to skip the checksum if we're
+ # interactive, but just fail if non-interactive.
+ checksum_msg = "Add a checksum or use --no-checksum to skip this check."
+ ignore_checksum = False
+ if sys.stdout.isatty():
+ ignore_checksum = tty.get_yes_or_no(" Fetch anyway?", default=False)
+ if ignore_checksum:
+ tty.msg("Fetching with no checksum.", checksum_msg)
+
+ if not ignore_checksum:
+ raise FetchError(
+ "Will not fetch %s." % self.spec.format('$_$@'), checksum_msg)
self.stage.fetch()
+ self._fetch_time = time.time() - start_time
if spack.do_checksum and self.version in self.versions:
self.stage.check()
@@ -650,8 +715,11 @@ class Package(object):
# Kick off the stage first.
self.do_stage()
+ # Package can add its own patch function.
+ has_patch_fun = hasattr(self, 'patch') and callable(self.patch)
+
# If there are no patches, note it.
- if not self.patches:
+ if not self.patches and not has_patch_fun:
tty.msg("No patches needed for %s." % self.name)
return
@@ -674,7 +742,7 @@ class Package(object):
tty.msg("Already patched %s" % self.name)
return
- # Apply all the patches for specs that match this on
+ # Apply all the patches for specs that match this one
for spec, patch_list in self.patches.items():
if self.spec.satisfies(spec):
for patch in patch_list:
@@ -692,15 +760,29 @@ class Package(object):
os.remove(bad_file)
touch(good_file)
+ if has_patch_fun:
+ self.patch()
+
+ tty.msg("Patched %s" % self.name)
+
+
+ def do_fake_install(self):
+ """Make a fake install directory containing a 'fake' file in bin."""
+ mkdirp(self.prefix.bin)
+ touch(join_path(self.prefix.bin, 'fake'))
+ mkdirp(self.prefix.lib)
+ mkdirp(self.prefix.man1)
+
def do_install(self, **kwargs):
"""This class should call this version of the install method.
Package implementations should override install().
"""
# whether to keep the prefix on failure. Default is to destroy it.
- keep_prefix = kwargs.get('keep_prefix', False)
- keep_stage = kwargs.get('keep_stage', False)
- ignore_deps = kwargs.get('ignore_deps', False)
+ keep_prefix = kwargs.get('keep_prefix', False)
+ keep_stage = kwargs.get('keep_stage', False)
+ ignore_deps = kwargs.get('ignore_deps', False)
+ fake_install = kwargs.get('fake', False)
if not self.spec.concrete:
raise ValueError("Can only install concrete packages.")
@@ -709,53 +791,51 @@ class Package(object):
tty.msg("%s is already installed in %s." % (self.name, self.prefix))
return
+ tty.msg("Installing %s" % self.name)
+
if not ignore_deps:
- self.do_install_dependencies()
+ self.do_install_dependencies(**kwargs)
- self.do_patch()
+ start_time = time.time()
+ if not fake_install:
+ self.do_patch()
- # Fork a child process to do the build. This allows each
- # package authors to have full control over their environment,
- # etc. without offecting other builds that might be executed
- # in the same spack call.
- try:
- pid = os.fork()
- except OSError, e:
- raise InstallError("Unable to fork build process: %s" % e)
+ # create the install directory. The install layout
+ # handles this so that it can use whatever
+ # package naming scheme it likes.
+ spack.install_layout.make_path_for_spec(self.spec)
- if pid == 0:
+ def real_work():
try:
tty.msg("Building %s." % self.name)
- # create the install directory. The install layout
- # handles this in case so that it can use whatever
- # package naming scheme it likes.
- spack.install_layout.make_path_for_spec(self.spec)
+ # Run the pre-install hook in the child process after
+ # the directory is created.
+ spack.hooks.pre_install(self)
# Set up process's build environment before running install.
- build_env.set_compiler_environment_variables(self)
- build_env.set_build_environment_variables(self)
- build_env.set_module_variables_for_package(self)
-
- # Subclasses implement install() to do the real work.
- self.install(self.spec, self.prefix)
+ self.stage.chdir_to_source()
+ if fake_install:
+ self.do_fake_install()
+ else:
+ # Subclasses implement install() to do the real work.
+ self.install(self.spec, self.prefix)
# Ensure that something was actually installed.
- if not os.listdir(self.prefix):
- raise InstallError(
- "Install failed for %s. Nothing was installed!"
- % self.name)
+ self._sanity_check_install()
# On successful install, remove the stage.
if not keep_stage:
self.stage.destroy()
- tty.msg("Successfully installed %s" % self.name)
- print_pkg(self.prefix)
+ # Stop timer.
+ self._total_time = time.time() - start_time
+ build_time = self._total_time - self._fetch_time
- # Use os._exit here to avoid raising a SystemExit exception,
- # which interferes with unit tests.
- os._exit(0)
+ tty.msg("Successfully installed %s." % self.name,
+ "Fetch: %s. Build: %s. Total: %s."
+ % (_hms(self._fetch_time), _hms(build_time), _hms(self._total_time)))
+ print_pkg(self.prefix)
except:
if not keep_prefix:
@@ -766,28 +846,26 @@ class Package(object):
"Spack will think this package is installed." +
"Manually remove this directory to fix:",
self.prefix)
+ raise
- # Child doesn't raise or return to main spack code.
- # Just runs default exception handler and exits.
- sys.excepthook(*sys.exc_info())
- os._exit(1)
-
- # Parent process just waits for the child to complete. If the
- # child exited badly, assume it already printed an appropriate
- # message. Just make the parent exit with an error code.
- pid, returncode = os.waitpid(pid, 0)
- if returncode != 0:
- sys.exit(1)
-
+ build_env.fork(self, real_work)
# Once everything else is done, run post install hooks
spack.hooks.post_install(self)
- def do_install_dependencies(self):
+ def _sanity_check_install(self):
+ installed = set(os.listdir(self.prefix))
+ installed.difference_update(spack.install_layout.hidden_file_paths)
+ if not installed:
+ raise InstallError(
+ "Install failed for %s. Nothing was installed!" % self.name)
+
+
+ def do_install_dependencies(self, **kwargs):
# Pass along paths of dependencies here
for dep in self.spec.dependencies.values():
- dep.package.do_install()
+ dep.package.do_install(**kwargs)
@property
@@ -799,6 +877,32 @@ class Package(object):
fromlist=[self.__class__.__name__])
+ def setup_dependent_environment(self, module, spec, dependent_spec):
+ """Called before the install() method of dependents.
+
+ Default implementation does nothing, but this can be
+ overridden by an extendable package to set up the install
+ environment for its extensions. This is useful if there are
+ some common steps to installing all extensions for a
+ certain package.
+
+ Some examples:
+
+ 1. Installing python modules generally requires PYTHONPATH to
+ point to the lib/pythonX.Y/site-packages directory in the
+ module's install prefix. This could set that variable.
+
+ 2. Extensions often need to invoke the 'python' interpreter
+ from the Python installation being extended. This routine can
+ put a 'python' Executable object in the module scope for the
+ extension package to simplify extension installs.
+
+ 3. A lot of Qt extensions need QTDIR set. This can be used to do that.
+
+ """
+ pass
+
+
def install(self, spec, prefix):
"""Package implementations override this with their own build configuration."""
raise InstallError("Package %s provides no install method!" % self.name)
@@ -818,6 +922,10 @@ class Package(object):
"The following installed packages depend on it: %s" %
' '.join(formatted_deps))
+ # Pre-uninstall hook runs first.
+ spack.hooks.pre_uninstall(self)
+
+ # Uninstalling in Spack only requires removing the prefix.
self.remove_prefix()
tty.msg("Successfully uninstalled %s." % self.spec.short_spec)
@@ -825,95 +933,227 @@ class Package(object):
spack.hooks.post_uninstall(self)
- def do_clean(self):
- if self.stage.expanded_archive_path:
- self.stage.chdir_to_source()
- self.clean()
+ def _check_extendable(self):
+ if not self.extendable:
+ raise ValueError("Package %s is not extendable!" % self.name)
- def clean(self):
- """By default just runs make clean. Override if this isn't good."""
- # TODO: should we really call make clean, ro just blow away the directory?
- make = build_env.MakeExecutable('make', self.parallel)
- make('clean')
+ def _sanity_check_extension(self):
+ if not self.is_extension:
+ raise ActivationError("This package is not an extension.")
+ extendee_package = self.extendee_spec.package
+ extendee_package._check_extendable()
+
+ if not extendee_package.installed:
+ raise ActivationError("Can only (de)activate extensions for installed packages.")
+ if not self.installed:
+ raise ActivationError("Extensions must first be installed.")
+ if not self.extendee_spec.name in self.extendees:
+ raise ActivationError("%s does not extend %s!" % (self.name, self.extendee.name))
+
+
+ def do_activate(self, **kwargs):
+ """Called on an extension to invoke the extendee's activate method.
+
+ Commands should call this routine, and should not call
+ activate() directly.
+ """
+ self._sanity_check_extension()
+ force = kwargs.get('force', False)
- def do_clean_work(self):
- """By default just blows away the stage directory and re-stages."""
+ # TODO: get rid of this normalize - DAG handling.
+ self.spec.normalize()
+
+ spack.install_layout.check_extension_conflict(self.extendee_spec, self.spec)
+
+ if not force:
+ for spec in self.spec.traverse(root=False):
+ if spec.package.extends(self.extendee_spec):
+ # TODO: fix this normalize() requirement -- revisit DAG handling.
+ spec.package.spec.normalize()
+ if not spec.package.activated:
+ spec.package.do_activate(**kwargs)
+
+ self.extendee_spec.package.activate(self, **self.extendee_args)
+
+ spack.install_layout.add_extension(self.extendee_spec, self.spec)
+ tty.msg("Activated extension %s for %s."
+ % (self.spec.short_spec, self.extendee_spec.format("$_$@$+$%@")))
+
+
+ def activate(self, extension, **kwargs):
+ """Symlinks all files from the extension into extendee's install dir.
+
+ Package authors can override this method to support other
+ extension mechanisms. Spack internals (commands, hooks, etc.)
+ should call do_activate() method so that proper checks are
+ always executed.
+
+ """
+ def ignore(filename):
+ return (filename in spack.install_layout.hidden_file_paths or
+ kwargs.get('ignore', lambda f: False)(filename))
+
+ tree = LinkTree(extension.prefix)
+ conflict = tree.find_conflict(self.prefix, ignore=ignore)
+ if conflict:
+ raise ExtensionConflictError(conflict)
+ tree.merge(self.prefix, ignore=ignore)
+
+
+ def do_deactivate(self, **kwargs):
+ """Called on the extension to invoke extendee's deactivate() method."""
+ self._sanity_check_extension()
+ force = kwargs.get('force', False)
+
+ # Allow a force deactivate to happen. This can unlink
+ # spurious files if something was corrupted.
+ if not force:
+ spack.install_layout.check_activated(self.extendee_spec, self.spec)
+
+ activated = spack.install_layout.extension_map(self.extendee_spec)
+ for name, aspec in activated.items():
+ if aspec != self.spec and self.spec in aspec:
+ raise ActivationError(
+ "Cannot deactivate %s because %s is activated and depends on it."
+ % (self.spec.short_spec, aspec.short_spec))
+
+ self.extendee_spec.package.deactivate(self, **self.extendee_args)
+
+ # redundant activation check -- makes SURE the spec is not
+ # still activated even if something was wrong above.
+ if self.activated:
+ spack.install_layout.remove_extension(self.extendee_spec, self.spec)
+
+ tty.msg("Deactivated extension %s for %s."
+ % (self.spec.short_spec, self.extendee_spec.format("$_$@$+$%@")))
+
+
+ def deactivate(self, extension, **kwargs):
+ """Unlinks all files from extension out of this package's install dir.
+
+ Package authors can override this method to support other
+ extension mechanisms. Spack internals (commands, hooks, etc.)
+ should call do_deactivate() method so that proper checks are
+ always executed.
+
+ """
+ def ignore(filename):
+ return (filename in spack.install_layout.hidden_file_paths or
+ kwargs.get('ignore', lambda f: False)(filename))
+
+ tree = LinkTree(extension.prefix)
+ tree.unmerge(self.prefix, ignore=ignore)
+
+
+ def do_restage(self):
+ """Reverts expanded/checked out source to a pristine state."""
self.stage.restage()
- def do_clean_dist(self):
- """Removes the stage directory where this package was built."""
+ def do_clean(self):
+ """Removes the package's build stage and source tarball."""
if os.path.exists(self.stage.path):
self.stage.destroy()
- def fetch_available_versions(self):
- if not hasattr(self, 'url'):
- raise VersionFetchError(self.__class__)
+ def format_doc(self, **kwargs):
+ """Wrap doc string at 72 characters and format nicely"""
+ indent = kwargs.get('indent', 0)
- # If not, then try to fetch using list_url
- if not self._available_versions:
- try:
- self._available_versions = find_versions_of_archive(
- self.url,
- list_url=self.list_url,
- list_depth=self.list_depth)
+ if not self.__doc__:
+ return ""
+
+ doc = re.sub(r'\s+', ' ', self.__doc__)
+ lines = textwrap.wrap(doc, 72)
+ results = StringIO()
+ for line in lines:
+ results.write((" " * indent) + line + "\n")
+ return results.getvalue()
+
+
+ @property
+ def all_urls(self):
+ urls = []
+ if self.url:
+ urls.append(self.url)
+
+ for args in self.versions.values():
+ if 'url' in args:
+ urls.append(args['url'])
+ return urls
- if not self._available_versions:
- tty.warn("Found no versions for %s" % self.name,
- "Check the list_url and list_depth attribute on the "
- + self.name + " package.",
- "Use them to tell Spack where to look for versions.")
- except spack.error.NoNetworkConnectionError, e:
- tty.die("Package.fetch_available_versions couldn't connect to:",
- e.url, e.message)
+ def fetch_remote_versions(self):
+ """Try to find remote versions of this package using the
+ list_url and any other URLs described in the package file."""
+ if not self.all_urls:
+ raise VersionFetchError(self.__class__)
- return self._available_versions
+ try:
+ return find_versions_of_archive(
+ *self.all_urls, list_url=self.list_url, list_depth=self.list_depth)
+ except spack.error.NoNetworkConnectionError, e:
+ tty.die("Package.fetch_versions couldn't connect to:",
+ e.url, e.message)
@property
- def available_versions(self):
- # If the package overrode available_versions, then use that.
- if self.versions is not None:
- return VersionList(self.versions.keys())
- else:
- vlist = self.fetch_available_versions()
- if not vlist:
- vlist = ver([self.version])
- return vlist
+ def rpath(self):
+ """Get the rpath this package links with, as a list of paths."""
+ rpaths = [self.prefix.lib, self.prefix.lib64]
+ rpaths.extend(d.prefix.lib for d in self.spec.traverse(root=False)
+ if os.path.isdir(d.prefix.lib))
+ rpaths.extend(d.prefix.lib64 for d in self.spec.traverse(root=False)
+ if os.path.isdir(d.prefix.lib64))
+ return rpaths
-def find_versions_of_archive(archive_url, **kwargs):
- list_url = kwargs.get('list_url', None)
- list_depth = kwargs.get('list_depth', 1)
+ @property
+ def rpath_args(self):
+ """Get the rpath args as a string, with -Wl,-rpath= for each element."""
+ return " ".join("-Wl,-rpath=%s" % p for p in self.rpath)
- if not list_url:
- list_url = url.find_list_url(archive_url)
- # This creates a regex from the URL with a capture group for the
- # version part of the URL. The capture group is converted to a
- # generic wildcard, so we can use this to extract things on a page
- # that look like archive URLs.
- url_regex = url.wildcard_version(archive_url)
+def find_versions_of_archive(*archive_urls, **kwargs):
+ list_url = kwargs.get('list_url', None)
+ list_depth = kwargs.get('list_depth', 1)
- # We'll be a bit more liberal and just look for the archive part,
- # not the full path.
- archive_regex = os.path.basename(url_regex)
+ # Generate a list of list_urls based on archive urls and any
+ # explicitly listed list_url in the package
+ list_urls = set()
+ if list_url:
+ list_urls.add(list_url)
+ for aurl in archive_urls:
+ list_urls.add(url.find_list_url(aurl))
# Grab some web pages to scrape.
- page_map = get_pages(list_url, depth=list_depth)
+ page_map = {}
+ for lurl in list_urls:
+ page_map.update(get_pages(lurl, depth=list_depth))
+
+ # Scrape them for archive URLs
+ regexes = []
+ for aurl in archive_urls:
+ # This creates a regex from the URL with a capture group for
+ # the version part of the URL. The capture group is converted
+ # to a generic wildcard, so we can use this to extract things
+ # on a page that look like archive URLs.
+ url_regex = url.wildcard_version(aurl)
+
+ # We'll be a bit more liberal and just look for the archive
+ # part, not the full path.
+ regexes.append(os.path.basename(url_regex))
# Build a version list from all the matches we find
- versions = VersionList()
- for site, page in page_map.iteritems():
+ versions = {}
+ for page_url, content in page_map.iteritems():
# extract versions from matches.
- matches = re.finditer(archive_regex, page)
- version_strings = set(m.group(1) for m in matches)
- for v in version_strings:
- versions.add(Version(v))
+ for regex in regexes:
+ versions.update(
+ (Version(m.group(1)), urljoin(page_url, m.group(0)))
+ for m in re.finditer(regex, content))
return versions
@@ -930,15 +1170,23 @@ def validate_package_url(url_string):
def print_pkg(message):
"""Outputs a message with a package icon."""
- mac_ver = py_platform.mac_ver()[0]
- if mac_ver and Version(mac_ver) >= Version('10.7'):
- print u"\U0001F4E6" + tty.indent,
- else:
- from llnl.util.tty.color import cwrite
- cwrite('@*g{[+]} ')
+ from llnl.util.tty.color import cwrite
+ cwrite('@*g{[+]} ')
print message
+def _hms(seconds):
+ """Convert time in seconds to hours, minutes, seconds."""
+ m, s = divmod(seconds, 60)
+ h, m = divmod(m, 60)
+
+ parts = []
+ if h: parts.append("%dh" % h)
+ if m: parts.append("%dm" % m)
+ if s: parts.append("%.2fs" % s)
+ return ' '.join(parts)
+
+
class FetchError(spack.error.SpackError):
"""Raised when something goes wrong during fetch."""
def __init__(self, message, long_msg=None):
@@ -976,8 +1224,8 @@ class VersionFetchError(PackageError):
"""Raised when a version URL cannot automatically be determined."""
def __init__(self, cls):
super(VersionFetchError, self).__init__(
- "Cannot fetch version for package %s " % cls.__name__ +
- "because it does not define a default url.")
+ "Cannot fetch versions for package %s " % cls.__name__ +
+ "because it does not define any URLs to fetch.")
class NoURLError(PackageError):
@@ -985,3 +1233,17 @@ class NoURLError(PackageError):
def __init__(self, cls):
super(NoURLError, self).__init__(
"Package %s has no version with a URL." % cls.__name__)
+
+
+class ExtensionError(PackageError): pass
+
+
+class ExtensionConflictError(ExtensionError):
+ def __init__(self, path):
+ super(ExtensionConflictError, self).__init__(
+ "Extension blocked by file: %s" % path)
+
+
+class ActivationError(ExtensionError):
+ def __init__(self, msg, long_msg=None):
+ super(ActivationError, self).__init__(msg, long_msg)
diff --git a/lib/spack/spack/packages.py b/lib/spack/spack/packages.py
index 047d82a93a..43c4c191c1 100644
--- a/lib/spack/spack/packages.py
+++ b/lib/spack/spack/packages.py
@@ -30,7 +30,7 @@ import imp
import llnl.util.tty as tty
from llnl.util.filesystem import join_path
-from llnl.util.lang import memoized
+from llnl.util.lang import *
import spack.error
import spack.spec
@@ -74,8 +74,11 @@ class PackageDB(object):
if not spec in self.instances:
package_class = self.get_class_for_package_name(spec.name)
try:
- self.instances[spec.copy()] = package_class(spec)
+ copy = spec.copy()
+ self.instances[copy] = package_class(copy)
except Exception, e:
+ if spack.debug:
+ sys.excepthook(*sys.exc_info())
raise FailedConstructorError(spec.name, e)
return self.instances[spec]
@@ -109,6 +112,24 @@ class PackageDB(object):
return providers
+ @_autospec
+ def extensions_for(self, extendee_spec):
+ return [p for p in self.all_packages() if p.extends(extendee_spec)]
+
+
+ @_autospec
+ def installed_extensions_for(self, extendee_spec):
+ for s in self.installed_package_specs():
+ try:
+ if s.package.extends(extendee_spec):
+ yield s.package
+ except UnknownPackageError, e:
+ # Skip packages we know nothing about
+ continue
+ # TODO: add some conditional way to do this instead of
+ # catching exceptions.
+
+
def dirname_for_package_name(self, pkg_name):
"""Get the directory name for a particular package. This is the
directory that contains its package.py file."""
@@ -171,6 +192,7 @@ class PackageDB(object):
yield self.get(name)
+ @memoized
def exists(self, pkg_name):
"""Whether a package with the supplied name exists ."""
return os.path.exists(self.filename_for_package_name(pkg_name))
@@ -213,38 +235,6 @@ class PackageDB(object):
return cls
- def graph_dependencies(self, out=sys.stdout):
- """Print out a graph of all the dependencies between package.
- Graph is in dot format."""
- out.write('digraph G {\n')
- out.write(' label = "Spack Dependencies"\n')
- out.write(' labelloc = "b"\n')
- out.write(' rankdir = "LR"\n')
- out.write(' ranksep = "5"\n')
- out.write('\n')
-
- def quote(string):
- return '"%s"' % string
-
- deps = []
- for pkg in self.all_packages():
- out.write(' %-30s [label="%s"]\n' % (quote(pkg.name), pkg.name))
-
- # Add edges for each depends_on in the package.
- for dep_name, dep in pkg.dependencies.iteritems():
- deps.append((pkg.name, dep_name))
-
- # If the package provides something, add an edge for that.
- for provider in set(p.name for p in pkg.provided):
- deps.append((provider, pkg.name))
-
- out.write('\n')
-
- for pair in deps:
- out.write(' "%s" -> "%s"\n' % pair)
- out.write('}\n')
-
-
class UnknownPackageError(spack.error.SpackError):
"""Raised when we encounter a package spack doesn't have."""
def __init__(self, name):
diff --git a/lib/spack/spack/relations.py b/lib/spack/spack/relations.py
index b1f4348945..a0c7723473 100644
--- a/lib/spack/spack/relations.py
+++ b/lib/spack/spack/relations.py
@@ -68,7 +68,7 @@ provides
spack install mpileaks ^mvapich
spack install mpileaks ^mpich
"""
-__all__ = [ 'depends_on', 'provides', 'patch', 'version' ]
+__all__ = [ 'depends_on', 'extends', 'provides', 'patch', 'version' ]
import re
import inspect
@@ -107,8 +107,9 @@ def depends_on(*specs):
"""Adds a dependencies local variable in the locals of
the calling class, based on args. """
pkg = get_calling_package_name()
+ clocals = caller_locals()
+ dependencies = clocals.setdefault('dependencies', {})
- dependencies = caller_locals().setdefault('dependencies', {})
for string in specs:
for spec in spack.spec.parse(string):
if pkg == spec.name:
@@ -116,6 +117,34 @@ def depends_on(*specs):
dependencies[spec.name] = spec
+def extends(spec, **kwargs):
+ """Same as depends_on, but dependency is symlinked into parent prefix.
+
+ This is for Python and other language modules where the module
+ needs to be installed into the prefix of the Python installation.
+ Spack handles this by installing modules into their own prefix,
+ but allowing ONE module version to be symlinked into a parent
+ Python install at a time.
+
+ keyword arguments can be passed to extends() so that extension
+ packages can pass parameters to the extendee's extension
+ mechanism.
+
+ """
+ pkg = get_calling_package_name()
+ clocals = caller_locals()
+ dependencies = clocals.setdefault('dependencies', {})
+ extendees = clocals.setdefault('extendees', {})
+ if extendees:
+ raise RelationError("Packages can extend at most one other package.")
+
+ spec = Spec(spec)
+ if pkg == spec.name:
+ raise CircularReferenceError('extends', pkg)
+ dependencies[spec.name] = spec
+ extendees[spec.name] = (spec, kwargs)
+
+
def provides(*specs, **kwargs):
"""Allows packages to provide a virtual dependency. If a package provides
'mpi', other packages can declare that they depend on "mpi", and spack
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index a0ab38c049..dffdccaddb 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -553,6 +553,13 @@ class Spec(object):
@property
+ def cshort_spec(self):
+ """Returns a version of the spec with the dependencies hashed
+ instead of completely enumerated."""
+ return self.format('$_$@$%@$+$=$#', color=True)
+
+
+ @property
def prefix(self):
return Prefix(spack.install_layout.path_for_spec(self))
@@ -712,6 +719,15 @@ class Spec(object):
raise InconsistentSpecError("Invalid Spec DAG: %s" % e.message)
+ def index(self):
+ """Return DependencyMap that points to all the dependencies in this
+ spec."""
+ dm = DependencyMap()
+ for spec in self.traverse():
+ dm[spec.name] = spec
+ return dm
+
+
def flatten(self):
"""Pull all dependencies up to the root (this spec).
Merge constraints for dependencies with the same name, and if they
@@ -858,7 +874,7 @@ class Spec(object):
def normalized(self):
"""Return a normalized copy of this spec without modifying this spec."""
clone = self.copy()
- clone.normalized()
+ clone.normalize()
return clone
@@ -1096,8 +1112,9 @@ class Spec(object):
def __contains__(self, spec):
- """True if this spec has any dependency that satisfies the supplied
- spec."""
+ """True if this spec satisfies the provided spec, or if any dependency
+ does. If the spec has no name, then we parse this one first.
+ """
spec = self._autospec(spec)
for s in self.traverse():
if s.satisfies(spec):
@@ -1288,12 +1305,13 @@ class Spec(object):
def tree(self, **kwargs):
"""Prints out this spec and its dependencies, tree-formatted
with indentation."""
- color = kwargs.get('color', False)
- depth = kwargs.get('depth', False)
- showid = kwargs.get('ids', False)
- cover = kwargs.get('cover', 'nodes')
- indent = kwargs.get('indent', 0)
- format = kwargs.get('format', '$_$@$%@$+$=')
+ color = kwargs.pop('color', False)
+ depth = kwargs.pop('depth', False)
+ showid = kwargs.pop('ids', False)
+ cover = kwargs.pop('cover', 'nodes')
+ indent = kwargs.pop('indent', 0)
+ fmt = kwargs.pop('format', '$_$@$%@$+$=')
+ check_kwargs(kwargs, self.tree)
out = ""
cur_id = 0
@@ -1310,7 +1328,7 @@ class Spec(object):
out += (" " * d)
if d > 0:
out += "^"
- out += node.format(format, color=color) + "\n"
+ out += node.format(fmt, color=color) + "\n"
return out
diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py
index b371761785..84454c9d2c 100644
--- a/lib/spack/spack/stage.py
+++ b/lib/spack/spack/stage.py
@@ -257,8 +257,11 @@ class Stage(object):
fetcher.fetch()
break
except spack.error.SpackError, e:
- tty.msg("Fetching %s failed." % fetcher)
+ tty.msg("Fetching from %s failed." % fetcher)
+ tty.debug(e)
continue
+ else:
+ tty.die("All fetchers failed for %s" % self.name)
def check(self):
diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py
index 730e75ce1d..c53e6774fc 100644
--- a/lib/spack/spack/test/__init__.py
+++ b/lib/spack/spack/test/__init__.py
@@ -50,7 +50,9 @@ test_names = ['versions',
'svn_fetch',
'hg_fetch',
'mirror',
- 'url_extrapolate']
+ 'url_extrapolate',
+ 'cc',
+ 'link_tree']
def list_tests():
diff --git a/lib/spack/spack/test/cc.py b/lib/spack/spack/test/cc.py
new file mode 100644
index 0000000000..aa16f9b351
--- /dev/null
+++ b/lib/spack/spack/test/cc.py
@@ -0,0 +1,130 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""
+This test checks that the Spack cc compiler wrapper is parsing
+arguments correctly.
+"""
+import os
+import unittest
+
+from llnl.util.filesystem import *
+import spack
+from spack.util.executable import *
+
+# Complicated compiler test command
+test_command = [
+ '-I/test/include', '-L/test/lib', '-L/other/lib', '-I/other/include',
+ 'arg1',
+ '-Wl,--start-group',
+ 'arg2',
+ '-Wl,-rpath=/first/rpath', 'arg3', '-Wl,-rpath', '-Wl,/second/rpath',
+ '-llib1', '-llib2',
+ 'arg4',
+ '-Wl,--end-group',
+ '-Xlinker,-rpath', '-Xlinker,/third/rpath', '-Xlinker,-rpath=/fourth/rpath',
+ '-llib3', '-llib4',
+ 'arg5', 'arg6']
+
+
+class CompilerTest(unittest.TestCase):
+
+ def setUp(self):
+ self.cc = Executable(join_path(spack.build_env_path, "cc"))
+ self.ld = Executable(join_path(spack.build_env_path, "ld"))
+ self.cpp = Executable(join_path(spack.build_env_path, "cpp"))
+
+ os.environ['SPACK_CC'] = "/bin/mycc"
+ os.environ['SPACK_PREFIX'] = "/usr"
+ os.environ['SPACK_ENV_PATH']="test"
+ os.environ['SPACK_DEBUG_LOG_DIR'] = "."
+ os.environ['SPACK_COMPILER_SPEC'] = "gcc@4.4.7"
+ os.environ['SPACK_SHORT_SPEC'] = "foo@1.2"
+
+
+ def check_cc(self, command, args, expected):
+ os.environ['SPACK_TEST_COMMAND'] = command
+ self.assertEqual(self.cc(*args, return_output=True).strip(), expected)
+
+
+ def check_ld(self, command, args, expected):
+ os.environ['SPACK_TEST_COMMAND'] = command
+ self.assertEqual(self.ld(*args, return_output=True).strip(), expected)
+
+
+ def check_cpp(self, command, args, expected):
+ os.environ['SPACK_TEST_COMMAND'] = command
+ self.assertEqual(self.cpp(*args, return_output=True).strip(), expected)
+
+
+ def test_vcheck_mode(self):
+ self.check_cc('dump-mode', ['-I/include', '--version'], "vcheck")
+ self.check_cc('dump-mode', ['-I/include', '-V'], "vcheck")
+ self.check_cc('dump-mode', ['-I/include', '-v'], "vcheck")
+ self.check_cc('dump-mode', ['-I/include', '-dumpversion'], "vcheck")
+ self.check_cc('dump-mode', ['-I/include', '--version', '-c'], "vcheck")
+ self.check_cc('dump-mode', ['-I/include', '-V', '-o', 'output'], "vcheck")
+
+
+ def test_cpp_mode(self):
+ self.check_cc('dump-mode', ['-E'], "cpp")
+ self.check_cpp('dump-mode', [], "cpp")
+
+
+ def test_ccld_mode(self):
+ self.check_cc('dump-mode', [], "ccld")
+ self.check_cc('dump-mode', ['foo.c', '-o', 'foo'], "ccld")
+ self.check_cc('dump-mode', ['foo.c', '-o', 'foo', '-Wl,-rpath=foo'], "ccld")
+ self.check_cc('dump-mode', ['foo.o', 'bar.o', 'baz.o', '-o', 'foo', '-Wl,-rpath=foo'], "ccld")
+
+
+ def test_ld_mode(self):
+ self.check_ld('dump-mode', [], "ld")
+ self.check_ld('dump-mode', ['foo.o', 'bar.o', 'baz.o', '-o', 'foo', '-Wl,-rpath=foo'], "ld")
+
+
+ def test_includes(self):
+ self.check_cc('dump-includes', test_command,
+ "\n".join(["/test/include", "/other/include"]))
+
+
+ def test_libraries(self):
+ self.check_cc('dump-libraries', test_command,
+ "\n".join(["/test/lib", "/other/lib"]))
+
+
+ def test_libs(self):
+ self.check_cc('dump-libs', test_command,
+ "\n".join(["lib1", "lib2", "lib3", "lib4"]))
+
+
+ def test_rpaths(self):
+ self.check_cc('dump-rpaths', test_command,
+ "\n".join(["/first/rpath", "/second/rpath", "/third/rpath", "/fourth/rpath"]))
+
+
+ def test_other_args(self):
+ self.check_cc('dump-other-args', test_command,
+ "\n".join(["arg1", "-Wl,--start-group", "arg2", "arg3", "arg4",
+ "-Wl,--end-group", "arg5", "arg6"]))
diff --git a/lib/spack/spack/test/git_fetch.py b/lib/spack/spack/test/git_fetch.py
index f6d9bfcf05..04422adb57 100644
--- a/lib/spack/spack/test/git_fetch.py
+++ b/lib/spack/spack/test/git_fetch.py
@@ -61,7 +61,7 @@ class GitFetchTest(MockPackagesTest):
if self.repo.stage is not None:
self.repo.stage.destroy()
- self.pkg.do_clean_dist()
+ self.pkg.do_clean()
def assert_rev(self, rev):
@@ -93,7 +93,7 @@ class GitFetchTest(MockPackagesTest):
untracked_file = 'foobarbaz'
touch(untracked_file)
self.assertTrue(os.path.isfile(untracked_file))
- self.pkg.do_clean_work()
+ self.pkg.do_restage()
self.assertFalse(os.path.isfile(untracked_file))
self.assertTrue(os.path.isdir(self.pkg.stage.source_path))
diff --git a/lib/spack/spack/test/hg_fetch.py b/lib/spack/spack/test/hg_fetch.py
index 97c5b665e7..e1ab2cffe6 100644
--- a/lib/spack/spack/test/hg_fetch.py
+++ b/lib/spack/spack/test/hg_fetch.py
@@ -60,7 +60,7 @@ class HgFetchTest(MockPackagesTest):
if self.repo.stage is not None:
self.repo.stage.destroy()
- self.pkg.do_clean_dist()
+ self.pkg.do_clean()
def try_fetch(self, rev, test_file, args):
@@ -87,7 +87,7 @@ class HgFetchTest(MockPackagesTest):
untracked = 'foobarbaz'
touch(untracked)
self.assertTrue(os.path.isfile(untracked))
- self.pkg.do_clean_work()
+ self.pkg.do_restage()
self.assertFalse(os.path.isfile(untracked))
self.assertTrue(os.path.isdir(self.pkg.stage.source_path))
diff --git a/lib/spack/spack/test/link_tree.py b/lib/spack/spack/test/link_tree.py
new file mode 100644
index 0000000000..bc7c2c6b5e
--- /dev/null
+++ b/lib/spack/spack/test/link_tree.py
@@ -0,0 +1,153 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import os
+import unittest
+import shutil
+import tempfile
+from contextlib import closing
+
+from llnl.util.filesystem import *
+from llnl.util.link_tree import LinkTree
+
+from spack.stage import Stage
+
+
+class LinkTreeTest(unittest.TestCase):
+ """Tests Spack's LinkTree class."""
+
+ def setUp(self):
+ self.stage = Stage('link-tree-test')
+
+ with working_dir(self.stage.path):
+ touchp('source/1')
+ touchp('source/a/b/2')
+ touchp('source/a/b/3')
+ touchp('source/c/4')
+ touchp('source/c/d/5')
+ touchp('source/c/d/6')
+ touchp('source/c/d/e/7')
+
+ source_path = os.path.join(self.stage.path, 'source')
+ self.link_tree = LinkTree(source_path)
+
+
+ def tearDown(self):
+ if self.stage:
+ self.stage.destroy()
+
+
+ def check_file_link(self, filename):
+ self.assertTrue(os.path.isfile(filename))
+ self.assertTrue(os.path.islink(filename))
+
+
+ def check_dir(self, filename):
+ self.assertTrue(os.path.isdir(filename))
+
+
+ def test_merge_to_new_directory(self):
+ with working_dir(self.stage.path):
+ self.link_tree.merge('dest')
+
+ self.check_file_link('dest/1')
+ self.check_file_link('dest/a/b/2')
+ self.check_file_link('dest/a/b/3')
+ self.check_file_link('dest/c/4')
+ self.check_file_link('dest/c/d/5')
+ self.check_file_link('dest/c/d/6')
+ self.check_file_link('dest/c/d/e/7')
+
+ self.link_tree.unmerge('dest')
+
+ self.assertFalse(os.path.exists('dest'))
+
+
+ def test_merge_to_existing_directory(self):
+ with working_dir(self.stage.path):
+
+ touchp('dest/x')
+ touchp('dest/a/b/y')
+
+ self.link_tree.merge('dest')
+
+ self.check_file_link('dest/1')
+ self.check_file_link('dest/a/b/2')
+ self.check_file_link('dest/a/b/3')
+ self.check_file_link('dest/c/4')
+ self.check_file_link('dest/c/d/5')
+ self.check_file_link('dest/c/d/6')
+ self.check_file_link('dest/c/d/e/7')
+
+ self.assertTrue(os.path.isfile('dest/x'))
+ self.assertTrue(os.path.isfile('dest/a/b/y'))
+
+ self.link_tree.unmerge('dest')
+
+ self.assertTrue(os.path.isfile('dest/x'))
+ self.assertTrue(os.path.isfile('dest/a/b/y'))
+
+ self.assertFalse(os.path.isfile('dest/1'))
+ self.assertFalse(os.path.isfile('dest/a/b/2'))
+ self.assertFalse(os.path.isfile('dest/a/b/3'))
+ self.assertFalse(os.path.isfile('dest/c/4'))
+ self.assertFalse(os.path.isfile('dest/c/d/5'))
+ self.assertFalse(os.path.isfile('dest/c/d/6'))
+ self.assertFalse(os.path.isfile('dest/c/d/e/7'))
+
+
+ def test_merge_with_empty_directories(self):
+ with working_dir(self.stage.path):
+ mkdirp('dest/f/g')
+ mkdirp('dest/a/b/h')
+
+ self.link_tree.merge('dest')
+ self.link_tree.unmerge('dest')
+
+ self.assertFalse(os.path.exists('dest/1'))
+ self.assertFalse(os.path.exists('dest/a/b/2'))
+ self.assertFalse(os.path.exists('dest/a/b/3'))
+ self.assertFalse(os.path.exists('dest/c/4'))
+ self.assertFalse(os.path.exists('dest/c/d/5'))
+ self.assertFalse(os.path.exists('dest/c/d/6'))
+ self.assertFalse(os.path.exists('dest/c/d/e/7'))
+
+ self.assertTrue(os.path.isdir('dest/a/b/h'))
+ self.assertTrue(os.path.isdir('dest/f/g'))
+
+
+ def test_ignore(self):
+ with working_dir(self.stage.path):
+ touchp('source/.spec')
+ touchp('dest/.spec')
+
+ self.link_tree.merge('dest', ignore=lambda x: x == '.spec')
+ self.link_tree.unmerge('dest', ignore=lambda x: x == '.spec')
+
+ self.assertFalse(os.path.exists('dest/1'))
+ self.assertFalse(os.path.exists('dest/a'))
+ self.assertFalse(os.path.exists('dest/c'))
+
+ self.assertTrue(os.path.isfile('source/.spec'))
+ self.assertTrue(os.path.isfile('dest/.spec'))
diff --git a/lib/spack/spack/test/mirror.py b/lib/spack/spack/test/mirror.py
index 51334198ec..89ab14359e 100644
--- a/lib/spack/spack/test/mirror.py
+++ b/lib/spack/spack/test/mirror.py
@@ -44,7 +44,7 @@ class MirrorTest(MockPackagesTest):
self.repos = {}
- def set_up_package(self, name, mock_repo_class, url_attr):
+ def set_up_package(self, name, MockRepoClass, url_attr):
"""Use this to set up a mock package to be mirrored.
Each package needs us to:
1. Set up a mock repo/archive to fetch from.
@@ -56,7 +56,7 @@ class MirrorTest(MockPackagesTest):
# Get the package and fix its fetch args to point to a mock repo
pkg = spack.db.get(spec)
- repo = mock_repo_class()
+ repo = MockRepoClass()
self.repos[name] = repo
# change the fetch args of the first (only) version.
@@ -71,7 +71,7 @@ class MirrorTest(MockPackagesTest):
for name, repo in self.repos.items():
if repo.stage:
- repo.stage.destroy()
+ pass #repo.stage.destroy()
self.repos.clear()
@@ -129,7 +129,7 @@ class MirrorTest(MockPackagesTest):
self.assertTrue(all(l in exclude for l in dcmp.left_only))
finally:
- stage.destroy()
+ pass #stage.destroy()
def test_git_mirror(self):
diff --git a/lib/spack/spack/test/python_version.py b/lib/spack/spack/test/python_version.py
index 04b4eadf34..f814df3226 100644
--- a/lib/spack/spack/test/python_version.py
+++ b/lib/spack/spack/test/python_version.py
@@ -45,7 +45,6 @@ class PythonVersionTest(unittest.TestCase):
def spack_python_files(self):
# first file is the spack script.
yield spack.spack_file
- yield os.path.join(spack.build_env_path, 'cc')
# Next files are all the source files and package files.
search_paths = [spack.lib_path, spack.var_path]
diff --git a/lib/spack/spack/test/svn_fetch.py b/lib/spack/spack/test/svn_fetch.py
index a48a86dcc3..0159fb087f 100644
--- a/lib/spack/spack/test/svn_fetch.py
+++ b/lib/spack/spack/test/svn_fetch.py
@@ -60,7 +60,7 @@ class SvnFetchTest(MockPackagesTest):
if self.repo.stage is not None:
self.repo.stage.destroy()
- self.pkg.do_clean_dist()
+ self.pkg.do_clean()
def assert_rev(self, rev):
@@ -99,7 +99,7 @@ class SvnFetchTest(MockPackagesTest):
untracked = 'foobarbaz'
touch(untracked)
self.assertTrue(os.path.isfile(untracked))
- self.pkg.do_clean_work()
+ self.pkg.do_restage()
self.assertFalse(os.path.isfile(untracked))
self.assertTrue(os.path.isdir(self.pkg.stage.source_path))
diff --git a/lib/spack/spack/test/url_extrapolate.py b/lib/spack/spack/test/url_extrapolate.py
index c30ff1f009..00d8216020 100644
--- a/lib/spack/spack/test/url_extrapolate.py
+++ b/lib/spack/spack/test/url_extrapolate.py
@@ -32,60 +32,76 @@ from spack.version import ver
from spack.test.mock_packages_test import *
-class UrlExtrapolateTest(MockPackagesTest):
-
- def test_known_version(self):
- d = spack.db.get('dyninst')
-
- self.assertEqual(
- d.url_for_version('8.2'), 'http://www.paradyn.org/release8.2/DyninstAPI-8.2.tgz')
- self.assertEqual(
- d.url_for_version('8.1.2'), 'http://www.paradyn.org/release8.1.2/DyninstAPI-8.1.2.tgz')
- self.assertEqual(
- d.url_for_version('8.1.1'), 'http://www.paradyn.org/release8.1/DyninstAPI-8.1.1.tgz')
-
-
- def test_extrapolate_version(self):
- d = spack.db.get('dyninst')
-
- # Nearest URL for 8.1.1.5 is 8.1.1, and the URL there is
- # release8.1/DyninstAPI-8.1.1.tgz. Only the last part matches
- # the version, so only extrapolate the last part. Obviously
- # dyninst has ambiguous URL versions, but we want to make sure
- # extrapolation works in a well-defined way.
- self.assertEqual(
- d.url_for_version('8.1.1.5'), 'http://www.paradyn.org/release8.1/DyninstAPI-8.1.1.5.tgz')
-
- # 8.2 matches both the release8.2 component and the DyninstAPI-8.2 component.
- # Extrapolation should replace both with the new version.
- # TODO: figure out a consistent policy for this.
- # self.assertEqual(
- # d.url_for_version('8.2.3'), 'http://www.paradyn.org/release8.2.3/DyninstAPI-8.2.3.tgz')
-
-
- def test_with_package(self):
- d = spack.db.get('dyninst@8.2')
- self.assertEqual(d.fetcher.url, 'http://www.paradyn.org/release8.2/DyninstAPI-8.2.tgz')
-
- d = spack.db.get('dyninst@8.1.2')
- self.assertEqual(d.fetcher.url, 'http://www.paradyn.org/release8.1.2/DyninstAPI-8.1.2.tgz')
-
- d = spack.db.get('dyninst@8.1.1')
- self.assertEqual(d.fetcher.url, 'http://www.paradyn.org/release8.1/DyninstAPI-8.1.1.tgz')
-
-
- def test_concrete_package(self):
- s = Spec('dyninst@8.2')
- s.concretize()
- d = spack.db.get(s)
- self.assertEqual(d.fetcher.url, 'http://www.paradyn.org/release8.2/DyninstAPI-8.2.tgz')
-
- s = Spec('dyninst@8.1.2')
- s.concretize()
- d = spack.db.get(s)
- self.assertEqual(d.fetcher.url, 'http://www.paradyn.org/release8.1.2/DyninstAPI-8.1.2.tgz')
-
- s = Spec('dyninst@8.1.1')
- s.concretize()
- d = spack.db.get(s)
- self.assertEqual(d.fetcher.url, 'http://www.paradyn.org/release8.1/DyninstAPI-8.1.1.tgz')
+class UrlExtrapolateTest(unittest.TestCase):
+
+ def check_url(self, base, version, new_url):
+ self.assertEqual(url.substitute_version(base, version), new_url)
+
+
+ def test_libelf_version(self):
+ base = "http://www.mr511.de/software/libelf-0.8.13.tar.gz"
+ self.check_url(base, '0.8.13', base)
+ self.check_url(base, '0.8.12', "http://www.mr511.de/software/libelf-0.8.12.tar.gz")
+ self.check_url(base, '0.3.1', "http://www.mr511.de/software/libelf-0.3.1.tar.gz")
+ self.check_url(base, '1.3.1b', "http://www.mr511.de/software/libelf-1.3.1b.tar.gz")
+
+
+ def test_libdwarf_version(self):
+ base = "http://www.prevanders.net/libdwarf-20130729.tar.gz"
+ self.check_url(base, '20130729', base)
+ self.check_url(base, '8.12', "http://www.prevanders.net/libdwarf-8.12.tar.gz")
+
+
+ def test_dyninst_version(self):
+ # Dyninst has a version twice in the URL.
+ base = "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1.2/DyninstAPI-8.1.2.tgz"
+ self.check_url(base, '8.1.2', base)
+ self.check_url(base, '8.2',
+ "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.2/DyninstAPI-8.2.tgz")
+ self.check_url(base, '8.3.1',
+ "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.3.1/DyninstAPI-8.3.1.tgz")
+
+
+ def test_partial_version_prefix(self):
+ # Test now with a partial prefix earlier in the URL -- this is
+ # hard to figure out so Spack only substitutes the last
+ # instance of the version.
+ base = "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1/DyninstAPI-8.1.2.tgz"
+ self.check_url(base, '8.1.2', base)
+ self.check_url(base, '8.1.4',
+ "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1/DyninstAPI-8.1.4.tgz")
+ self.check_url(base, '8.2',
+ "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1/DyninstAPI-8.2.tgz")
+ self.check_url(base, '8.3.1',
+ "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1/DyninstAPI-8.3.1.tgz")
+
+
+ def test_scalasca_partial_version(self):
+ # Note that this probably doesn't actually work, but sites are
+ # inconsistent about their directory structure, so it's not
+ # clear what is right. This test is for consistency and to
+ # document behavior. If you figure out a good way to handle
+ # this case, fix the tests too.
+ self.check_url('http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3-TP1.tar.gz', '8.3.1',
+ 'http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-8.3.1.tar.gz')
+ self.check_url('http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3-TP1.tar.gz', '8.3.1',
+ 'http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-8.3.1.tar.gz')
+
+
+ def test_mpileaks_version(self):
+ self.check_url('https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz', '2.1.3',
+ 'https://github.com/hpc/mpileaks/releases/download/v2.1.3/mpileaks-2.1.3.tar.gz')
+
+
+ def test_gcc(self):
+ self.check_url('http://open-source-box.org/gcc/gcc-4.9.2/gcc-4.9.2.tar.bz2', '4.7',
+ 'http://open-source-box.org/gcc/gcc-4.7/gcc-4.7.tar.bz2')
+ self.check_url('http://open-source-box.org/gcc/gcc-4.4.7/gcc-4.4.7.tar.bz2', '4.4.7',
+ 'http://open-source-box.org/gcc/gcc-4.4.7/gcc-4.4.7.tar.bz2')
+
+
+ def test_github_raw(self):
+ self.check_url('https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true', '2.0.7',
+ 'https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true')
+ self.check_url('https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true', '4.7',
+ 'https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v4.7.tgz?raw=true')
diff --git a/lib/spack/spack/test/url_parse.py b/lib/spack/spack/test/url_parse.py
index 7a4d201d90..ae1d559f7c 100644
--- a/lib/spack/spack/test/url_parse.py
+++ b/lib/spack/spack/test/url_parse.py
@@ -295,3 +295,35 @@ class UrlParseTest(unittest.TestCase):
self.check(
'hdf5', '1.8.13',
'http://www.hdfgroup.org/ftp/HDF5/current/src/hdf5-1.8.13.tar.bz2')
+
+ def test_scalasca_version(self):
+ self.check(
+ 'cube', '4.2.3',
+ 'http://apps.fz-juelich.de/scalasca/releases/cube/4.2/dist/cube-4.2.3.tar.gz')
+ self.check(
+ 'cube', '4.3-TP1',
+ 'http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3-TP1.tar.gz')
+
+ def test_mpileaks_version(self):
+ self.check(
+ 'mpileaks', '1.0',
+ 'https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz')
+ self.check(
+ 'mpileaks', '1.0',
+ 'https://github.com/hpc/mpileaks/releases/download/1.0/mpileaks-1.0.tar.gz')
+
+ def test_gcc_version(self):
+ self.check(
+ 'gcc', '4.4.7',
+ 'http://open-source-box.org/gcc/gcc-4.4.7/gcc-4.4.7.tar.bz2')
+
+ def test_gcc_version_precedence(self):
+ # prefer the version in the tarball, not in the url prefix.
+ self.check(
+ 'gcc', '4.4.7',
+ 'http://open-source-box.org/gcc/gcc-4.9.2/gcc-4.4.7.tar.bz2')
+
+ def test_github_raw_url(self):
+ self.check(
+ 'PowerParser', '2.0.7',
+ 'https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true')
diff --git a/lib/spack/spack/url.py b/lib/spack/spack/url.py
index e2fbb19f5d..58838306af 100644
--- a/lib/spack/spack/url.py
+++ b/lib/spack/spack/url.py
@@ -46,6 +46,11 @@ it's never been told about that version before.
"""
import os
import re
+from StringIO import StringIO
+from urlparse import urlsplit, urlunsplit
+
+import llnl.util.tty as tty
+from llnl.util.tty.color import *
import spack.error
import spack.util.compression as comp
@@ -57,27 +62,6 @@ from spack.version import Version
# "path" seemed like the most generic term.
#
-class UrlParseError(spack.error.SpackError):
- """Raised when the URL module can't parse something correctly."""
- def __init__(self, msg, path):
- super(UrlParseError, self).__init__(msg)
- self.path = path
-
-
-class UndetectableVersionError(UrlParseError):
- """Raised when we can't parse a version from a string."""
- def __init__(self, path):
- super(UndetectableVersionError, self).__init__(
- "Couldn't detect version in: " + path, path)
-
-
-class UndetectableNameError(UrlParseError):
- """Raised when we can't parse a package name from a string."""
- def __init__(self, path):
- super(UndetectableNameError, self).__init__(
- "Couldn't parse package name in: " + path, path)
-
-
def find_list_url(url):
"""Finds a good list URL for the supplied URL. This depends on
the site. By default, just assumes that a good list URL is the
@@ -98,20 +82,96 @@ def find_list_url(url):
return os.path.dirname(url)
-def parse_version_string_with_indices(path):
+def strip_query_and_fragment(path):
+ try:
+ components = urlsplit(path)
+ stripped = components[:3] + (None, None)
+
+ query, frag = components[3:5]
+ suffix = ''
+ if query: suffix += '?' + query
+ if frag: suffix += '#' + frag
+
+ return (urlunsplit(stripped), suffix)
+
+ except ValueError:
+ tty.debug("Got error parsing path %s" % path)
+ return (path, '') # Ignore URL parse errors here
+
+
+def split_url_extension(path):
+ """Some URLs have a query string, e.g.:
+
+ 1. https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true
+ 2. http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin.tar.gz
+
+ In (1), the query string needs to be stripped to get at the
+ extension, but in (2), the filename is IN a single final query
+ argument.
+
+ This strips the URL into three pieces: prefix, ext, and suffix.
+ The suffix contains anything that was stripped off the URL to
+ get at the file extension. In (1), it will be '?raw=true', but
+ in (2), it will be empty. e.g.:
+
+ 1. ('https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7', '.tgz', '?raw=true')
+ 2. ('http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin',
+ '.tar.gz', '')
+ """
+ prefix, ext, suffix = path, '', ''
+
+ # Strip off sourceforge download suffix.
+ match = re.search(r'((?:sourceforge.net|sf.net)/.*)(/download)$', path)
+ if match:
+ prefix, suffix = match.groups()
+
+ ext = comp.extension(prefix)
+ if ext is not None:
+ prefix = comp.strip_extension(prefix)
+
+ else:
+ prefix, suf = strip_query_and_fragment(prefix)
+ ext = comp.extension(prefix)
+ prefix = comp.strip_extension(prefix)
+ suffix = suf + suffix
+ if ext is None:
+ ext = ''
+
+ return prefix, ext, suffix
+
+
+def downloaded_file_extension(path):
+ """This returns the type of archive a URL refers to. This is
+ sometimes confusing because of URLs like:
+
+ (1) https://github.com/petdance/ack/tarball/1.93_02
+
+ Where the URL doesn't actually contain the filename. We need
+ to know what type it is so that we can appropriately name files
+ in mirrors.
+ """
+ match = re.search(r'github.com/.+/(zip|tar)ball/', path)
+ if match:
+ if match.group(1) == 'zip': return 'zip'
+ elif match.group(1) == 'tar': return 'tar.gz'
+
+ prefix, ext, suffix = split_url_extension(path)
+ if not ext:
+ raise UrlParseError("Cannot deduce archive type in %s" % path, path)
+ return ext
+
+
+def parse_version_offset(path):
"""Try to extract a version string from a filename or URL. This is taken
largely from Homebrew's Version class."""
+ original_path = path
- # Strip off sourceforge download stuffix.
- if re.search(r'((?:sourceforge.net|sf.net)/.*)/download$', path):
- path = os.path.dirname(path)
-
- # Strip archive extension
- path = comp.strip_extension(path)
+ path, ext, suffix = split_url_extension(path)
- # Take basename to avoid including parent dirs in version name
- # Remember the offset of the stem in the full path.
+ # Allow matches against the basename, to avoid including parent
+ # dirs in the version name. Remember the offset of the stem in the path.
stem = os.path.basename(path)
+ offset = len(path) - len(stem)
version_types = [
# GitHub tarballs, e.g. v1.2.3
@@ -127,11 +187,15 @@ def parse_version_string_with_indices(path):
(r'github.com/.+/(?:zip|tar)ball/v?((\d+\.)+\d+_(\d+))$', path),
# e.g. https://github.com/hpc/lwgrp/archive/v1.0.1.tar.gz
- (r'github.com/[^/]+/[^/]+/archive/v?(\d+(?:\.\d+)*)\.tar\.gz$', path),
+ (r'github.com/[^/]+/[^/]+/archive/v?(\d+(?:\.\d+)*)$', path),
# e.g. https://github.com/erlang/otp/tarball/OTP_R15B01 (erlang style)
(r'[-_](R\d+[AB]\d*(-\d+)?)', path),
+ # e.g., https://github.com/hpc/libcircle/releases/download/0.2.1-rc.1/libcircle-0.2.1-rc.1.tar.gz
+ # e.g., https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz
+ (r'github.com/[^/]+/[^/]+/releases/download/v?([^/]+)/.*$', path),
+
# e.g. boost_1_39_0
(r'((\d+_)+\d+)$', stem),
@@ -146,7 +210,7 @@ def parse_version_string_with_indices(path):
(r'-((\d+\.)*\d+)$', stem),
# e.g. foobar-4.5.1b
- (r'-((\d+\.)*\d+([a-z]|rc|RC)\d*)$', stem),
+ (r'-((\d+\.)*\d+\-?([a-z]|rc|RC|tp|TP)\d*)$', stem),
# e.g. foobar-4.5.0-beta1, or foobar-4.50-beta
(r'-((\d+\.)*\d+-beta(\d+)?)$', stem),
@@ -172,45 +236,77 @@ def parse_version_string_with_indices(path):
# e.g. http://www.ijg.org/files/jpegsrc.v8d.tar.gz
(r'\.v(\d+[a-z]?)', stem)]
- for vtype in version_types:
- regex, match_string = vtype[:2]
+ for i, vtype in enumerate(version_types):
+ regex, match_string = vtype
match = re.search(regex, match_string)
if match and match.group(1) is not None:
version = match.group(1)
- start = path.index(version)
- return version, start, start+len(version)
+ start = match.start(1)
- raise UndetectableVersionError(path)
+ # if we matched from the basename, then add offset in.
+ if match_string is stem:
+ start += offset
+
+ return version, start, len(version)
+
+ raise UndetectableVersionError(original_path)
def parse_version(path):
"""Given a URL or archive name, extract a version from it and return
a version object.
"""
- ver, start, end = parse_version_string_with_indices(path)
+ ver, start, l = parse_version_offset(path)
return Version(ver)
-def parse_name(path, ver=None):
- if ver is None:
- ver = parse_version(path)
-
- ntypes = (r'/sourceforge/([^/]+)/',
- r'/([^/]+)/(tarball|zipball)/',
- r'/([^/]+)[_.-](bin|dist|stable|src|sources)[_.-]%s' % ver,
- r'github.com/[^/]+/([^/]+)/archive',
- r'/([^/]+)[_.-]v?%s' % ver,
- r'/([^/]+)%s' % ver,
- r'^([^/]+)[_.-]v?%s' % ver,
- r'^([^/]+)%s' % ver)
-
- for nt in ntypes:
- match = re.search(nt, path)
+def parse_name_offset(path, v=None):
+ if v is None:
+ v = parse_version(path)
+
+ path, ext, suffix = split_url_extension(path)
+
+ # Allow matching with either path or stem, as with the version.
+ stem = os.path.basename(path)
+ offset = len(path) - len(stem)
+
+ name_types = [
+ (r'/sourceforge/([^/]+)/', path),
+ (r'github.com/[^/]+/[^/]+/releases/download/%s/(.*)-%s$' % (v, v), path),
+ (r'/([^/]+)/(tarball|zipball)/', path),
+ (r'/([^/]+)[_.-](bin|dist|stable|src|sources)[_.-]%s' % v, path),
+ (r'github.com/[^/]+/([^/]+)/archive', path),
+
+ (r'([^/]+)[_.-]v?%s' % v, stem), # prefer the stem
+ (r'([^/]+)%s' % v, stem),
+
+ (r'/([^/]+)[_.-]v?%s' % v, path), # accept the path if name is not in stem.
+ (r'/([^/]+)%s' % v, path),
+
+ (r'^([^/]+)[_.-]v?%s' % v, path),
+ (r'^([^/]+)%s' % v, path)]
+
+ for i, name_type in enumerate(name_types):
+ regex, match_string = name_type
+ match = re.search(regex, match_string)
if match:
- return match.group(1)
+ name = match.group(1)
+ start = match.start(1)
+
+ # if we matched from the basename, then add offset in.
+ if match_string is stem:
+ start += offset
+
+ return name, start, len(name)
+
raise UndetectableNameError(path)
+def parse_name(path, ver=None):
+ name, start, l = parse_name_offset(path, ver)
+ return name
+
+
def parse_name_and_version(path):
ver = parse_version(path)
name = parse_name(path, ver)
@@ -218,7 +314,7 @@ def parse_name_and_version(path):
def insensitize(string):
- """Chagne upper and lowercase letters to be case insensitive in
+ """Change upper and lowercase letters to be case insensitive in
the provided string. e.g., 'a' because '[Aa]', 'B' becomes
'[bB]', etc. Use for building regexes."""
def to_ins(match):
@@ -227,12 +323,53 @@ def insensitize(string):
return re.sub(r'([a-zA-Z])', to_ins, string)
-def substitute_version(path, new_version):
- """Given a URL or archive name, find the version in the path and substitute
- the new version for it.
+def cumsum(elts, init=0, fn=lambda x:x):
+ """Return cumulative sum of result of fn on each element in elts."""
+ sums = []
+ s = init
+ for i, e in enumerate(elts):
+ sums.append(s)
+ s += fn(e)
+ return sums
+
+
+def substitution_offsets(path):
+ """This returns offsets for substituting versions and names in the provided path.
+ It is a helper for substitute_version().
"""
- ver, start, end = parse_version_string_with_indices(path)
- return path[:start] + str(new_version) + path[end:]
+ # Get name and version offsets
+ try:
+ ver, vs, vl = parse_version_offset(path)
+ name, ns, nl = parse_name_offset(path, ver)
+ except UndetectableNameError, e:
+ return (None, -1, -1, (), ver, vs, vl, (vs,))
+ except UndetectableVersionError, e:
+ return (None, -1, -1, (), None, -1, -1, ())
+
+ # protect extensions like bz2 from getting inadvertently
+ # considered versions.
+ ext = comp.extension(path)
+ path = comp.strip_extension(path)
+
+ # Construct a case-insensitive regular expression for the package name.
+ name_re = '(%s)' % insensitize(name)
+
+ # Split the string apart by things that match the name so that if the
+ # name contains numbers or things that look like versions, we don't
+ # accidentally substitute them with a version.
+ name_parts = re.split(name_re, path)
+
+ offsets = cumsum(name_parts, 0, len)
+ name_offsets = offsets[1::2]
+
+ ver_offsets = []
+ for i in xrange(0, len(name_parts), 2):
+ vparts = re.split(ver, name_parts[i])
+ voffsets = cumsum(vparts, offsets[i], len)
+ ver_offsets.extend(voffsets[1::2])
+
+ return (name, ns, nl, tuple(name_offsets),
+ ver, vs, vl, tuple(ver_offsets))
def wildcard_version(path):
@@ -242,6 +379,8 @@ def wildcard_version(path):
# Get name and version, so we can treat them specially
name, v = parse_name_and_version(path)
+ path, ext, suffix = split_url_extension(path)
+
# Construct a case-insensitive regular expression for the package name.
name_re = '(%s)' % insensitize(name)
@@ -261,4 +400,111 @@ def wildcard_version(path):
name_parts[i] = vgroup.join(re.escape(vp) for vp in vparts)
# Put it all back together with original name matches intact.
- return ''.join(name_parts)
+ result = ''.join(name_parts)
+ if ext:
+ result += '.' + ext
+ result += suffix
+ return result
+
+
+def substitute_version(path, new_version):
+ """Given a URL or archive name, find the version in the path and
+ substitute the new version for it. Replace all occurrences of
+ the version *if* they don't overlap with the package name.
+
+ Simple example::
+ substitute_version('http://www.mr511.de/software/libelf-0.8.13.tar.gz', '2.9.3')
+ ->'http://www.mr511.de/software/libelf-2.9.3.tar.gz'
+
+ Complex examples::
+ substitute_version('http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.0.tar.gz', 2.1)
+ -> 'http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.1.tar.gz'
+
+ # In this string, the "2" in mvapich2 is NOT replaced.
+ substitute_version('http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.tar.gz', 2.1)
+ -> 'http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.1.tar.gz'
+
+ """
+ (name, ns, nl, noffs,
+ ver, vs, vl, voffs) = substitution_offsets(path)
+
+ new_path = ''
+ last = 0
+ for vo in voffs:
+ new_path += path[last:vo]
+ new_path += str(new_version)
+ last = vo + vl
+
+ new_path += path[last:]
+ return new_path
+
+
+def color_url(path, **kwargs):
+ """Color the parts of the url according to Spack's parsing.
+
+ Colors are:
+ Cyan: The version found by parse_version_offset().
+ Red: The name found by parse_name_offset().
+
+ Green: Instances of version string substituted by substitute_version().
+ Magenta: Instances of the name (protected from substitution).
+
+ Optional args:
+ errors=True Append parse errors at end of string.
+ subs=True Color substitutions as well as parsed name/version.
+
+ """
+ errors = kwargs.get('errors', False)
+ subs = kwargs.get('subs', False)
+
+ (name, ns, nl, noffs,
+ ver, vs, vl, voffs) = substitution_offsets(path)
+
+ nends = [no + nl - 1 for no in noffs]
+ vends = [vo + vl - 1 for vo in voffs]
+
+ nerr = verr = 0
+ out = StringIO()
+ for i in range(len(path)):
+ if i == vs: out.write('@c'); verr += 1
+ elif i == ns: out.write('@r'); nerr += 1
+ elif subs:
+ if i in voffs: out.write('@g')
+ elif i in noffs: out.write('@m')
+
+ out.write(path[i])
+
+ if i == vs + vl - 1: out.write('@.'); verr += 1
+ elif i == ns + nl - 1: out.write('@.'); nerr += 1
+ elif subs:
+ if i in vends or i in nends:
+ out.write('@.')
+
+ if errors:
+ if nerr == 0: out.write(" @r{[no name]}")
+ if verr == 0: out.write(" @r{[no version]}")
+ if nerr == 1: out.write(" @r{[incomplete name]}")
+ if verr == 1: out.write(" @r{[incomplete version]}")
+
+ return colorize(out.getvalue())
+
+
+class UrlParseError(spack.error.SpackError):
+ """Raised when the URL module can't parse something correctly."""
+ def __init__(self, msg, path):
+ super(UrlParseError, self).__init__(msg)
+ self.path = path
+
+
+class UndetectableVersionError(UrlParseError):
+ """Raised when we can't parse a version from a string."""
+ def __init__(self, path):
+ super(UndetectableVersionError, self).__init__(
+ "Couldn't detect version in: " + path, path)
+
+
+class UndetectableNameError(UrlParseError):
+ """Raised when we can't parse a package name from a string."""
+ def __init__(self, path):
+ super(UndetectableNameError, self).__init__(
+ "Couldn't parse package name in: " + path, path)
diff --git a/lib/spack/spack/util/compression.py b/lib/spack/spack/util/compression.py
index b4db266fd3..fd17785ad0 100644
--- a/lib/spack/spack/util/compression.py
+++ b/lib/spack/spack/util/compression.py
@@ -61,13 +61,15 @@ def strip_extension(path):
def extension(path):
"""Get the archive extension for a path."""
+ if path is None:
+ raise ValueError("Can't call extension() on None")
# Strip sourceforge suffix.
if re.search(r'((?:sourceforge.net|sf.net)/.*)/download$', path):
path = os.path.dirname(path)
- for type in ALLOWED_ARCHIVE_TYPES:
- suffix = r'\.%s$' % type
+ for t in ALLOWED_ARCHIVE_TYPES:
+ suffix = r'\.%s$' % t
if re.search(suffix, path):
- return type
+ return t
return None
diff --git a/lib/spack/spack/util/environment.py b/lib/spack/spack/util/environment.py
index 435d912185..afdf51c707 100644
--- a/lib/spack/spack/util/environment.py
+++ b/lib/spack/spack/util/environment.py
@@ -24,6 +24,15 @@
##############################################################################
import os
+
+def get_path(name):
+ path = os.environ.get(name, "").strip()
+ if path:
+ return path.split(":")
+ else:
+ return []
+
+
def env_flag(name):
if name in os.environ:
return os.environ[name].lower() == "true"
diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py
index ba42cb37b5..1420d62a77 100644
--- a/lib/spack/spack/util/web.py
+++ b/lib/spack/spack/util/web.py
@@ -25,7 +25,7 @@
import re
import sys
import subprocess
-import urllib2
+import urllib2, cookielib
import urlparse
from multiprocessing import Pool
from HTMLParser import HTMLParser, HTMLParseError
@@ -68,7 +68,7 @@ def _spider(args):
pool. Firing off all the child links at once makes the fetch MUCH
faster for pages with lots of children.
"""
- url, depth, max_depth, raise_on_error = args
+ url, visited, root, opener, depth, max_depth, raise_on_error = args
pages = {}
try:
@@ -82,12 +82,12 @@ def _spider(args):
resp = urllib2.urlopen(req, timeout=TIMEOUT)
if not "Content-type" in resp.headers:
- tty.warn("ignoring page " + url)
+ tty.debug("ignoring page " + url)
return pages
if not resp.headers["Content-type"].startswith('text/html'):
- tty.warn("ignoring page " + url + " with content type " +
- resp.headers["Content-type"])
+ tty.debug("ignoring page " + url + " with content type " +
+ resp.headers["Content-type"])
return pages
# Do the real GET request when we know it's just HTML.
@@ -114,15 +114,30 @@ def _spider(args):
# Evaluate the link relative to the page it came from.
abs_link = urlparse.urljoin(response_url, raw_link)
- subcalls.append((abs_link, depth+1, max_depth, raise_on_error))
+
+ # Skip things outside the root directory
+ if not abs_link.startswith(root):
+ continue
+
+ # Skip already-visited links
+ if abs_link in visited:
+ continue
+
+ subcalls.append((abs_link, visited, root, None, depth+1, max_depth, raise_on_error))
+ visited.add(abs_link)
if subcalls:
- pool = Pool(processes=len(subcalls))
- dicts = pool.map(_spider, subcalls)
- for d in dicts:
- pages.update(d)
+ try:
+ pool = Pool(processes=len(subcalls))
+ dicts = pool.map(_spider, subcalls)
+ for d in dicts:
+ pages.update(d)
+ finally:
+ pool.terminate()
+ pool.join()
except urllib2.URLError, e:
+ tty.debug(e)
if raise_on_error:
raise spack.error.NoNetworkConnectionError(str(e), url)
@@ -137,7 +152,8 @@ def _spider(args):
tty.warn(msg, url, "HTMLParseError: " + str(e))
except Exception, e:
- pass # Other types of errors are completely ignored.
+ # Other types of errors are completely ignored, except in debug mode.
+ tty.debug("Error in _spider: %s" % e)
return pages
@@ -151,5 +167,5 @@ def get_pages(root_url, **kwargs):
performance over a sequential fetch.
"""
max_depth = kwargs.setdefault('depth', 1)
- pages = _spider((root_url, 1, max_depth, False))
+ pages = _spider((root_url, set(), root_url, None, 1, max_depth, False))
return pages
diff --git a/lib/spack/spack/virtual.py b/lib/spack/spack/virtual.py
index 960212eba9..ff5d7c9035 100644
--- a/lib/spack/spack/virtual.py
+++ b/lib/spack/spack/virtual.py
@@ -26,20 +26,21 @@
The ``virtual`` module contains utility classes for virtual dependencies.
"""
import spack.spec
+import itertools
class ProviderIndex(object):
"""This is a dict of dicts used for finding providers of particular
virtual dependencies. The dict of dicts looks like:
{ vpkg name :
- { full vpkg spec : package providing spec } }
+ { full vpkg spec : set(packages providing spec) } }
Callers can use this to first find which packages provide a vpkg,
then find a matching full spec. e.g., in this scenario:
{ 'mpi' :
- { mpi@:1.1 : mpich,
- mpi@:2.3 : mpich2@1.9: } }
+ { mpi@:1.1 : set([mpich]),
+ mpi@:2.3 : set([mpich2@1.9:]) } }
Calling providers_for(spec) will find specs that provide a
matching implementation of MPI.
@@ -75,15 +76,19 @@ class ProviderIndex(object):
if provided_name not in self.providers:
self.providers[provided_name] = {}
+ provider_map = self.providers[provided_name]
+ if not provided_spec in provider_map:
+ provider_map[provided_spec] = set()
+
if self.restrict:
- self.providers[provided_name][provided_spec] = spec
+ provider_map[provided_spec].add(spec)
else:
# Before putting the spec in the map, constrain it so that
# it provides what was asked for.
constrained = spec.copy()
constrained.constrain(provider_spec)
- self.providers[provided_name][provided_spec] = constrained
+ provider_map[provided_spec].add(constrained)
def providers_for(self, *vpkg_specs):
@@ -97,9 +102,9 @@ class ProviderIndex(object):
# Add all the providers that satisfy the vpkg spec.
if vspec.name in self.providers:
- for provider_spec, spec in self.providers[vspec.name].items():
+ for provider_spec, spec_set in self.providers[vspec.name].items():
if provider_spec.satisfies(vspec, deps=False):
- providers.add(spec)
+ providers.update(spec_set)
# Return providers in order
return sorted(providers)
@@ -108,16 +113,22 @@ class ProviderIndex(object):
# TODO: this is pretty darned nasty, and inefficient.
def _cross_provider_maps(self, lmap, rmap):
result = {}
- for lspec in lmap:
- for rspec in rmap:
- try:
- constrained = lspec.copy().constrain(rspec)
- if lmap[lspec].name != rmap[rspec].name:
+ for lspec, rspec in itertools.product(lmap, rmap):
+ try:
+ constrained = lspec.copy().constrain(rspec)
+ except spack.spec.UnsatisfiableSpecError:
+ continue
+
+ # lp and rp are left and right provider specs.
+ for lp_spec, rp_spec in itertools.product(lmap[lspec], rmap[rspec]):
+ if lp_spec.name == rp_spec.name:
+ try:
+ const = lp_spec.copy().constrain(rp_spec,deps=False)
+ if constrained not in result:
+ result[constrained] = set()
+ result[constrained].add(const)
+ except spack.spec.UnsatisfiableSpecError:
continue
- result[constrained] = lmap[lspec].copy().constrain(
- rmap[rspec], deps=False)
- except spack.spec.UnsatisfiableSpecError:
- continue
return result
@@ -132,6 +143,8 @@ class ProviderIndex(object):
if not common:
return True
+ # This ensures that some provider in other COULD satisfy the
+ # vpkg constraints on self.
result = {}
for name in common:
crossed = self._cross_provider_maps(self.providers[name],