Diffstat (limited to 'lib')
 lib/spack/docs/basic_usage.rst             | 272
 lib/spack/docs/configuration.rst           |  23
 lib/spack/docs/packaging_guide.rst         | 140
 lib/spack/spack/__init__.py                |   3
 lib/spack/spack/build_environment.py       |  14
 lib/spack/spack/cmd/create.py              |   2
 lib/spack/spack/cmd/diy.py                 |   2
 lib/spack/spack/cmd/fetch.py               |   2
 lib/spack/spack/cmd/info.py                |  14
 lib/spack/spack/cmd/install.py             |   5
 lib/spack/spack/cmd/mirror.py              |   2
 lib/spack/spack/cmd/module.py              |   4
 lib/spack/spack/cmd/package-list.py        |  14
 lib/spack/spack/cmd/setup.py               |   2
 lib/spack/spack/cmd/test-install.py        |   7
 lib/spack/spack/concretize.py              |  16
 lib/spack/spack/database.py                |  22
 lib/spack/spack/directives.py              |  39
 lib/spack/spack/directory_layout.py        |   4
 lib/spack/spack/fetch_strategy.py          |   9
 lib/spack/spack/graph.py                   |  10
 lib/spack/spack/modules.py                 |   2
 lib/spack/spack/package.py                 |  38
 lib/spack/spack/platforms/linux.py         |  15
 lib/spack/spack/spec.py                    | 352
 lib/spack/spack/test/cmd/test_install.py   |  33
 lib/spack/spack/test/concretize.py         |  24
 lib/spack/spack/test/mock_packages_test.py |  26
 lib/spack/spack/test/mock_repo.py          |   4
 lib/spack/spack/test/spec_dag.py           |  88
 lib/spack/spack/test/stage.py              |   4
 lib/spack/spack/test/versions.py           |  39
 lib/spack/spack/version.py                 |  27
33 files changed, 914 insertions, 344 deletions
diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst
index d8d5a6a23a..90ba1164e6 100644
--- a/lib/spack/docs/basic_usage.rst
+++ b/lib/spack/docs/basic_usage.rst
@@ -114,13 +114,13 @@ that the packages is installed:
$ spack install mpileaks
==> Installing mpileaks
- ==> mpich is already installed in /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/mpich@3.0.4.
- ==> callpath is already installed in /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/callpath@1.0.2-5dce4318.
- ==> adept-utils is already installed in /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/adept-utils@1.0-5adef8da.
+ ==> mpich is already installed in /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/mpich@3.0.4.
+ ==> callpath is already installed in /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/callpath@1.0.2-5dce4318.
+ ==> adept-utils is already installed in /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/adept-utils@1.0-5adef8da.
==> Trying to fetch from https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz
######################################################################## 100.0%
- ==> Staging archive: /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7 arch=chaos_5_x86_64_ib-59f6ad23/mpileaks-1.0.tar.gz
- ==> Created stage in /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7 arch=chaos_5_x86_64_ib-59f6ad23.
+ ==> Staging archive: /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7 arch=linux-debian7-x86_64-59f6ad23/mpileaks-1.0.tar.gz
+ ==> Created stage in /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7 arch=linux-debian7-x86_64-59f6ad23.
==> No patches needed for mpileaks.
==> Building mpileaks.
@@ -128,7 +128,7 @@ that the packages is installed:
==> Successfully installed mpileaks.
Fetch: 2.16s. Build: 9.82s. Total: 11.98s.
- [+] /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/mpileaks@1.0-59f6ad23
+ [+] /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/mpileaks@1.0-59f6ad23
The last line, with the ``[+]``, indicates where the package is
installed.
@@ -183,7 +183,7 @@ To uninstall a package and every package that depends on it, you may give the
spack uninstall --dependents mpich
-will display a list of all the packages that depends on `mpich` and, upon confirmation,
+will display a list of all the packages that depend on `mpich` and, upon confirmation,
will uninstall them in the right order.
A line like
@@ -230,7 +230,7 @@ Running ``spack find`` with no arguments lists installed packages:
$ spack find
==> 74 installed packages.
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
ImageMagick@6.8.9-10 libdwarf@20130729 py-dateutil@2.4.0
adept-utils@1.0 libdwarf@20130729 py-ipython@2.3.1
atk@2.14.0 libelf@0.8.12 py-matplotlib@1.4.2
@@ -256,7 +256,7 @@ Running ``spack find`` with no arguments lists installed packages:
lcms@2.6 pixman@0.32.6 xz@5.2.0
libdrm@2.4.33 py-dateutil@2.4.0 zlib@1.2.8
- -- chaos_5_x86_64_ib / gcc@4.9.2 --------------------------------
+ -- linux-debian7-x86_64 / gcc@4.9.2 --------------------------------
libelf@0.8.10 mpich@3.0.4
Packages are divided into groups according to their architecture and
@@ -279,7 +279,7 @@ in more detail using ``spack find -d``, and by asking only to show
$ spack find --deps libdwarf
==> 2 installed packages.
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
libdwarf@20130729-d9b90962
^libelf@0.8.12
libdwarf@20130729-b52fac98
@@ -295,7 +295,7 @@ want to know whether two packages' dependencies differ, you can use
$ spack find -l libdwarf
==> 2 installed packages.
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
libdwarf@20130729-d9b90962 libdwarf@20130729-b52fac98
 Now the ``libdwarf`` installs have hashes after their names. These are
@@ -309,14 +309,14 @@ use ``spack find -p``:
$ spack find -p
==> 74 installed packages.
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
- ImageMagick@6.8.9-10 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/ImageMagick@6.8.9-10-4df950dd
- adept-utils@1.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/adept-utils@1.0-5adef8da
- atk@2.14.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/atk@2.14.0-3d09ac09
- boost@1.55.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/boost@1.55.0
- bzip2@1.0.6 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/bzip2@1.0.6
- cairo@1.14.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/cairo@1.14.0-fcc2ab44
- callpath@1.0.2 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/callpath@1.0.2-5dce4318
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
+ ImageMagick@6.8.9-10 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/ImageMagick@6.8.9-10-4df950dd
+ adept-utils@1.0 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/adept-utils@1.0-5adef8da
+ atk@2.14.0 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/atk@2.14.0-3d09ac09
+ boost@1.55.0 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/boost@1.55.0
+ bzip2@1.0.6 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/bzip2@1.0.6
+ cairo@1.14.0 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/cairo@1.14.0-fcc2ab44
+ callpath@1.0.2 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/callpath@1.0.2-5dce4318
...
And, finally, you can restrict your search to a particular package
@@ -325,10 +325,10 @@ by supplying its name:
.. code-block:: sh
$ spack find -p libelf
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
- libelf@0.8.11 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.11
- libelf@0.8.12 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.12
- libelf@0.8.13 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.13
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
+ libelf@0.8.11 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/libelf@0.8.11
+ libelf@0.8.12 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/libelf@0.8.12
+ libelf@0.8.13 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/libelf@0.8.13
``spack find`` actually does a lot more than this. You can use
*specs* to query for specific configurations and builds of each
@@ -338,7 +338,7 @@ package. If you want to find only libelf versions greater than version
.. code-block:: sh
$ spack find libelf@0.8.12:
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
libelf@0.8.12 libelf@0.8.13
Finding just the versions of libdwarf built with a particular version
@@ -348,7 +348,7 @@ of libelf would look like this:
$ spack find -l libdwarf ^libelf@0.8.12
==> 1 installed packages.
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
libdwarf@20130729-d9b90962
We can also search for packages that have a certain attribute. For example,
@@ -359,6 +359,7 @@ will find every installed package with a 'debug' compile-time option enabled.
The full spec syntax is discussed in detail in :ref:`sec-specs`.
+.. _compiler-config:
Compiler configuration
-----------------------------------
@@ -445,7 +446,7 @@ If you want to see specifics on a particular compiler, you can run
fc = /usr/local/bin/ifort-15.0.090
This shows which C, C++, and Fortran compilers were detected by Spack.
-Notice also that we didn't have to be too specific about the
+Notice also that we didn't have to be too specific about the
version. We just said ``intel@15``, and information about the only
matching Intel compiler was displayed.
@@ -460,19 +461,17 @@ editing your ``~/.spack/compilers.yaml`` file. You can do this by running
Each compiler configuration in the file looks like this::
...
- chaos_5_x86_64_ib:
- ...
- intel@15.0.0:
+ compilers:
+ - compiler:
+ modules: []
+ operating_system: OS
+ paths:
cc: /usr/local/bin/icc-15.0.024-beta
cxx: /usr/local/bin/icpc-15.0.024-beta
f77: /usr/local/bin/ifort-15.0.024-beta
fc: /usr/local/bin/ifort-15.0.024-beta
- ...
-The chaos_5_x86_64_ib string is an architecture string, and multiple
-compilers can be listed underneath an architecture. The architecture
-string may be replaced with the string 'all' to signify compilers that
-work on all architectures.
+ spec: intel@15.0.0
For compilers, like ``clang``, that do not support Fortran, put
``None`` for ``f77`` and ``fc``::
@@ -488,10 +487,11 @@ list displayed by ``spack compilers``.
You can also add compiler flags to manually configured compilers. The
valid flags are ``cflags``, ``cxxflags``, ``fflags``, ``cppflags``,
-``ldflags``, and ``ldlibs``. For example,::
+``ldflags``, and ``ldlibs``. For example::
...
- chaos_5_x86_64_ib:
+ compilers:
+ - compiler:
...
intel@15.0.0:
cc: /usr/local/bin/icc-15.0.024-beta
@@ -518,10 +518,10 @@ Spack, that descriptor is called a *spec*. Spack uses specs to refer
to a particular build configuration (or configurations) of a package.
Specs are more than a package name and a version; you can use them to
specify the compiler, compiler version, architecture, compile options,
-and dependency options for a build. In this section, we'll go over
+and dependency options for a build. In this section, we'll go over
the full syntax of specs.
-Here is an example of a much longer spec than we've seen thus far::
+Here is an example of a much longer spec than we've seen thus far::
mpileaks @1.2:1.4 %gcc@4.7.5 +debug -qt arch=bgq_os ^callpath @1.1 %gcc@4.7.2
@@ -546,8 +546,8 @@ More formally, a spec consists of the following pieces:
boolean variants
* ``name=<value>`` Optional compiler flag specifiers. Valid flag names are
``cflags``, ``cxxflags``, ``fflags``, ``cppflags``, ``ldflags``, and ``ldlibs``.
-* ``arch=<value>`` Optional architecture specifier (``arch=bgq_os``)
-* ``^`` Dependency specs (``^callpath@1.1``)
+* ``target=<value> os=<value>`` Optional architecture specifier
+  (``target=haswell os=CNL10``)
+* ``^`` Dependency specs (``^callpath@1.1``)
There are two things to notice here. The first is that specs are
recursively defined. That is, each dependency after ``^`` is a spec
@@ -626,7 +626,7 @@ compilers, variants, and architectures just like any other spec.
Specifiers are associated with the nearest package name to their left.
For example, above, ``@1.1`` and ``%gcc@4.7.2`` associates with the
``callpath`` package, while ``@1.2:1.4``, ``%gcc@4.7.5``, ``+debug``,
-``-qt``, and ``arch=bgq_os`` all associate with the ``mpileaks`` package.
+``-qt``, and ``target=haswell os=CNL10`` all associate with the ``mpileaks`` package.
In the diagram above, ``mpileaks`` depends on ``mpich`` with an
unspecified version, but packages can depend on other packages with
@@ -758,14 +758,18 @@ in gnu autotools. If all flags are set, the order is
Architecture specifiers
~~~~~~~~~~~~~~~~~~~~~~~
-.. Note::
+The architecture can be specified by using the reserved
+words ``target`` and/or ``os`` (``target=x86-64 os=debian7``). You can also
+use the triplet form of platform, operating system and processor.
- Architecture specifiers are part of specs but are not yet
- functional. They will be in Spack version 1.0, due in Q3 2015.
+.. code-block:: sh
+
+ spack install libelf arch=cray_xc-CNL10-haswell
-The architecture specifier looks identical to a variant specifier for a
-non-boolean variant. The architecture can be specified only using the
-reserved name ``arch`` (``arch=bgq_os``).
+Users on non-Cray systems won't have to worry about specifying the architecture.
+Spack will autodetect the operating system on your machine as well as the
+processor. For more information on how the architecture can be used on Cray
+machines, see :ref:`spack-cray`.
.. _sec-virtual-dependencies:
@@ -985,7 +989,7 @@ of installed packages.
$ module avail
- ------- /home/gamblin2/spack/share/spack/modules/chaos_5_x86_64_ib --------
+ ------- /home/gamblin2/spack/share/spack/modules/linux-debian7-x86_64 --------
adept-utils@1.0%gcc@4.4.7-5adef8da libelf@0.8.13%gcc@4.4.7
automaded@1.0%gcc@4.4.7-d9691bb0 libelf@0.8.13%intel@15.0.0
boost@1.55.0%gcc@4.4.7 mpc@1.0.2%gcc@4.4.7-559607f5
@@ -1056,7 +1060,7 @@ Spack. For example, this will add the ``mpich`` package built with
$ spack use mpich %gcc@4.4.7
Prepending: mpich@3.0.4%gcc@4.4.7 (ok)
$ which mpicc
- ~/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/mpich@3.0.4/bin/mpicc
+ ~/src/spack/opt/linux-debian7-x86_64/gcc@4.4.7/mpich@3.0.4/bin/mpicc
Or, similarly with modules, you could type:
@@ -1089,8 +1093,8 @@ than one installed package matches it), then Spack will warn you:
$ spack load libelf
==> Error: Multiple matches for spec libelf. Choose one:
- libelf@0.8.13%gcc@4.4.7 arch=chaos_5_x86_64_ib
- libelf@0.8.13%intel@15.0.0 arch=chaos_5_x86_64_ib
+ libelf@0.8.13%gcc@4.4.7 arch=linux-debian7-x86_64
+ libelf@0.8.13%intel@15.0.0 arch=linux-debian7-x86_64
You can either type the ``spack load`` command again with a fully
qualified argument, or you can add just enough extra constraints to
@@ -1150,7 +1154,7 @@ Modules may be loaded recursively with the command:
More than one spec may be placed on the command line here.
-Module Comamnds for Shell Scripts
+Module Commands for Shell Scripts
``````````````````````````````````
 Although Spack is flexible, the ``module`` command is much faster.
@@ -1543,7 +1547,7 @@ an *extension*. Suppose you have Python installed like so:
$ spack find python
==> 1 installed packages.
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
python@2.7.8
.. _spack-extensions:
@@ -1556,7 +1560,7 @@ You can find extensions for your Python installation like this:
.. code-block:: sh
$ spack extensions python
- ==> python@2.7.8%gcc@4.4.7 arch=chaos_5_x86_64_ib-703c7a96
+ ==> python@2.7.8%gcc@4.4.7 arch=linux-debian7-x86_64-703c7a96
==> 36 extensions:
geos py-ipython py-pexpect py-pyside py-sip
py-basemap py-libxml2 py-pil py-pytz py-six
@@ -1568,7 +1572,7 @@ You can find extensions for your Python installation like this:
py-h5py py-numpy py-pyqt py-shiboken
==> 12 installed:
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
py-dateutil@2.4.0 py-nose@1.3.4 py-pyside@1.2.2
py-dateutil@2.4.0 py-numpy@1.9.1 py-pytz@2014.10
py-ipython@2.3.1 py-pygments@2.0.1 py-setuptools@11.3.1
@@ -1584,8 +1588,8 @@ prefixes, and you can see this with ``spack find -p``:
$ spack find -p py-numpy
==> 1 installed packages.
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
- py-numpy@1.9.1 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/py-numpy@1.9.1-66733244
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
+ py-numpy@1.9.1 /g/g21/gamblin2/src/spack/opt/linux-debian7-x86_64/gcc@4.4.7/py-numpy@1.9.1-66733244
However, even though this package is installed, you cannot use it
directly when you run ``python``:
@@ -1646,9 +1650,9 @@ installation:
.. code-block:: sh
$ spack activate py-numpy
- ==> Activated extension py-setuptools@11.3.1%gcc@4.4.7 arch=chaos_5_x86_64_ib-3c74eb69 for python@2.7.8%gcc@4.4.7.
- ==> Activated extension py-nose@1.3.4%gcc@4.4.7 arch=chaos_5_x86_64_ib-5f70f816 for python@2.7.8%gcc@4.4.7.
- ==> Activated extension py-numpy@1.9.1%gcc@4.4.7 arch=chaos_5_x86_64_ib-66733244 for python@2.7.8%gcc@4.4.7.
+ ==> Activated extension py-setuptools@11.3.1%gcc@4.4.7 arch=linux-debian7-x86_64-3c74eb69 for python@2.7.8%gcc@4.4.7.
+ ==> Activated extension py-nose@1.3.4%gcc@4.4.7 arch=linux-debian7-x86_64-5f70f816 for python@2.7.8%gcc@4.4.7.
+ ==> Activated extension py-numpy@1.9.1%gcc@4.4.7 arch=linux-debian7-x86_64-66733244 for python@2.7.8%gcc@4.4.7.
Several things have happened here. The user requested that
``py-numpy`` be activated in the ``python`` installation it was built
@@ -1663,7 +1667,7 @@ packages listed as activated:
.. code-block:: sh
$ spack extensions python
- ==> python@2.7.8%gcc@4.4.7 arch=chaos_5_x86_64_ib-703c7a96
+ ==> python@2.7.8%gcc@4.4.7 arch=linux-debian7-x86_64-703c7a96
==> 36 extensions:
geos py-ipython py-pexpect py-pyside py-sip
py-basemap py-libxml2 py-pil py-pytz py-six
@@ -1675,14 +1679,14 @@ packages listed as activated:
py-h5py py-numpy py-pyqt py-shiboken
==> 12 installed:
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
py-dateutil@2.4.0 py-nose@1.3.4 py-pyside@1.2.2
py-dateutil@2.4.0 py-numpy@1.9.1 py-pytz@2014.10
py-ipython@2.3.1 py-pygments@2.0.1 py-setuptools@11.3.1
py-matplotlib@1.4.2 py-pyparsing@2.0.3 py-six@1.9.0
==> 3 currently activated:
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
py-nose@1.3.4 py-numpy@1.9.1 py-setuptools@11.3.1
@@ -1711,7 +1715,7 @@ dependencies, you can use ``spack activate -f``:
.. code-block:: sh
$ spack activate -f py-numpy
- ==> Activated extension py-numpy@1.9.1%gcc@4.4.7 arch=chaos_5_x86_64_ib-66733244 for python@2.7.8%gcc@4.4.7.
+ ==> Activated extension py-numpy@1.9.1%gcc@4.4.7 arch=linux-debian7-x86_64-66733244 for python@2.7.8%gcc@4.4.7.
.. _spack-deactivate:
@@ -1743,7 +1747,7 @@ Spack currently needs to be run from a filesystem that supports
``flock`` locking semantics. Nearly all local filesystems and recent
versions of NFS support this, but parallel filesystems may be mounted
without ``flock`` support enabled. You can determine how your
-filesystems are mounted with ``mount -p``. The output for a Lustre
+filesystems are mounted with ``mount -p``. The output for a Lustre
filesystem might look like this:
.. code-block:: sh
@@ -1764,7 +1768,7 @@ This issue typically manifests with the error below:
Traceback (most recent call last):
File "./spack", line 176, in <module>
main()
- File "./spack", line 154, in main
+ File "./spack", line 154, in main
return_val = command(parser, args)
File "./spack/lib/spack/spack/cmd/find.py", line 170, in find
specs = set(spack.installed_db.query(**q_args))
@@ -1782,6 +1786,142 @@ This issue typically manifests with the error below:
A nicer error message is TBD in future versions of Spack.
+
+.. _spack-cray:
+
+Spack on Cray
+-----------------------------
+
+Spack differs slightly when used on a Cray system. The architecture spec
+can differentiate between the front-end and back-end processor and operating system.
+For example, on Edison at NERSC, the back-end target processor
+is "Ivy Bridge", so you can specify to use the back-end this way:
+
+.. code-block:: sh
+
+ spack install zlib target=ivybridge
+
+You can also use the operating system to build against the back-end:
+
+.. code-block:: sh
+
+ spack install zlib os=CNL10
+
+Notice that the name includes both the operating system name and the major
+version number concatenated together.
+
+Alternatively, if you want to build something for the front-end,
+you can specify the front-end target processor. The processor for a login node
+on Edison is "Sandy Bridge", so we specify it on the command line like so:
+
+.. code-block:: sh
+
+ spack install zlib target=sandybridge
+
+And the front-end operating system is:
+
+.. code-block:: sh
+
+ spack install zlib os=SuSE11
+
+
+
+Cray compiler detection
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Spack can detect compilers using two methods. Front-end compilers are detected
+the same way as on any other system. The difference lies in back-end compiler
+detection, which is done via the Tcl ``module avail`` command. Once Spack
+detects a compiler, it writes the appropriate PrgEnv and compiler module names
+to ``compilers.yaml`` and sets the paths to each compiler to Cray's compiler
+wrapper names (i.e. cc, CC, ftn). At build time, Spack will load the correct
+PrgEnv and compiler module and call the appropriate wrapper.
+
+The ``compilers.yaml`` config file will also differ. There is a
+``modules`` section that is filled with the compiler's Programming Environment
+and module name. On other systems this field is an empty list (``[]``)::
+
+ ...
+ - compiler:
+ modules:
+ - PrgEnv-intel
+ - intel/15.0.109
+ ...
+
+As mentioned earlier, the compiler paths will look different on a Cray system.
+Since most compilers are invoked using cc, CC and ftn, the paths for each
+compiler are replaced with their respective Cray compiler wrapper names::
+
+ ...
+ paths:
+ cc: cc
+ cxx: CC
+ f77: ftn
+ fc: ftn
+ ...
+
+These wrapper names are used instead of explicit paths to the compiler
+executables, which allows Spack to call the Cray compiler wrappers at build time.
+
+For more on compiler configuration, check out :ref:`compiler-config`.
+
+Setting defaults and using Cray modules
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If you want to use default compilers for each PrgEnv and also be able
+to load Cray external modules, you will need to set up a ``packages.yaml``.
+
+Here's an example of an external configuration for Cray modules:
+
+.. code-block:: yaml
+
+ packages:
+ mpi:
+ modules:
+ mpich@7.3.1%gcc@5.2.0 arch=cray_xc-haswell-CNL10: cray-mpich
+ mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-haswell-CNL10: cray-mpich
+
+This tells Spack that for any package that depends on mpi, it should load the
+cray-mpich module into the environment. You can then use whatever environment
+variables, libraries, etc., the module brings into the environment when it is
+loaded.
+
+You can set the default compiler that Spack can use for each compiler type.
+If you want to use the Cray defaults, then set them under *all:* in packages.yaml.
+In the compiler field, set the compiler specs in your order of preference.
+Whenever you build with that compiler type, Spack will concretize to that version.
+
+Here is an example of a full packages.yaml used at NERSC:
+
+.. code-block:: yaml
+
+ packages:
+ mpi:
+ modules:
+ mpich@7.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge: cray-mpich
+ mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-SuSE11-ivybridge: cray-mpich
+ buildable: False
+ netcdf:
+ modules:
+ netcdf@4.3.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge: cray-netcdf
+ netcdf@4.3.3.1%intel@16.0.0.109 arch=cray_xc-CNL10-ivybridge: cray-netcdf
+ buildable: False
+ hdf5:
+ modules:
+ hdf5@1.8.14%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge: cray-hdf5
+ hdf5@1.8.14%intel@16.0.0.109 arch=cray_xc-CNL10-ivybridge: cray-hdf5
+ buildable: False
+ all:
+ compiler: [gcc@5.2.0, intel@16.0.0.109]
+
+Here we tell Spack to use gcc@5.2.0 whenever we build with gcc, and
+intel@16.0.0.109 whenever we build with the Intel compilers. We add a spec for
+each compiler type for each of the Cray modules. This ensures that for each
+compiler on our system we can use the corresponding external module.
+
+
+For more on external packages, check out the section :ref:`sec-external_packages`.
+
Getting Help
-----------------------
diff --git a/lib/spack/docs/configuration.rst b/lib/spack/docs/configuration.rst
index a6f876b2aa..f2ffa07264 100644
--- a/lib/spack/docs/configuration.rst
+++ b/lib/spack/docs/configuration.rst
@@ -53,6 +53,7 @@ in the first directory it finds to which it has write access. Add
more elements to the list to indicate where your own site's temporary
directory is.
+.. _sec-external_packages:
External Packages
----------------------------
@@ -70,20 +71,20 @@ directory. Here's an example of an external configuration:
packages:
openmpi:
paths:
- openmpi@1.4.3%gcc@4.4.7 arch=chaos_5_x86_64_ib: /opt/openmpi-1.4.3
- openmpi@1.4.3%gcc@4.4.7 arch=chaos_5_x86_64_ib+debug: /opt/openmpi-1.4.3-debug
- openmpi@1.6.5%intel@10.1 arch=chaos_5_x86_64_ib: /opt/openmpi-1.6.5-intel
+ openmpi@1.4.3%gcc@4.4.7 arch=linux-x86_64-debian7: /opt/openmpi-1.4.3
+ openmpi@1.4.3%gcc@4.4.7 arch=linux-x86_64-debian7+debug: /opt/openmpi-1.4.3-debug
+ openmpi@1.6.5%intel@10.1 arch=linux-x86_64-debian7: /opt/openmpi-1.6.5-intel
This example lists three installations of OpenMPI, one built with gcc,
one built with gcc and debug information, and another built with Intel.
If Spack is asked to build a package that uses one of these MPIs as a
 dependency, it will use the pre-installed OpenMPI in
-the given directory.
+the given directory. ``packages.yaml`` can also be used to specify modules.
Each ``packages.yaml`` begins with a ``packages:`` token, followed
-by a list of package names. To specify externals, add a ``paths``
+by a list of package names. To specify externals, add a ``paths`` or ``modules``
token under the package name, which lists externals in a
-``spec : /path`` format. Each spec should be as
+``spec: /path`` or ``spec: module-name`` format. Each spec should be as
well-defined as reasonably possible. If a
package lacks a spec component, such as missing a compiler or
package version, then Spack will guess the missing component based
@@ -108,9 +109,9 @@ be:
packages:
openmpi:
paths:
- openmpi@1.4.3%gcc@4.4.7 arch=chaos_5_x86_64_ib: /opt/openmpi-1.4.3
- openmpi@1.4.3%gcc@4.4.7 arch=chaos_5_x86_64_ib+debug: /opt/openmpi-1.4.3-debug
- openmpi@1.6.5%intel@10.1 arch=chaos_5_x86_64_ib: /opt/openmpi-1.6.5-intel
+ openmpi@1.4.3%gcc@4.4.7 arch=linux-x86_64-debian7: /opt/openmpi-1.4.3
+ openmpi@1.4.3%gcc@4.4.7 arch=linux-x86_64-debian7+debug: /opt/openmpi-1.4.3-debug
+ openmpi@1.6.5%intel@10.1 arch=linux-x86_64-debian7: /opt/openmpi-1.6.5-intel
buildable: False
The addition of the ``buildable`` flag tells Spack that it should never build
@@ -118,6 +119,9 @@ its own version of OpenMPI, and it will instead always rely on a pre-built
OpenMPI. Similar to ``paths``, ``buildable`` is specified as a property under
a package name.
+If an external module is specified as not buildable, then Spack will load the
+external module into the build environment, where it can be used for linking.
+
The ``buildable`` does not need to be paired with external packages.
It could also be used alone to forbid packages that may be
buggy or otherwise undesirable.
@@ -181,7 +185,6 @@ concretization rules. A provider lists a value that packages may
dependency.
-
Profiling
------------------
diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst
index 6cea4da14f..a0d0feabbd 100644
--- a/lib/spack/docs/packaging_guide.rst
+++ b/lib/spack/docs/packaging_guide.rst
@@ -36,10 +36,11 @@ Creating & editing packages
``spack create``
~~~~~~~~~~~~~~~~~~~~~
-The ``spack create`` command generates a boilerplate package template
-from a URL. The URL should point to a tarball or other software
-archive. In most cases, ``spack create`` plus a few modifications is
-all you need to get a package working.
+The ``spack create`` command creates a directory with the package name and
+generates a ``package.py`` file with a boilerplate package template from a URL.
+The URL should point to a tarball or other software archive. In most cases,
+``spack create`` plus a few modifications is all you need to get a package
+working.
Here's an example:
@@ -47,12 +48,16 @@ Here's an example:
$ spack create http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz
-Spack examines the tarball URL and tries to figure out the name of the
-package to be created. It also tries to determine what version strings
-look like for this package. Using this information, it will try to
-find *additional* versions by spidering the package's webpage. If it
-finds multiple versions, Spack prompts you to tell it how many
-versions you want to download and checksum:
+Spack examines the tarball URL and tries to figure out the name of the package
+to be created. Once the name is determined, a directory with that name is
+created in the appropriate repository. Spack prefers, but does not require,
+that names be lower case, so the directory name will be lower case when
+``spack create`` generates it. If you want mixed or upper case, simply rename
+the directory. Spack also tries to determine what version
+strings look like for this package. Using this information, it will try to find
+*additional* versions by spidering the package's webpage. If it finds multiple
+versions, Spack prompts you to tell it how many versions you want to download
+and checksum:
.. code-block:: sh
@@ -297,9 +302,10 @@ directories or files (like patches) that it needs to build.
Package Names
~~~~~~~~~~~~~~~~~~
-Packages are named after the directory containing ``package.py``. So,
-``libelf``'s ``package.py`` lives in a directory called ``libelf``.
-The ``package.py`` file defines a class called ``Libelf``, which
+Packages are named after the directory containing ``package.py``. It is
+preferred, but not required, that the directory, and thus the package name, be
+lower case. So, ``libelf``'s ``package.py`` lives in a directory called
+``libelf``. The ``package.py`` file defines a class called ``Libelf``, which
 extends Spack's ``Package`` class. For example, here is
``$SPACK_ROOT/var/spack/repos/builtin/packages/libelf/package.py``:
@@ -562,7 +568,7 @@ The package author is responsible for coming up with a sensible name
for each version to be fetched from a repository. For example, if
you're fetching from a tag like ``v1.0``, you might call that ``1.0``.
If you're fetching a nameless git commit or an older subversion
-revision, you might give the commit an intuitive name, like ``dev``
+revision, you might give the commit an intuitive name, like ``develop``
for a development version, or ``some-fancy-new-feature`` if you want
to be more specific.
@@ -572,6 +578,17 @@ branches move forward over time and you aren't guaranteed to get the
same thing every time you fetch a particular version. Life isn't
always simple, though, so this is not strictly enforced.
+When fetching from the branch corresponding to the development version
+(often called ``master``, ``trunk``, or ``dev``), it is recommended to
+call this version ``develop``. Spack has special treatment for this version so
+that ``@develop`` will satisfy dependencies like
+``depends_on(abc, when="@x.y.z:")``. In other words, ``@develop`` is
+considered greater than any other version. The rationale is that certain
+features or options first appear in the development branch. Therefore, if a
+package author wants to keep the package on the bleeding edge and provide
+support for new features, it is advised to name such a version ``develop``,
+which greatly simplifies writing dependencies and version-related conditionals.
+
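+As a minimal sketch (the package and dependency names, URL, and checksum below
+are made up for illustration), a package might declare a ``develop`` version
+alongside numbered releases and rely on ``@develop`` satisfying an open-ended
+version range:
+
+.. code-block:: python
+
+   from spack import *
+
+   class Example(Package):
+       """Hypothetical package illustrating a ``develop`` version."""
+       homepage = "http://www.example.com"
+       url      = "http://www.example.com/example-1.2.tar.gz"
+
+       # ``develop`` sorts above every numbered version.
+       version('develop', git='https://github.com/example-project/example.git',
+               branch='master')
+       version('1.2', '0123456789abcdef0123456789abcdef')
+
+       # Applies to 1.3 and later; because ``develop`` is greater than any
+       # numbered version, it also applies to ``@develop`` builds.
+       depends_on('libfoo@2.0:', when='@1.3:')
+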
In some future release, Spack may support extrapolating repository
versions as it does for tarball URLs, but currently this is not
supported.
@@ -587,6 +604,7 @@ Git fetching is enabled with the following parameters to ``version``:
* ``tag``: name of a tag to fetch.
* ``branch``: name of a branch to fetch.
* ``commit``: SHA hash (or prefix) of a commit to fetch.
+ * ``submodules``: Also fetch submodules when checking out this repository.
Only one of ``tag``, ``branch``, or ``commit`` can be used at a time.
@@ -597,7 +615,7 @@ Default branch
class Example(Package):
...
- version('dev', git='https://github.com/example-project/example.git')
+ version('develop', git='https://github.com/example-project/example.git')
This is not recommended, as the contents of the default branch
change over time.
@@ -643,6 +661,17 @@ Commits
could just use the abbreviated commit hash. It's up to the package
author to decide what makes the most sense.
+Submodules
+
+ You can supply ``submodules=True`` to cause Spack to fetch submodules
+ along with the repository at fetch time.
+
+ .. code-block:: python
+
+ version('1.0.1', git='https://github.com/example-project/example.git',
+ tag='v1.0.1', submodules=True)
+
+
Installing
^^^^^^^^^^^^^^
@@ -670,7 +699,7 @@ Default
.. code-block:: python
- version('hg-head', hg='https://jay.grs.rwth-aachen.de/hg/example')
+ version('develop', hg='https://jay.grs.rwth-aachen.de/hg/example')
Note that this is not recommended; try to fetch a particular
revision instead.
@@ -702,7 +731,7 @@ Fetching the head
.. code-block:: python
- version('svn-head', svn='https://outreach.scidac.gov/svn/libmonitor/trunk')
+ version('develop', svn='https://outreach.scidac.gov/svn/libmonitor/trunk')
This is not recommended, as the head will move forward over time.
@@ -712,7 +741,7 @@ Fetching a revision
.. code-block:: python
- version('svn-head', svn='https://outreach.scidac.gov/svn/libmonitor/trunk',
+ version('develop', svn='https://outreach.scidac.gov/svn/libmonitor/trunk',
revision=128)
Subversion branches are handled as part of the directory structure, so
@@ -1257,6 +1286,31 @@ command line to find installed packages or to install packages with
particular constraints, and package authors can use specs to describe
relationships between packages.
+Additionally, dependencies may be specified for specific use cases:
+
+.. code-block:: python
+
+ depends_on("cmake", type="build")
+ depends_on("libelf", type=("build", "link"))
+ depends_on("python", type="run")
+
+The dependency types are:
+
+ * **"build"**: the dependency is made available during this package's build.
+   The dependency will be added to ``PATH``, the compiler include paths, and
+   ``PYTHONPATH``. Other packages which depend on this one will not have these
+   modified (building project X doesn't need project Y's build dependencies).
+ * **"link"**: the dependency is linked to by this package. The dependency
+   will be added to this package's ``rpath``.
+ * **"run"**: the dependency is used by this package at runtime. The
+   dependency will be added to ``PATH`` and ``PYTHONPATH``.
+
+If not specified, ``type`` is assumed to be ``("build", "link")``. This is the
+common case for compiled language usage. Also available are the aliases
+``alldeps`` for all dependency types and ``nolink`` (``("build", "run")``) for
+use by dependencies which are not expressed via a linker (e.g., Python or Lua
+module loading).
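+
+As a minimal sketch (the package name, URL, and checksum are hypothetical), the
+``nolink`` alias might be used for a dependency that is needed at build time
+and at run time but is never linked against:
+
+.. code-block:: python
+
+   from spack import *
+
+   class ExampleTool(Package):
+       """Hypothetical pure-Python tool with no compiled extensions."""
+       homepage = "http://www.example.com"
+       url      = "http://www.example.com/example-tool-0.1.tar.gz"
+
+       version('0.1', '0123456789abcdef0123456789abcdef')
+
+       # Needed to build and to run, but never linked against:
+       depends_on('python', type=nolink)
+       # Only needed while building:
+       depends_on('cmake', type='build')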
+
.. _setup-dependent-environment:
``setup_dependent_environment()``
@@ -1660,21 +1714,21 @@ the user runs ``spack install`` and the time the ``install()`` method
is called. The concretized version of the spec above might look like
this::
- mpileaks@2.3%gcc@4.7.3 arch=linux-ppc64
- ^callpath@1.0%gcc@4.7.3+debug arch=linux-ppc64
- ^dyninst@8.1.2%gcc@4.7.3 arch=linux-ppc64
- ^libdwarf@20130729%gcc@4.7.3 arch=linux-ppc64
- ^libelf@0.8.11%gcc@4.7.3 arch=linux-ppc64
- ^mpich@3.0.4%gcc@4.7.3 arch=linux-ppc64
+ mpileaks@2.3%gcc@4.7.3 arch=linux-debian7-x86_64
+ ^callpath@1.0%gcc@4.7.3+debug arch=linux-debian7-x86_64
+ ^dyninst@8.1.2%gcc@4.7.3 arch=linux-debian7-x86_64
+ ^libdwarf@20130729%gcc@4.7.3 arch=linux-debian7-x86_64
+ ^libelf@0.8.11%gcc@4.7.3 arch=linux-debian7-x86_64
+ ^mpich@3.0.4%gcc@4.7.3 arch=linux-debian7-x86_64
.. graphviz::
digraph {
- "mpileaks@2.3\n%gcc@4.7.3\n arch=linux-ppc64" -> "mpich@3.0.4\n%gcc@4.7.3\n arch=linux-ppc64"
- "mpileaks@2.3\n%gcc@4.7.3\n arch=linux-ppc64" -> "callpath@1.0\n%gcc@4.7.3+debug\n arch=linux-ppc64" -> "mpich@3.0.4\n%gcc@4.7.3\n arch=linux-ppc64"
- "callpath@1.0\n%gcc@4.7.3+debug\n arch=linux-ppc64" -> "dyninst@8.1.2\n%gcc@4.7.3\n arch=linux-ppc64"
- "dyninst@8.1.2\n%gcc@4.7.3\n arch=linux-ppc64" -> "libdwarf@20130729\n%gcc@4.7.3\n arch=linux-ppc64" -> "libelf@0.8.11\n%gcc@4.7.3\n arch=linux-ppc64"
- "dyninst@8.1.2\n%gcc@4.7.3\n arch=linux-ppc64" -> "libelf@0.8.11\n%gcc@4.7.3\n arch=linux-ppc64"
+ "mpileaks@2.3\n%gcc@4.7.3\n arch=linux-debian7-x86_64" -> "mpich@3.0.4\n%gcc@4.7.3\n arch=linux-debian7-x86_64"
+ "mpileaks@2.3\n%gcc@4.7.3\n arch=linux-debian7-x86_64" -> "callpath@1.0\n%gcc@4.7.3+debug\n arch=linux-debian7-x86_64" -> "mpich@3.0.4\n%gcc@4.7.3\n arch=linux-debian7-x86_64"
+ "callpath@1.0\n%gcc@4.7.3+debug\n arch=linux-debian7-x86_64" -> "dyninst@8.1.2\n%gcc@4.7.3\n arch=linux-debian7-x86_64"
+ "dyninst@8.1.2\n%gcc@4.7.3\n arch=linux-debian7-x86_64" -> "libdwarf@20130729\n%gcc@4.7.3\n arch=linux-debian7-x86_64" -> "libelf@0.8.11\n%gcc@4.7.3\n arch=linux-debian7-x86_64"
+ "dyninst@8.1.2\n%gcc@4.7.3\n arch=linux-debian7-x86_64" -> "libelf@0.8.11\n%gcc@4.7.3\n arch=linux-debian7-x86_64"
}
Here, all versions, compilers, and platforms are filled in, and there
@@ -1703,9 +1757,9 @@ running ``spack spec``. For example:
^libdwarf
^libelf
- dyninst@8.0.1%gcc@4.7.3 arch=linux-ppc64
- ^libdwarf@20130729%gcc@4.7.3 arch=linux-ppc64
- ^libelf@0.8.13%gcc@4.7.3 arch=linux-ppc64
+ dyninst@8.0.1%gcc@4.7.3 arch=linux-debian7-x86_64
+ ^libdwarf@20130729%gcc@4.7.3 arch=linux-debian7-x86_64
+ ^libelf@0.8.13%gcc@4.7.3 arch=linux-debian7-x86_64
This is useful when you want to know exactly what Spack will do when
you ask for a particular spec.
@@ -2210,12 +2264,12 @@ example:
def install(self, prefix):
# Do default install
- @when('arch=chaos_5_x86_64_ib')
+ @when('arch=linux-debian7-x86_64')
def install(self, prefix):
# This will be executed instead of the default install if
# the package's sys_type() is chaos_5_x86_64_ib.
- @when('arch=bgqos_0")
+ @when('arch=linux-debian7-x86_64')
def install(self, prefix):
# This will be executed if the package's sys_type is bgqos_0
@@ -2343,7 +2397,7 @@ build system.
.. _sanity-checks:
-Sanity checking an intallation
+Sanity checking an installation
--------------------------------
By default, Spack assumes that a build has failed if nothing is
@@ -2706,15 +2760,15 @@ build process will start from scratch.
``spack purge``
~~~~~~~~~~~~~~~~~
-Cleans up all of Spack's temporary and cached files. This can be used to
-recover disk space if temporary files from interrupted or failed installs
-accumulate in the staging area.
+Cleans up all of Spack's temporary and cached files. This can be used to
+recover disk space if temporary files from interrupted or failed installs
+accumulate in the staging area.
When called with ``--stage`` or ``--all`` (or without arguments, in which case
-the default is ``--all``) this removes all staged files; this is equivalent to
+the default is ``--all``) this removes all staged files; this is equivalent to
running ``spack clean`` for every package you have fetched or staged.
-When called with ``--cache`` or ``--all`` this will clear all resources
+When called with ``--cache`` or ``--all`` this will clear all resources
:ref:`cached <caching>` during installs.
Keeping the stage directory on success
@@ -2863,11 +2917,11 @@ build it:
$ spack stage libelf
==> Trying to fetch from http://www.mr511.de/software/libelf-0.8.13.tar.gz
######################################################################## 100.0%
- ==> Staging archive: /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3 arch=linux-ppc64/libelf-0.8.13.tar.gz
- ==> Created stage in /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3 arch=linux-ppc64.
+ ==> Staging archive: /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3 arch=linux-debian7-x86_64/libelf-0.8.13.tar.gz
+ ==> Created stage in /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3 arch=linux-debian7-x86_64.
$ spack cd libelf
$ pwd
- /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3 arch=linux-ppc64/libelf-0.8.13
+ /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3 arch=linux-debian7-x86_64/libelf-0.8.13
 ``spack cd`` here changed the current working directory to the
directory containing the expanded ``libelf`` source code. There are a
diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py
index 20c9934704..d67585aac4 100644
--- a/lib/spack/spack/__init__.py
+++ b/lib/spack/spack/__init__.py
@@ -177,10 +177,11 @@ sys_type = None
# should live. This file is overloaded for spack core vs. for packages.
#
__all__ = ['Package', 'StagedPackage', 'CMakePackage', \
- 'Version', 'when', 'ver']
+ 'Version', 'when', 'ver', 'alldeps', 'nolink']
from spack.package import Package, ExtensionConflictError
from spack.package import StagedPackage, CMakePackage
from spack.version import Version, ver
+from spack.spec import DependencySpec, alldeps, nolink
from spack.multimethod import when
import llnl.util.filesystem
diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py
index fe5186a7d7..93fb0690f7 100644
--- a/lib/spack/spack/build_environment.py
+++ b/lib/spack/spack/build_environment.py
@@ -254,7 +254,8 @@ def set_build_environment_variables(pkg, env, dirty=False):
env.set_path(SPACK_ENV_PATH, env_paths)
# Prefixes of all of the package's dependencies go in SPACK_DEPENDENCIES
- dep_prefixes = [d.prefix for d in pkg.spec.traverse(root=False)]
+ dep_prefixes = [d.prefix
+ for d in pkg.spec.traverse(root=False, deptype='build')]
env.set_path(SPACK_DEPENDENCIES, dep_prefixes)
# Add dependencies to CMAKE_PREFIX_PATH
env.set_path('CMAKE_PREFIX_PATH', dep_prefixes)
@@ -337,10 +338,6 @@ def set_module_variables_for_package(pkg, module):
# Don't use which for this; we want to find it in the current dir.
m.configure = Executable('./configure')
- # TODO: shouldn't really use "which" here. Consider adding notion
- # TODO: of build dependencies, as opposed to link dependencies.
- # TODO: Currently, everything is a link dependency, but tools like
- # TODO: this shouldn't be.
m.cmake = Executable('cmake')
m.ctest = Executable('ctest')
@@ -388,9 +385,10 @@ def set_module_variables_for_package(pkg, module):
def get_rpaths(pkg):
"""Get a list of all the rpaths for a package."""
rpaths = [pkg.prefix.lib, pkg.prefix.lib64]
- rpaths.extend(d.prefix.lib for d in pkg.spec.dependencies.values()
+ deps = pkg.spec.dependencies(deptype='link')
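+ # Only link-type dependencies are added to the rpath.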
+ rpaths.extend(d.prefix.lib for d in deps
if os.path.isdir(d.prefix.lib))
- rpaths.extend(d.prefix.lib64 for d in pkg.spec.dependencies.values()
+ rpaths.extend(d.prefix.lib64 for d in deps
if os.path.isdir(d.prefix.lib64))
# Second module is our compiler mod name. We use that to get rpaths from
# module show output.
@@ -449,7 +447,7 @@ def setup_package(pkg, dirty=False):
load_external_modules(pkg)
# traverse in postorder so package can use vars from its dependencies
spec = pkg.spec
- for dspec in pkg.spec.traverse(order='post', root=False):
+ for dspec in pkg.spec.traverse(order='post', root=False, deptype='build'):
# If a user makes their own package repo, e.g.
# spack.repos.mystuff.libelf.Libelf, and they inherit from
# an existing class like spack.repos.original.libelf.Libelf,
diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py
index f5f234d7a7..18c748e483 100644
--- a/lib/spack/spack/cmd/create.py
+++ b/lib/spack/spack/cmd/create.py
@@ -325,7 +325,7 @@ def create(parser, args):
# Figure out a name and repo for the package.
name, version = guess_name_and_version(url, args)
spec = Spec(name)
- name = spec.name # factors out namespace, if any
+ name = spec.name.lower() # factors out namespace, if any
repo = find_repository(spec, args)
tty.msg("This looks like a URL for %s version %s" % (name, version))
diff --git a/lib/spack/spack/cmd/diy.py b/lib/spack/spack/cmd/diy.py
index 39faf59a17..643e6374b2 100644
--- a/lib/spack/spack/cmd/diy.py
+++ b/lib/spack/spack/cmd/diy.py
@@ -50,7 +50,7 @@ def setup_parser(subparser):
help="Do not display verbose build output while installing.")
subparser.add_argument(
'spec', nargs=argparse.REMAINDER,
- help="specs to use for install. Must contain package AND verison.")
+ help="specs to use for install. Must contain package AND version.")
def diy(self, args):
diff --git a/lib/spack/spack/cmd/fetch.py b/lib/spack/spack/cmd/fetch.py
index e40caaa234..1afc51d9fa 100644
--- a/lib/spack/spack/cmd/fetch.py
+++ b/lib/spack/spack/cmd/fetch.py
@@ -51,7 +51,7 @@ def fetch(parser, args):
for spec in specs:
if args.missing or args.dependencies:
to_fetch = set()
- for s in spec.traverse():
+ for s in spec.traverse(deptype_query=spack.alldeps):
package = spack.repo.get(s)
if args.missing and package.installed:
continue
diff --git a/lib/spack/spack/cmd/info.py b/lib/spack/spack/cmd/info.py
index 64d0d20e24..5774034062 100644
--- a/lib/spack/spack/cmd/info.py
+++ b/lib/spack/spack/cmd/info.py
@@ -81,12 +81,14 @@ def print_text_info(pkg):
print " " + fmt % (name, default, desc)
- print
- print "Dependencies:"
- if pkg.dependencies:
- colify(pkg.dependencies, indent=4)
- else:
- print " None"
+ for deptype in ('build', 'link', 'run'):
+ print
+ print "%s Dependencies:" % deptype.capitalize()
+ deps = pkg.dependencies(deptype)
+ if deps:
+ colify(deps, indent=4)
+ else:
+ print " None"
print
print "Virtual packages: "
diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py
index 3133e080d7..4c076322a9 100644
--- a/lib/spack/spack/cmd/install.py
+++ b/lib/spack/spack/cmd/install.py
@@ -58,6 +58,10 @@ def setup_parser(subparser):
help="Install a package *without* cleaning the environment.")
subparser.add_argument(
'packages', nargs=argparse.REMAINDER, help="specs of packages to install")
+ subparser.add_argument(
+ '--run-tests', action='store_true', dest='run_tests',
+ help="Run tests during installation of a package.")
+
def install(parser, args):
@@ -80,6 +84,7 @@ def install(parser, args):
keep_stage=args.keep_stage,
ignore_deps=args.ignore_deps,
make_jobs=args.jobs,
+ run_tests=args.run_tests,
verbose=args.verbose,
fake=args.fake,
dirty=args.dirty,
diff --git a/lib/spack/spack/cmd/mirror.py b/lib/spack/spack/cmd/mirror.py
index d5f7abe212..0cf682fc4f 100644
--- a/lib/spack/spack/cmd/mirror.py
+++ b/lib/spack/spack/cmd/mirror.py
@@ -179,7 +179,7 @@ def mirror_create(args):
new_specs = set()
for spec in specs:
spec.concretize()
- for s in spec.traverse():
+ for s in spec.traverse(deptype_query=spack.alldeps):
new_specs.add(s)
specs = list(new_specs)
diff --git a/lib/spack/spack/cmd/module.py b/lib/spack/spack/cmd/module.py
index 3cefb512c2..55826d133c 100644
--- a/lib/spack/spack/cmd/module.py
+++ b/lib/spack/spack/cmd/module.py
@@ -87,7 +87,7 @@ def module_find(mtype, flags, spec_array):
return
if flags.recurse_dependencies:
- for dep in spec.dependencies.values():
+ for dep in spec.dependencies():
_find_modules(dep, modules_list)
mod = module_types[mtype](spec)
@@ -108,7 +108,7 @@ def module_find(mtype, flags, spec_array):
tty.die("No installed packages match spec %s" % raw_spec)
if len(specs) > 1:
- tty.error("Multiple matches for spec %s. Choose one:" % spec)
+ tty.error("Multiple matches for spec %s. Choose one:" % raw_spec)
for s in specs:
sys.stderr.write(s.tree(color=True))
sys.exit(1)
diff --git a/lib/spack/spack/cmd/package-list.py b/lib/spack/spack/cmd/package-list.py
index 6c5c4ae8c6..bc64c77eab 100644
--- a/lib/spack/spack/cmd/package-list.py
+++ b/lib/spack/spack/cmd/package-list.py
@@ -80,11 +80,15 @@ def print_rst_package_list():
if pkg.versions:
print "Versions:"
print " " + ", ".join(str(v) for v in reversed(sorted(pkg.versions)))
- if pkg.dependencies:
- print "Dependencies"
- print " " + ", ".join("`%s`_" % d if d != "mpi" else d
- for d in pkg.dependencies)
- print
+
+ for deptype in ('build', 'link', 'run'):
+ deps = pkg.dependencies(deptype)
+ if deps:
+ print "%s Dependencies" % deptype.capitalize()
+ print " " + ", ".join("`%s`_" % d if d != "mpi" else d
+ for d in deps)
+ print
+
print "Description:"
print pkg.format_doc(indent=2)
print
diff --git a/lib/spack/spack/cmd/setup.py b/lib/spack/spack/cmd/setup.py
index 02e9bfd281..04f3d663df 100644
--- a/lib/spack/spack/cmd/setup.py
+++ b/lib/spack/spack/cmd/setup.py
@@ -44,7 +44,7 @@ def setup_parser(subparser):
help="Display verbose build output while installing.")
subparser.add_argument(
'spec', nargs=argparse.REMAINDER,
- help="specs to use for install. Must contain package AND verison.")
+ help="specs to use for install. Must contain package AND version.")
def setup(self, args):
diff --git a/lib/spack/spack/cmd/test-install.py b/lib/spack/spack/cmd/test-install.py
index 45592a7dda..14c06d136d 100644
--- a/lib/spack/spack/cmd/test-install.py
+++ b/lib/spack/spack/cmd/test-install.py
@@ -133,7 +133,12 @@ def fetch_log(path):
def failed_dependencies(spec):
- return set(item for item in spec.dependencies.itervalues() if not spack.repo.get(item).installed)
+ def get_deps(deptype):
+ return set(item for item in spec.dependencies(deptype)
+ if not spack.repo.get(item).installed)
+ link_deps = get_deps('link')
+ run_deps = get_deps('run')
+ return link_deps.union(run_deps)
def get_top_spec_or_die(args):
diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py
index f792008c49..386df08b2e 100644
--- a/lib/spack/spack/concretize.py
+++ b/lib/spack/spack/concretize.py
@@ -103,7 +103,7 @@ class DefaultConcretizer(object):
usable.sort(cmp=cmp_externals)
return usable
-
+ # XXX(deptypes): Look here.
def choose_virtual_or_external(self, spec):
"""Given a list of candidate virtual and external packages, try to
find one that is most ABI compatible.
@@ -166,7 +166,11 @@ class DefaultConcretizer(object):
valid_versions.sort(key=prefer_key, reverse=True)
if valid_versions:
- spec.versions = ver([valid_versions[0]])
+ # Disregard @develop and take the next valid version
+ if ver(valid_versions[0]) == ver('develop') and len(valid_versions) > 1:
+ spec.versions = ver([valid_versions[1]])
+ else:
+ spec.versions = ver([valid_versions[0]])
else:
# We don't know of any SAFE versions that match the given
# spec. Grab the spec's versions and grab the highest
@@ -390,8 +394,10 @@ def find_spec(spec, condition):
"""Searches the dag from spec in an intelligent order and looks
for a spec that matches a condition"""
# First search parents, then search children
- dagiter = chain(spec.traverse(direction='parents', root=False),
- spec.traverse(direction='children', root=False))
+ deptype = ('build', 'link')
+ dagiter = chain(
+ spec.traverse(direction='parents', deptype=deptype, root=False),
+ spec.traverse(direction='children', deptype=deptype, root=False))
visited = set()
for relative in dagiter:
if condition(relative):
@@ -399,7 +405,7 @@ def find_spec(spec, condition):
visited.add(id(relative))
# Then search all other relatives in the DAG *except* spec
- for relative in spec.root.traverse():
+ for relative in spec.root.traverse(deptypes=spack.alldeps):
if relative is spec: continue
if id(relative) in visited: continue
if condition(relative):
diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py
index a4bbff3d5a..c95abd7423 100644
--- a/lib/spack/spack/database.py
+++ b/lib/spack/spack/database.py
@@ -215,9 +215,14 @@ class Database(object):
# Add dependencies from other records in the install DB to
# form a full spec.
if 'dependencies' in spec_dict[spec.name]:
- for dep_hash in spec_dict[spec.name]['dependencies'].values():
+ for dep in spec_dict[spec.name]['dependencies'].values():
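+ # An entry may be a bare dependency hash or a (hash, deptypes)
+ # tuple; assume all deptypes when none were recorded.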
+ if type(dep) == tuple:
+ dep_hash, deptypes = dep
+ else:
+ dep_hash = dep
+ deptypes = spack.alldeps
child = self._read_spec_from_yaml(dep_hash, installs, hash_key)
- spec._add_dependency(child)
+ spec._add_dependency(child, deptypes)
# Specs from the database need to be marked concrete because
# they represent actual installations.
@@ -334,7 +339,10 @@ class Database(object):
counts = {}
for key, rec in self._data.items():
counts.setdefault(key, 0)
- for dep in rec.spec.dependencies.values():
+ # XXX(deptype): This checks all dependencies, but build
+ # dependencies might be able to be dropped in the
+ # future.
+ for dep in rec.spec.dependencies():
dep_key = dep.dag_hash()
counts.setdefault(dep_key, 0)
counts[dep_key] += 1
@@ -406,7 +414,7 @@ class Database(object):
else:
self._data[key] = InstallRecord(spec, path, True,
explicit=explicit)
- for dep in spec.dependencies.values():
+ for dep in spec.dependencies(('link', 'run')):
self._increment_ref_count(dep, directory_layout)
def _increment_ref_count(self, spec, directory_layout=None):
@@ -421,7 +429,7 @@ class Database(object):
self._data[key] = InstallRecord(spec.copy(), path, installed)
- for dep in spec.dependencies.values():
+ for dep in spec.dependencies('link'):
self._increment_ref_count(dep)
self._data[key].ref_count += 1
@@ -466,7 +474,7 @@ class Database(object):
if rec.ref_count == 0 and not rec.installed:
del self._data[key]
- for dep in spec.dependencies.values():
+ for dep in spec.dependencies('link'):
self._decrement_ref_count(dep)
def _remove(self, spec):
@@ -480,7 +488,7 @@ class Database(object):
return rec.spec
del self._data[key]
- for dep in rec.spec.dependencies.values():
+ for dep in rec.spec.dependencies('link'):
self._decrement_ref_count(dep)
# Returns the concrete spec so we know it in the case where a
diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py
index ca8f21dc08..88d2aaf472 100644
--- a/lib/spack/spack/directives.py
+++ b/lib/spack/spack/directives.py
@@ -171,7 +171,7 @@ def version(pkg, ver, checksum=None, **kwargs):
pkg.versions[Version(ver)] = kwargs
-def _depends_on(pkg, spec, when=None):
+def _depends_on(pkg, spec, when=None, type=None):
# If when is False do nothing
if when is False:
return
@@ -180,10 +180,29 @@ def _depends_on(pkg, spec, when=None):
when = pkg.name
when_spec = parse_anonymous_spec(when, pkg.name)
+ if type is None:
+ # The default deptype is build and link because the common case is to
+ # build against a library which then turns into a runtime dependency
+ # due to the linker.
+ # XXX(deptype): Add 'run' to this? It's an uncommon dependency type,
+ # but is most backwards-compatible.
+ type = ('build', 'link')
+
+ if isinstance(type, str):
+ type = (type,)
+
+ for deptype in type:
+ if deptype not in spack.spec.alldeps:
+ raise UnknownDependencyTypeError('depends_on', pkg.name, deptype)
+
dep_spec = Spec(spec)
if pkg.name == dep_spec.name:
raise CircularReferenceError('depends_on', pkg.name)
+ pkg_deptypes = pkg._deptypes.setdefault(dep_spec.name, set())
+ for deptype in type:
+ pkg_deptypes.add(deptype)
+
conditions = pkg.dependencies.setdefault(dep_spec.name, {})
if when_spec in conditions:
conditions[when_spec].constrain(dep_spec, deps=False)
@@ -191,13 +210,13 @@ def _depends_on(pkg, spec, when=None):
conditions[when_spec] = dep_spec
-@directive('dependencies')
-def depends_on(pkg, spec, when=None):
+@directive(('dependencies', '_deptypes'))
+def depends_on(pkg, spec, when=None, type=None):
"""Creates a dict of deps with specs defining when they apply."""
- _depends_on(pkg, spec, when=when)
+ _depends_on(pkg, spec, when=when, type=type)
-@directive(('extendees', 'dependencies'))
+@directive(('extendees', 'dependencies', '_deptypes'))
def extends(pkg, spec, **kwargs):
"""Same as depends_on, but dependency is symlinked into parent prefix.
@@ -326,3 +345,13 @@ class CircularReferenceError(DirectiveError):
directive,
"Package '%s' cannot pass itself to %s" % (package, directive))
self.package = package
+
+
+class UnknownDependencyTypeError(DirectiveError):
+ """This is raised when a dependency is of an unknown type."""
+ def __init__(self, directive, package, deptype):
+ super(UnknownDependencyTypeError, self).__init__(
+ directive,
+ "Package '%s' cannot depend on a package via %s." %
+ (package, deptype))
+ self.package = package
diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py
index ee13e2dcbc..a5e76043ad 100644
--- a/lib/spack/spack/directory_layout.py
+++ b/lib/spack/spack/directory_layout.py
@@ -285,7 +285,7 @@ class YamlDirectoryLayout(DirectoryLayout):
return path
if spec.dag_hash() == installed_spec.dag_hash():
- raise SpecHashCollisionError(installed_hash, spec_hash)
+ raise SpecHashCollisionError(spec, installed_spec)
else:
raise InconsistentInstallDirectoryError(
'Spec file in %s does not match hash!' % spec_file_path)
@@ -431,7 +431,7 @@ class SpecHashCollisionError(DirectoryLayoutError):
def __init__(self, installed_spec, new_spec):
super(SpecHashCollisionError, self).__init__(
'Specs %s and %s have the same SHA-1 prefix!'
- % installed_spec, new_spec)
+ % (installed_spec, new_spec))
class RemoveFailedError(DirectoryLayoutError):
diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py
index 38133ee0ca..bcb33bd0e6 100644
--- a/lib/spack/spack/fetch_strategy.py
+++ b/lib/spack/spack/fetch_strategy.py
@@ -366,8 +366,8 @@ class CacheURLFetchStrategy(URLFetchStrategy):
try:
self.check()
except ChecksumError:
- # Future fetchers will assume they don't need to download if the
- # file remains
+ # Future fetchers will assume they don't need to
+ # download if the file remains
os.remove(self.archive_file)
raise
@@ -517,6 +517,7 @@ class GitFetchStrategy(VCSFetchStrategy):
super(GitFetchStrategy, self).__init__(
'git', 'tag', 'branch', 'commit', **forwarded_args)
self._git = None
+ self.submodules = kwargs.get('submodules', False)
@property
def git_version(self):
@@ -595,6 +596,10 @@ class GitFetchStrategy(VCSFetchStrategy):
self.git('pull', '--tags', ignore_errors=1)
self.git('checkout', self.tag)
+ # Init submodules if the user asked for them.
+ if self.submodules:
+ self.git('submodule', 'update', '--init')
+
def archive(self, destination):
super(GitFetchStrategy, self).archive(destination, exclude='.git')
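A hedged sketch of how a package might opt in to the new submodule support
(the package and repository URL are hypothetical):

    class Bar(Package):
        homepage = "http://example.com/bar"

        # With submodules=True, the git fetcher runs
        # 'git submodule update --init' after checking out the branch.
        version('develop', git='https://example.com/bar.git',
                branch='master', submodules=True)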
diff --git a/lib/spack/spack/graph.py b/lib/spack/spack/graph.py
index 22058d41d8..063e4647b6 100644
--- a/lib/spack/spack/graph.py
+++ b/lib/spack/spack/graph.py
@@ -80,12 +80,14 @@ def topological_sort(spec, **kwargs):
"""
reverse = kwargs.get('reverse', False)
+ # XXX(deptype): iterate over a certain kind of dependency. Maybe color
+ # edges based on the type of dependency?
if not reverse:
- parents = lambda s: s.dependents
- children = lambda s: s.dependencies
+ parents = lambda s: s.dependents()
+ children = lambda s: s.dependencies()
else:
- parents = lambda s: s.dependencies
- children = lambda s: s.dependents
+ parents = lambda s: s.dependencies()
+ children = lambda s: s.dependents()
# Work on a copy so this is nondestructive.
spec = spec.copy()
diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py
index ce46047fa3..a2e528d295 100644
--- a/lib/spack/spack/modules.py
+++ b/lib/spack/spack/modules.py
@@ -120,7 +120,7 @@ def dependencies(spec, request='all'):
return []
if request == 'direct':
- return [xx for _, xx in spec.dependencies.items()]
+ return spec.dependencies()
# FIXME : during module file creation nodes seem to be visited multiple
# FIXME : times even if cover='nodes' is given. This work around permits
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index 84bd99df54..7f53b461b2 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -311,6 +311,8 @@ class Package(object):
parallel = True
"""# jobs to use for parallel make. If set, overrides default of ncpus."""
make_jobs = None
+    """By default do not run tests within the package's install()"""
+ run_tests = False
"""Most packages are NOT extendable. Set to True if you want extensions."""
extendable = False
"""List of prefix-relative file paths (or a single path). If these do
@@ -575,7 +577,7 @@ class Package(object):
name = next(iter(self.extendees))
# If the extendee is in the spec's deps already, return that.
- for dep in self.spec.traverse():
+ for dep in self.spec.traverse(deptypes=('link', 'run')):
if name == dep.name:
return dep
@@ -640,12 +642,13 @@ class Package(object):
yield self
for name in sorted(self.dependencies.keys()):
- spec = self.dependencies[name]
+ dep_spec = self.get_dependency(name)
+ spec = dep_spec.spec
- # currently, we do not descend into virtual dependencies, as this
+ # Currently, we do not descend into virtual dependencies, as this
# makes doing a sensible traversal much harder. We just assume
# that ANY of the virtual deps will work, which might not be true
- # (due to conflicts or unsatisfiable specs). For now this is ok
+ # (due to conflicts or unsatisfiable specs). For now this is ok,
# but we might want to reinvestigate if we start using a lot of
# complicated virtual dependencies
# TODO: reinvestigate this.
@@ -683,7 +686,9 @@ class Package(object):
for spec in spack.installed_db.query():
if self.name == spec.name:
continue
- for dep in spec.traverse():
+ # XXX(deptype): Should build dependencies not count here?
+ #for dep in spec.traverse(deptype=('run')):
+ for dep in spec.traverse(deptype=spack.alldeps):
if self.spec == dep:
dependents.append(spec)
return dependents
@@ -881,6 +886,7 @@ class Package(object):
skip_patch=False,
verbose=False,
make_jobs=None,
+ run_tests=False,
fake=False,
explicit=False,
dirty=False,
@@ -902,9 +908,10 @@ class Package(object):
verbose -- Display verbose build output (by default, suppresses it)
dirty -- Don't clean the build environment before installing.
make_jobs -- Number of make jobs to use for install. Default is ncpus
+        run_tests -- Run tests within the package's install()
"""
if not self.spec.concrete:
- raise ValueError("Can only install concrete packages.")
+            raise ValueError("Can only install concrete packages: %s."
+                             % self.spec.name)
# No installation needed if package is external
if self.spec.external:
@@ -932,7 +939,11 @@ class Package(object):
fake=fake,
skip_patch=skip_patch,
verbose=verbose,
- make_jobs=make_jobs)
+ make_jobs=make_jobs,
+ run_tests=run_tests)
+
+ # Set run_tests flag before starting build.
+ self.run_tests = run_tests
# Set parallelism before starting build.
self.make_jobs = make_jobs
@@ -1081,7 +1092,7 @@ class Package(object):
def do_install_dependencies(self, **kwargs):
# Pass along paths of dependencies here
- for dep in self.spec.dependencies.values():
+ for dep in self.spec.dependencies():
dep.package.do_install(**kwargs)
@property
@@ -1262,7 +1273,7 @@ class Package(object):
(self.name, self.extendee.name))
def do_activate(self, force=False):
- """Called on an etension to invoke the extendee's activate method.
+ """Called on an extension to invoke the extendee's activate method.
Commands should call this routine, and should not call
activate() directly.
@@ -1274,7 +1285,7 @@ class Package(object):
# Activate any package dependencies that are also extensions.
if not force:
- for spec in self.spec.traverse(root=False):
+ for spec in self.spec.traverse(root=False, deptype='run'):
if spec.package.extends(self.extendee_spec):
if not spec.package.activated:
spec.package.do_activate(force=force)
@@ -1320,7 +1331,7 @@ class Package(object):
for name, aspec in activated.items():
if aspec == self.spec:
continue
- for dep in aspec.traverse():
+ for dep in aspec.traverse(deptype='run'):
if self.spec == dep:
raise ActivationError(
"Cannot deactivate %s because %s is activated and depends on it." # NOQA: ignore=E501
@@ -1406,9 +1417,10 @@ class Package(object):
def rpath(self):
"""Get the rpath this package links with, as a list of paths."""
rpaths = [self.prefix.lib, self.prefix.lib64]
- rpaths.extend(d.prefix.lib for d in self.spec.traverse(root=False)
+ deps = self.spec.dependencies(deptype='link')
+ rpaths.extend(d.prefix.lib for d in deps
if os.path.isdir(d.prefix.lib))
- rpaths.extend(d.prefix.lib64 for d in self.spec.traverse(root=False)
+ rpaths.extend(d.prefix.lib64 for d in deps
if os.path.isdir(d.prefix.lib64))
return rpaths
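To illustrate the new run_tests flag, a minimal sketch of an install() that
honors it (the build commands are placeholders; run_tests is set when the
caller passes do_install(run_tests=True)):

    def install(self, spec, prefix):
        configure('--prefix=%s' % prefix)
        make()
        if self.run_tests:
            # Only exercised when the caller requested tests.
            make('check')
        make('install')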
diff --git a/lib/spack/spack/platforms/linux.py b/lib/spack/spack/platforms/linux.py
index 4d8adac384..4d3f59c320 100644
--- a/lib/spack/spack/platforms/linux.py
+++ b/lib/spack/spack/platforms/linux.py
@@ -1,16 +1,23 @@
import subprocess
+import platform
from spack.architecture import Platform, Target
from spack.operating_systems.linux_distro import LinuxDistro
class Linux(Platform):
priority = 90
- front_end = 'x86_64'
- back_end = 'x86_64'
- default = 'x86_64'
def __init__(self):
super(Linux, self).__init__('linux')
- self.add_target(self.default, Target(self.default))
+ self.add_target('x86_64', Target('x86_64'))
+ self.add_target('ppc64le', Target('ppc64le'))
+
+ self.default = platform.machine()
+ self.front_end = platform.machine()
+ self.back_end = platform.machine()
+
+ if self.default not in self.targets:
+ self.add_target(self.default, Target(self.default))
+
linux_dist = LinuxDistro()
self.default_os = str(linux_dist)
self.front_os = self.default_os
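The default target now comes from the standard library instead of being
hard-coded; for example (output is machine-dependent):

    >>> import platform
    >>> platform.machine()   # e.g. 'x86_64' on Intel, 'ppc64le' on POWER8 LE
    'x86_64'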
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index 77bc57147d..d3a5f66e57 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -155,6 +155,9 @@ _separators = '[%s]' % ''.join(color_formats.keys())
every time we call str()"""
_any_version = VersionList([':'])
+alldeps = ('build', 'link', 'run')
+nolink = ('build', 'run')
+
def index_specs(specs):
"""Take a list of specs and return a dict of lists. Dict is
@@ -292,6 +295,27 @@ class CompilerSpec(object):
@key_ordering
+class DependencySpec(object):
+ """
+ Dependencies have conditions in which they apply.
+
+ This stores both what is depended on and why it is a dependency.
+ """
+ def __init__(self, spec, deptypes):
+ self.spec = spec
+ self.deptypes = deptypes
+
+ def _cmp_key(self):
+ return self.spec
+
+ def copy(self):
+        return DependencySpec(self.spec.copy(), self.deptypes)
+
+ def __str__(self):
+ return str(self.spec)
+
+
+@key_ordering
class VariantSpec(object):
"""Variants are named, build-time options for a package. Names depend
@@ -440,11 +464,11 @@ class DependencyMap(HashableMap):
The DependencyMap is keyed by name. """
@property
def concrete(self):
- return all(d.concrete for d in self.values())
+ return all(d.spec.concrete for d in self.values())
def __str__(self):
return ''.join(
- ["^" + str(self[name]) for name in sorted(self.keys())])
+ ["^" + str(self[name].spec) for name in sorted(self.keys())])
@key_ordering
@@ -472,13 +496,13 @@ class Spec(object):
# writes directly into this Spec object.
other = spec_list[0]
self.name = other.name
- self.dependents = other.dependents
self.versions = other.versions
self.architecture = other.architecture
self.compiler = other.compiler
self.compiler_flags = other.compiler_flags
self.compiler_flags.spec = self
- self.dependencies = other.dependencies
+ self._dependencies = other._dependencies
+ self._dependents = other._dependents
self.variants = other.variants
self.variants.spec = self
self.namespace = other.namespace
@@ -500,7 +524,49 @@ class Spec(object):
# Spec(a, b) will copy a but just add b as a dep.
for dep in dep_like:
spec = dep if isinstance(dep, Spec) else Spec(dep)
- self._add_dependency(spec)
+ # XXX(deptype): default deptypes
+ self._add_dependency(spec, ('build', 'link'))
+
+ def get_dependency(self, name):
+ dep = self._dependencies.get(name)
+ if dep is not None:
+ return dep
+ raise InvalidDependencyException(
+ self.name + " does not depend on " + comma_or(name))
+
+ def _deptype_norm(self, deptype):
+ if deptype is None:
+ return alldeps
+ # Force deptype to be a set object so that we can do set intersections.
+ if isinstance(deptype, str):
+ return (deptype,)
+ return deptype
+
+ def _find_deps(self, where, deptype):
+ deptype = self._deptype_norm(deptype)
+
+ return [dep.spec
+ for dep in where.values()
+ if deptype and any(d in deptype for d in dep.deptypes)]
+
+ def dependencies(self, deptype=None):
+ return self._find_deps(self._dependencies, deptype)
+
+ def dependents(self, deptype=None):
+ return self._find_deps(self._dependents, deptype)
+
+ def _find_deps_dict(self, where, deptype):
+ deptype = self._deptype_norm(deptype)
+
+ return [(dep.spec.name, dep)
+ for dep in where.values()
+ if deptype and any(d in deptype for d in dep.deptypes)]
+
+ def dependencies_dict(self, deptype=None):
+ return dict(self._find_deps_dict(self._dependencies, deptype))
+
+ def dependents_dict(self, deptype=None):
+ return dict(self._find_deps_dict(self._dependents, deptype))
#
# Private routines here are called by the parser when building a spec.
@@ -609,13 +675,13 @@ class Spec(object):
if self.architecture.platform:
self.architecture.target = self.architecture.platform.target(value)
- def _add_dependency(self, spec):
+ def _add_dependency(self, spec, deptypes):
"""Called by the parser to add another spec as a dependency."""
- if spec.name in self.dependencies:
+ if spec.name in self._dependencies:
raise DuplicateDependencyError(
"Cannot depend on '%s' twice" % spec)
- self.dependencies[spec.name] = spec
- spec.dependents[self.name] = self
+ self._dependencies[spec.name] = DependencySpec(spec, deptypes)
+ spec._dependents[self.name] = DependencySpec(self, deptypes)
#
# Public interface
@@ -632,15 +698,15 @@ class Spec(object):
installed). This will throw an assertion error if that is not
the case.
"""
- if not self.dependents:
+ if not self._dependents:
return self
# If the spec has multiple dependents, ensure that they all
# lead to the same place. Spack shouldn't deal with any DAGs
# with multiple roots, so something's wrong if we find one.
- depiter = iter(self.dependents.values())
- first_root = next(depiter).root
- assert(all(first_root is d.root for d in depiter))
+ depiter = iter(self._dependents.values())
+ first_root = next(depiter).spec.root
+ assert(all(first_root is d.spec.root for d in depiter))
return first_root
@property
@@ -687,10 +753,21 @@ class Spec(object):
and self.architecture.concrete
and self.compiler and self.compiler.concrete
and self.compiler_flags.concrete
- and self.dependencies.concrete)
+ and self._dependencies.concrete)
return self._concrete
- def traverse(self, visited=None, d=0, **kwargs):
+ def traverse(self, visited=None, deptype=None, **kwargs):
+ traversal = self.traverse_with_deptype(visited=visited,
+ deptype=deptype,
+ **kwargs)
+ if kwargs.get('depth', False):
+ return [(s[0], s[1].spec) for s in traversal]
+ else:
+ return [s.spec for s in traversal]
+
+ def traverse_with_deptype(self, visited=None, d=0, deptype=None,
+ deptype_query=None, _self_deptype=None,
+ **kwargs):
"""Generic traversal of the DAG represented by this spec.
This will yield each node in the spec. Options:
@@ -742,6 +819,12 @@ class Spec(object):
direction = kwargs.get('direction', 'children')
order = kwargs.get('order', 'pre')
+ if deptype is None:
+ deptype = alldeps
+
+ if deptype_query is None:
+ deptype_query = ('link', 'run')
+
# Make sure kwargs have legal values; raise ValueError if not.
def validate(name, val, allowed_values):
if val not in allowed_values:
@@ -759,30 +842,37 @@ class Spec(object):
if key in visited and cover == 'nodes':
return
- # Determine whether and what to yield for this node.
+ def return_val(res):
+ return (d, res) if depth else res
+
yield_me = yield_root or d > 0
- result = (d, self) if depth else self
# Preorder traversal yields before successors
if yield_me and order == 'pre':
- yield result
+ yield return_val(DependencySpec(self, _self_deptype))
+
+ deps = self.dependencies_dict(deptype)
# Edge traversal yields but skips children of visited nodes
if not (key in visited and cover == 'edges'):
# This code determines direction and yields the children/parents
- successors = self.dependencies
+ successors = deps
if direction == 'parents':
- successors = self.dependents
+ successors = self.dependents_dict()
visited.add(key)
for name in sorted(successors):
child = successors[name]
- for elt in child.traverse(visited, d + 1, **kwargs):
+ children = child.spec.traverse_with_deptype(
+ visited, d=d + 1, deptype=deptype_query,
+ deptype_query=deptype_query,
+ _self_deptype=child.deptypes, **kwargs)
+ for elt in children:
yield elt
# Postorder traversal yields after successors
if yield_me and order == 'post':
- yield result
+ yield return_val(DependencySpec(self, _self_deptype))
@property
def short_spec(self):
@@ -807,6 +897,7 @@ class Spec(object):
if self._hash:
return self._hash[:length]
else:
+ # XXX(deptype): ignore 'build' dependencies here
yaml_text = yaml.dump(
self.to_node_dict(), default_flow_style=True, width=sys.maxint)
sha = hashlib.sha1(yaml_text)
@@ -819,11 +910,13 @@ class Spec(object):
params = dict((name, v.value) for name, v in self.variants.items())
params.update(dict((name, value)
for name, value in self.compiler_flags.items()))
+ deps = self.dependencies_dict(deptype=('link', 'run'))
d = {
- 'parameters' : params,
- 'arch' : self.architecture,
- 'dependencies' : dict((d, self.dependencies[d].dag_hash())
- for d in sorted(self.dependencies))
+ 'parameters': params,
+ 'arch': self.architecture,
+ 'dependencies': dict((d, (deps[d].spec.dag_hash(),
+ deps[d].deptypes))
+ for d in sorted(deps.keys()))
}
# Older concrete specs do not have a namespace. Omit for
@@ -848,7 +941,7 @@ class Spec(object):
def to_yaml(self, stream=None):
node_list = []
- for s in self.traverse(order='pre'):
+ for s in self.traverse(order='pre', deptype=('link', 'run')):
node = s.to_node_dict()
node[s.name]['hash'] = s.dag_hash()
node_list.append(node)
@@ -889,6 +982,11 @@ class Spec(object):
raise SpackRecordError(
"Did not find a valid format for variants in YAML file")
+ # XXX(deptypes): why are dependencies not meant to be read here?
+ #for name, dep_info in node['dependencies'].items():
+ # (dag_hash, deptypes) = dep_info
+ # spec._dependencies[name] = DependencySpec(dag_hash, deptypes)
+
return spec
@staticmethod
@@ -919,8 +1017,10 @@ class Spec(object):
for node in yfile['spec']:
name = next(iter(node))
- for dep_name in node[name]['dependencies']:
- deps[name].dependencies[dep_name] = deps[dep_name]
+ for dep_name, (dep, deptypes) in \
+ node[name]['dependencies'].items():
+ deps[name]._dependencies[dep_name] = \
+ DependencySpec(deps[dep_name], deptypes)
return spec
def _concretize_helper(self, presets=None, visited=None):
@@ -940,8 +1040,9 @@ class Spec(object):
changed = False
# Concretize deps first -- this is a bottom-up process.
- for name in sorted(self.dependencies.keys()):
- changed |= self.dependencies[name]._concretize_helper(presets, visited)
+ for name in sorted(self._dependencies.keys()):
+ changed |= self._dependencies[
+ name].spec._concretize_helper(presets, visited)
if self.name in presets:
changed |= self.constrain(presets[self.name])
@@ -965,13 +1066,16 @@ class Spec(object):
def _replace_with(self, concrete):
"""Replace this virtual spec with a concrete spec."""
assert(self.virtual)
- for name, dependent in self.dependents.items():
+ for name, dep_spec in self._dependents.items():
+ dependent = dep_spec.spec
+ deptypes = dep_spec.deptypes
+
# remove self from all dependents.
- del dependent.dependencies[self.name]
+ del dependent._dependencies[self.name]
# add the replacement, unless it is already a dep of dependent.
- if concrete.name not in dependent.dependencies:
- dependent._add_dependency(concrete)
+ if concrete.name not in dependent._dependencies:
+ dependent._add_dependency(concrete, deptypes)
def _replace_node(self, replacement):
"""Replace this spec with another.
@@ -982,13 +1086,15 @@ class Spec(object):
to be normalized.
"""
- for name, dependent in self.dependents.items():
- del dependent.dependencies[self.name]
- dependent._add_dependency(replacement)
+ for name, dep_spec in self._dependents.items():
+ dependent = dep_spec.spec
+ deptypes = dep_spec.deptypes
+ del dependent._dependencies[self.name]
+ dependent._add_dependency(replacement, deptypes)
- for name, dep in self.dependencies.items():
- del dep.dependents[self.name]
- del self.dependencies[dep.name]
+ for name, dep_spec in self._dependencies.items():
+            del dep_spec.spec._dependents[self.name]
+            del self._dependencies[name]
def _expand_virtual_packages(self):
"""Find virtual packages in this spec, replace them with providers,
@@ -1008,13 +1114,14 @@ class Spec(object):
a problem.
"""
# Make an index of stuff this spec already provides
+ # XXX(deptype): 'link' and 'run'?
self_index = ProviderIndex(self.traverse(), restrict=True)
changed = False
done = False
while not done:
done = True
-
+ # XXX(deptype): 'link' and 'run'?
for spec in list(self.traverse()):
replacement = None
if spec.virtual:
@@ -1054,10 +1161,10 @@ class Spec(object):
# If replacement is external then trim the dependencies
if replacement.external or replacement.external_module:
- if (spec.dependencies):
+ if (spec._dependencies):
changed = True
- spec.dependencies = DependencyMap()
- replacement.dependencies = DependencyMap()
+ spec._dependencies = DependencyMap()
+ replacement._dependencies = DependencyMap()
replacement.architecture = self.architecture
# TODO: could this and the stuff in _dup be cleaned up?
@@ -1068,7 +1175,7 @@ class Spec(object):
feq(replacement.versions, spec.versions) and
feq(replacement.compiler, spec.compiler) and
feq(replacement.architecture, spec.architecture) and
- feq(replacement.dependencies, spec.dependencies) and
+ feq(replacement._dependencies, spec._dependencies) and
feq(replacement.variants, spec.variants) and
feq(replacement.external, spec.external) and
feq(replacement.external_module, spec.external_module)):
@@ -1116,7 +1223,7 @@ class Spec(object):
changed = any(changes)
force = True
- for s in self.traverse():
+ for s in self.traverse(deptype_query=alldeps):
# After concretizing, assign namespaces to anything left.
# Note that this doesn't count as a "change". The repository
# configuration is constant throughout a spack run, and
@@ -1146,7 +1253,7 @@ class Spec(object):
Only for internal use -- client code should use "concretize"
unless there is a need to force a spec to be concrete.
"""
- for s in self.traverse():
+ for s in self.traverse(deptype_query=alldeps):
s._normal = True
s._concrete = True
@@ -1159,6 +1266,13 @@ class Spec(object):
return clone
def flat_dependencies(self, **kwargs):
+ flat_deps = DependencyMap()
+ flat_deps_deptypes = self.flat_dependencies_with_deptype(**kwargs)
+ for name, depspec in flat_deps_deptypes.items():
+ flat_deps[name] = depspec.spec
+ return flat_deps
+
+ def flat_dependencies_with_deptype(self, **kwargs):
"""Return a DependencyMap containing all of this spec's
dependencies with their constraints merged.
@@ -1169,23 +1283,31 @@ class Spec(object):
returns them.
"""
copy = kwargs.get('copy', True)
+ deptype_query = kwargs.get('deptype_query')
flat_deps = DependencyMap()
try:
- for spec in self.traverse(root=False):
+ deptree = self.traverse_with_deptype(root=False,
+ deptype_query=deptype_query)
+ for depspec in deptree:
+ spec = depspec.spec
+ deptypes = depspec.deptypes
+
if spec.name not in flat_deps:
if copy:
- flat_deps[spec.name] = spec.copy(deps=False)
+ dep_spec = DependencySpec(spec.copy(deps=False),
+ deptypes)
else:
- flat_deps[spec.name] = spec
+ dep_spec = DependencySpec(spec, deptypes)
+ flat_deps[spec.name] = dep_spec
else:
- flat_deps[spec.name].constrain(spec)
+ flat_deps[spec.name].spec.constrain(spec)
if not copy:
- for dep in flat_deps.values():
- dep.dependencies.clear()
- dep.dependents.clear()
- self.dependencies.clear()
+ for depspec in flat_deps.values():
+ depspec.spec._dependencies.clear()
+ depspec.spec._dependents.clear()
+ self._dependencies.clear()
return flat_deps
@@ -1200,17 +1322,11 @@ class Spec(object):
"""Return DependencyMap that points to all the dependencies in this
spec."""
dm = DependencyMap()
+ # XXX(deptype): use a deptype kwarg.
for spec in self.traverse():
dm[spec.name] = spec
return dm
- def flatten(self):
- """Pull all dependencies up to the root (this spec).
- Merge constraints for dependencies with the same name, and if they
- conflict, throw an exception. """
- for dep in self.flat_dependencies(copy=False):
- self._add_dependency(dep)
-
def _evaluate_dependency_conditions(self, name):
"""Evaluate all the conditions on a dependency with this name.
@@ -1267,7 +1383,8 @@ class Spec(object):
elif required:
raise UnsatisfiableProviderSpecError(required[0], vdep)
- def _merge_dependency(self, dep, visited, spec_deps, provider_index):
+ def _merge_dependency(self, dep, deptypes, visited, spec_deps,
+ provider_index):
"""Merge the dependency into this spec.
This is the core of normalize(). There are some basic steps:
@@ -1294,7 +1411,9 @@ class Spec(object):
dep = provider
else:
index = ProviderIndex([dep], restrict=True)
- for vspec in (v for v in spec_deps.values() if v.virtual):
+ for vspec in (v.spec
+ for v in spec_deps.values()
+ if v.spec.virtual):
if index.providers_for(vspec):
vspec._replace_with(dep)
del spec_deps[vspec.name]
@@ -1307,25 +1426,25 @@ class Spec(object):
# If the spec isn't already in the set of dependencies, clone
# it from the package description.
if dep.name not in spec_deps:
- spec_deps[dep.name] = dep.copy()
+ spec_deps[dep.name] = DependencySpec(dep.copy(), deptypes)
changed = True
# Constrain package information with spec info
try:
- changed |= spec_deps[dep.name].constrain(dep)
+ changed |= spec_deps[dep.name].spec.constrain(dep)
except UnsatisfiableSpecError, e:
e.message = "Invalid spec: '%s'. "
e.message += "Package %s requires %s %s, but spec asked for %s"
- e.message %= (spec_deps[dep.name], dep.name, e.constraint_type,
- e.required, e.provided)
+ e.message %= (spec_deps[dep.name].spec, dep.name,
+ e.constraint_type, e.required, e.provided)
raise e
# Add merged spec to my deps and recurse
dependency = spec_deps[dep.name]
- if dep.name not in self.dependencies:
- self._add_dependency(dependency)
+ if dep.name not in self._dependencies:
+ self._add_dependency(dependency.spec, dependency.deptypes)
- changed |= dependency._normalize_helper(
+ changed |= dependency.spec._normalize_helper(
visited, spec_deps, provider_index)
return changed
@@ -1351,10 +1470,11 @@ class Spec(object):
for dep_name in pkg.dependencies:
# Do we depend on dep_name? If so pkg_dep is not None.
pkg_dep = self._evaluate_dependency_conditions(dep_name)
+ deptypes = pkg._deptypes[dep_name]
# If pkg_dep is a dependency, merge it.
if pkg_dep:
changed |= self._merge_dependency(
- pkg_dep, visited, spec_deps, provider_index)
+ pkg_dep, deptypes, visited, spec_deps, provider_index)
any_change |= changed
return any_change
@@ -1385,11 +1505,13 @@ class Spec(object):
# Ensure first that all packages & compilers in the DAG exist.
self.validate_names()
# Get all the dependencies into one DependencyMap
- spec_deps = self.flat_dependencies(copy=False)
+ spec_deps = self.flat_dependencies_with_deptype(copy=False,
+ deptype_query=alldeps)
# Initialize index of virtual dependency providers if
# concretize didn't pass us one already
- provider_index = ProviderIndex(spec_deps.values(), restrict=True)
+ provider_index = ProviderIndex([s.spec for s in spec_deps.values()],
+ restrict=True)
# traverse the package DAG and fill out dependencies according
# to package files & their 'when' specs
@@ -1510,7 +1632,7 @@ class Spec(object):
"""Apply constraints of other spec's dependencies to this spec."""
other = self._autospec(other)
- if not self.dependencies or not other.dependencies:
+ if not self._dependencies or not other._dependencies:
return False
# TODO: might want more detail than this, e.g. specific deps
@@ -1526,13 +1648,17 @@ class Spec(object):
# Update with additional constraints from other spec
for name in other.dep_difference(self):
- self._add_dependency(other[name].copy())
+ dep_spec_copy = other.get_dependency(name)
+ dep_copy = dep_spec_copy.spec
+ deptypes = dep_spec_copy.deptypes
+ self._add_dependency(dep_copy.copy(), deptypes)
changed = True
return changed
def common_dependencies(self, other):
"""Return names of dependencies that self an other have in common."""
+ # XXX(deptype): handle deptypes via deptype kwarg.
common = set(
s.name for s in self.traverse(root=False))
common.intersection_update(
@@ -1657,13 +1783,14 @@ class Spec(object):
other = self._autospec(other)
if strict:
- if other.dependencies and not self.dependencies:
+ if other._dependencies and not self._dependencies:
return False
- if not all(dep in self.dependencies for dep in other.dependencies):
+ if not all(dep in self._dependencies
+ for dep in other._dependencies):
return False
- elif not self.dependencies or not other.dependencies:
+ elif not self._dependencies or not other._dependencies:
# if either spec doesn't restrict dependencies then both are
# compatible.
return True
@@ -1726,8 +1853,8 @@ class Spec(object):
self.architecture = other.architecture
self.compiler = other.compiler.copy() if other.compiler else None
if kwargs.get('cleardeps', True):
- self.dependents = DependencyMap()
- self.dependencies = DependencyMap()
+ self._dependents = DependencyMap()
+ self._dependencies = DependencyMap()
self.compiler_flags = other.compiler_flags.copy()
self.variants = other.variants.copy()
self.variants.spec = self
@@ -1739,15 +1866,30 @@ class Spec(object):
# If we copy dependencies, preserve DAG structure in the new spec
if kwargs.get('deps', True):
# This copies the deps from other using _dup(deps=False)
- new_nodes = other.flat_dependencies()
+ # XXX(deptype): We can keep different instances of specs here iff
+ # it is only a 'build' dependency (from its parent).
+ # All other instances must be shared (due to symbol
+ # and PATH contention). These should probably search
+ # for any existing installation which can satisfy the
+ # build and latch onto that because if 3 things need
+ # the same build dependency and it is *not*
+ # available, we only want to build it once.
+ new_nodes = other.flat_dependencies(deptype_query=alldeps)
new_nodes[self.name] = self
- # Hook everything up properly here by traversing.
- for spec in other.traverse(cover='nodes'):
- parent = new_nodes[spec.name]
- for child in spec.dependencies:
- if child not in parent.dependencies:
- parent._add_dependency(new_nodes[child])
+ stack = [other]
+ while stack:
+ cur_spec = stack.pop(0)
+ new_spec = new_nodes[cur_spec.name]
+
+ for depspec in cur_spec._dependencies.values():
+ stack.append(depspec.spec)
+
+                    # XXX(deptype): add any new deptypes that may have
+                    #               appeared here.
+                    if depspec.spec.name not in new_spec._dependencies:
+                        new_spec._add_dependency(
+                            new_nodes[depspec.spec.name], depspec.deptypes)
# Since we preserved structure, we can copy _normal safely.
self._normal = other._normal
@@ -1790,7 +1932,7 @@ class Spec(object):
raise KeyError("No spec with name %s in %s" % (name, self))
def __contains__(self, spec):
- """True if this spec satisfis the provided spec, or if any dependency
+ """True if this spec satisfies the provided spec, or if any dependency
does. If the spec has no name, then we parse this one first.
"""
spec = self._autospec(spec)
@@ -1814,13 +1956,13 @@ class Spec(object):
if self.ne_node(other):
return False
- if len(self.dependencies) != len(other.dependencies):
+ if len(self._dependencies) != len(other._dependencies):
return False
- ssorted = [self.dependencies[name]
- for name in sorted(self.dependencies)]
- osorted = [other.dependencies[name]
- for name in sorted(other.dependencies)]
+ ssorted = [self._dependencies[name].spec
+ for name in sorted(self._dependencies)]
+ osorted = [other._dependencies[name].spec
+ for name in sorted(other._dependencies)]
for s, o in zip(ssorted, osorted):
visited_s = id(s) in vs
@@ -1874,9 +2016,10 @@ class Spec(object):
1. A tuple describing this node in the DAG.
2. The hash of each of this node's dependencies' cmp_keys.
"""
+ dep_dict = self.dependencies_dict(deptype=('link', 'run'))
return self._cmp_node() + (
- tuple(hash(self.dependencies[name])
- for name in sorted(self.dependencies)),)
+ tuple(hash(dep_dict[name])
+ for name in sorted(dep_dict)),)
def colorized(self):
return colorize_spec(self)
@@ -2081,8 +2224,8 @@ class Spec(object):
self.architecture, other.architecture)
#Dependency is not configurable
- if self.dependencies != other.dependencies:
- return -1 if self.dependencies < other.dependencies else 1
+ if self._dependencies != other._dependencies:
+ return -1 if self._dependencies < other._dependencies else 1
#Equal specs
return 0
@@ -2196,10 +2339,13 @@ class SpecParser(spack.parse.Parser):
specs.append(self.spec(None))
self.previous = None
if self.accept(HASH):
- specs[-1]._add_dependency(self.spec_by_hash())
+ dep = self.spec_by_hash()
else:
self.expect(ID)
- specs[-1]._add_dependency(self.spec(self.token.value))
+ dep = self.spec(self.token.value)
+ # XXX(deptype): default deptypes
+ def_deptypes = ('build', 'link')
+ specs[-1]._add_dependency(dep, def_deptypes)
else:
# Attempt to construct an anonymous spec, but check that
@@ -2263,8 +2409,8 @@ class SpecParser(spack.parse.Parser):
spec.external = None
spec.external_module = None
spec.compiler_flags = FlagMap(spec)
- spec.dependents = DependencyMap()
- spec.dependencies = DependencyMap()
+ spec._dependents = DependencyMap()
+ spec._dependencies = DependencyMap()
spec.namespace = spec_namespace
spec._hash = None
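To summarize the new edge model, a hedged sketch of the accessors added above
(the spec string is illustrative):

    from spack.spec import Spec

    s = Spec('mpileaks ^mpich')

    s.dependencies()               # all child Specs, regardless of deptype
    s.dependencies('link')         # only children attached via a 'link' edge
    deps = s.dependencies_dict()   # {name: DependencySpec(spec, deptypes)}
    deps['mpich'].deptypes         # ('build', 'link'), the parser default for ^ deps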
diff --git a/lib/spack/spack/test/cmd/test_install.py b/lib/spack/spack/test/cmd/test_install.py
index d17e013ed2..a94d3c8bba 100644
--- a/lib/spack/spack/test/cmd/test_install.py
+++ b/lib/spack/spack/test/cmd/test_install.py
@@ -58,16 +58,39 @@ test_install = __import__("spack.cmd.test-install", fromlist=['test_install'])
class MockSpec(object):
def __init__(self, name, version, hashStr=None):
- self.dependencies = {}
+ self._dependencies = {}
self.name = name
self.version = version
self.hash = hashStr if hashStr else hash((name, version))
+ def _deptype_norm(self, deptype):
+ if deptype is None:
+ return spack.alldeps
+ # Force deptype to be a tuple so that we can do set intersections.
+ if isinstance(deptype, str):
+ return (deptype,)
+ return deptype
+
+ def _find_deps(self, where, deptype):
+ deptype = self._deptype_norm(deptype)
+
+ return [dep.spec
+ for dep in where.values()
+ if deptype and any(d in deptype for d in dep.deptypes)]
+
+ def dependencies(self, deptype=None):
+ return self._find_deps(self._dependencies, deptype)
+
+ def dependents(self, deptype=None):
+ return self._find_deps(self._dependents, deptype)
+
def traverse(self, order=None):
- for _, spec in self.dependencies.items():
- yield spec
+ for _, spec in self._dependencies.items():
+ yield spec.spec
yield self
- #allDeps = itertools.chain.from_iterable(i.traverse() for i in self.dependencies.itervalues())
+ #from_iterable = itertools.chain.from_iterable
+ #allDeps = from_iterable(i.traverse()
+ # for i in self.dependencies())
#return set(itertools.chain([self], allDeps))
def dag_hash(self):
@@ -104,7 +127,7 @@ def mock_fetch_log(path):
specX = MockSpec('X', "1.2.0")
specY = MockSpec('Y', "2.3.8")
-specX.dependencies['Y'] = specY
+specX._dependencies['Y'] = spack.DependencySpec(specY, spack.alldeps)
pkgX = MockPackage(specX, 'logX')
pkgY = MockPackage(specY, 'logY')
diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py
index ce02b08bc3..ae3ceecfc8 100644
--- a/lib/spack/spack/test/concretize.py
+++ b/lib/spack/spack/test/concretize.py
@@ -197,32 +197,36 @@ class ConcretizeTest(MockPackagesTest):
def test_virtual_is_fully_expanded_for_callpath(self):
# force dependence on fake "zmpi" by asking for MPI 10.0
spec = Spec('callpath ^mpi@10.0')
- self.assertTrue('mpi' in spec.dependencies)
+ self.assertTrue('mpi' in spec._dependencies)
self.assertFalse('fake' in spec)
spec.concretize()
- self.assertTrue('zmpi' in spec.dependencies)
- self.assertTrue(all(not 'mpi' in d.dependencies for d in spec.traverse()))
+ self.assertTrue('zmpi' in spec._dependencies)
+ self.assertTrue(all('mpi' not in d._dependencies
+ for d in spec.traverse()))
self.assertTrue('zmpi' in spec)
self.assertTrue('mpi' in spec)
- self.assertTrue('fake' in spec.dependencies['zmpi'])
+ self.assertTrue('fake' in spec._dependencies['zmpi'].spec)
def test_virtual_is_fully_expanded_for_mpileaks(self):
spec = Spec('mpileaks ^mpi@10.0')
- self.assertTrue('mpi' in spec.dependencies)
+ self.assertTrue('mpi' in spec._dependencies)
self.assertFalse('fake' in spec)
spec.concretize()
- self.assertTrue('zmpi' in spec.dependencies)
- self.assertTrue('callpath' in spec.dependencies)
- self.assertTrue('zmpi' in spec.dependencies['callpath'].dependencies)
- self.assertTrue('fake' in spec.dependencies['callpath'].dependencies['zmpi'].dependencies)
+ self.assertTrue('zmpi' in spec._dependencies)
+ self.assertTrue('callpath' in spec._dependencies)
+ self.assertTrue('zmpi' in spec._dependencies['callpath'].
+ spec._dependencies)
+ self.assertTrue('fake' in spec._dependencies['callpath'].
+ spec._dependencies['zmpi'].
+ spec._dependencies)
- self.assertTrue(all(not 'mpi' in d.dependencies for d in spec.traverse()))
+        self.assertTrue(all('mpi' not in d._dependencies
+                            for d in spec.traverse()))
self.assertTrue('zmpi' in spec)
self.assertTrue('mpi' in spec)
diff --git a/lib/spack/spack/test/mock_packages_test.py b/lib/spack/spack/test/mock_packages_test.py
index e1acc32cb7..9d96622a6e 100644
--- a/lib/spack/spack/test/mock_packages_test.py
+++ b/lib/spack/spack/test/mock_packages_test.py
@@ -84,15 +84,6 @@ compilers:
modules: 'None'
- compiler:
spec: clang@3.3
- operating_system: redhat6
- paths:
- cc: /path/to/clang
- cxx: /path/to/clang++
- f77: None
- fc: None
- modules: 'None'
-- compiler:
- spec: clang@3.3
operating_system: yosemite
paths:
cc: /path/to/clang
@@ -124,15 +115,6 @@ compilers:
cxx: /path/to/g++
f77: /path/to/gfortran
fc: /path/to/gfortran
- operating_system: redhat6
- spec: gcc@4.5.0
- modules: 'None'
-- compiler:
- paths:
- cc: /path/to/gcc
- cxx: /path/to/g++
- f77: /path/to/gfortran
- fc: /path/to/gfortran
operating_system: yosemite
spec: gcc@4.5.0
modules: 'None'
@@ -209,8 +191,7 @@ class MockPackagesTest(unittest.TestCase):
# restore later.
self.saved_deps = {}
-
- def set_pkg_dep(self, pkg_name, spec):
+ def set_pkg_dep(self, pkg_name, spec, deptypes=spack.alldeps):
"""Alters dependence information for a package.
Adds a dependency on <spec> to pkg.
@@ -224,7 +205,9 @@ class MockPackagesTest(unittest.TestCase):
self.saved_deps[pkg_name] = (pkg, pkg.dependencies.copy())
# Change dep spec
- pkg.dependencies[spec.name] = { Spec(pkg_name) : spec }
+ # XXX(deptype): handle deptypes.
+ pkg.dependencies[spec.name] = {Spec(pkg_name): spec}
+ pkg._deptypes[spec.name] = set(deptypes)
def cleanmock(self):
@@ -234,6 +217,7 @@ class MockPackagesTest(unittest.TestCase):
shutil.rmtree(self.temp_config, ignore_errors=True)
spack.config.clear_config_caches()
+ # XXX(deptype): handle deptypes.
# Restore dependency changes that happened during the test
for pkg_name, (pkg, deps) in self.saved_deps.items():
pkg.dependencies.clear()
diff --git a/lib/spack/spack/test/mock_repo.py b/lib/spack/spack/test/mock_repo.py
index a8098b8eec..386af282e7 100644
--- a/lib/spack/spack/test/mock_repo.py
+++ b/lib/spack/spack/test/mock_repo.py
@@ -103,6 +103,8 @@ class MockGitRepo(MockVCSRepo):
def __init__(self):
super(MockGitRepo, self).__init__('mock-git-stage', 'mock-git-repo')
+ self.url = 'file://' + self.path
+
with working_dir(self.path):
git('init')
@@ -140,8 +142,6 @@ class MockGitRepo(MockVCSRepo):
self.r1 = self.rev_hash(self.branch)
self.r1_file = self.branch_file
- self.url = self.path
-
def rev_hash(self, rev):
return git('rev-parse', rev, output=str).strip()
diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py
index c56c70b1fe..972e79aa20 100644
--- a/lib/spack/spack/test/spec_dag.py
+++ b/lib/spack/spack/test/spec_dag.py
@@ -148,10 +148,12 @@ class SpecDagTest(MockPackagesTest):
# Normalize then add conflicting constraints to the DAG (this is an
# extremely unlikely scenario, but we test for it anyway)
mpileaks.normalize()
- mpileaks.dependencies['mpich'] = Spec('mpich@1.0')
- mpileaks.dependencies['callpath'].dependencies['mpich'] = Spec('mpich@2.0')
+ mpileaks._dependencies['mpich'].spec = Spec('mpich@1.0')
+ mpileaks._dependencies['callpath']. \
+ spec._dependencies['mpich'].spec = Spec('mpich@2.0')
- self.assertRaises(spack.spec.InconsistentSpecError, mpileaks.flatten)
+ self.assertRaises(spack.spec.InconsistentSpecError,
+ lambda: mpileaks.flat_dependencies(copy=False))
def test_normalize_twice(self):
@@ -197,15 +199,17 @@ class SpecDagTest(MockPackagesTest):
def check_links(self, spec_to_check):
for spec in spec_to_check.traverse():
- for dependent in spec.dependents.values():
+ for dependent in spec.dependents():
self.assertTrue(
- spec.name in dependent.dependencies,
- "%s not in dependencies of %s" % (spec.name, dependent.name))
+ spec.name in dependent.dependencies_dict(),
+ "%s not in dependencies of %s" %
+ (spec.name, dependent.name))
- for dependency in spec.dependencies.values():
+ for dependency in spec.dependencies():
self.assertTrue(
- spec.name in dependency.dependents,
- "%s not in dependents of %s" % (spec.name, dependency.name))
+ spec.name in dependency.dependents_dict(),
+ "%s not in dependents of %s" %
+ (spec.name, dependency.name))
def test_dependents_and_dependencies_are_correct(self):
@@ -442,3 +446,69 @@ class SpecDagTest(MockPackagesTest):
orig_ids = set(id(s) for s in orig.traverse())
copy_ids = set(id(s) for s in copy.traverse())
self.assertFalse(orig_ids.intersection(copy_ids))
+
+ """
+ Here is the graph with deptypes labeled (assume all packages have a 'dt'
+ prefix). Arrows are marked with the deptypes ('b' for 'build', 'l' for
+ 'link', 'r' for 'run').
+
+ use -bl-> top
+
+ top -b-> build1
+ top -bl-> link1
+ top -r-> run1
+
+ build1 -b-> build2
+ build1 -bl-> link2
+ build1 -r-> run2
+
+ link1 -bl-> link3
+
+ run1 -bl-> link5
+ run1 -r-> run3
+
+ link3 -b-> build2
+ link3 -bl-> link4
+
+ run3 -b-> build3
+ """
+ def test_deptype_traversal(self):
+ dag = Spec('dtuse')
+ dag.normalize()
+
+ names = ['dtuse', 'dttop', 'dtlink1', 'dtlink3', 'dtlink4',
+ 'dtrun1', 'dtlink5', 'dtrun3']
+
+ traversal = dag.traverse()
+ self.assertEqual([x.name for x in traversal], names)
+
+ def test_deptype_traversal_with_builddeps(self):
+ dag = Spec('dttop')
+ dag.normalize()
+
+ names = ['dttop', 'dtbuild1', 'dtlink2', 'dtrun2', 'dtlink1',
+ 'dtlink3', 'dtlink4', 'dtrun1', 'dtlink5', 'dtrun3']
+
+ traversal = dag.traverse()
+ self.assertEqual([x.name for x in traversal], names)
+
+ def test_deptype_traversal_full(self):
+ dag = Spec('dttop')
+ dag.normalize()
+
+ names = ['dttop', 'dtbuild1', 'dtbuild2', 'dtlink2', 'dtrun2',
+ 'dtlink1', 'dtlink3', 'dtlink4', 'dtrun1', 'dtlink5',
+ 'dtrun3', 'dtbuild3']
+
+ traversal = dag.traverse(deptype_query=spack.alldeps)
+ self.assertEqual([x.name for x in traversal], names)
+
+ def test_deptype_traversal_pythonpath(self):
+ dag = Spec('dttop')
+ dag.normalize()
+
+ names = ['dttop', 'dtbuild1', 'dtrun2', 'dtlink1', 'dtrun1',
+ 'dtrun3']
+
+ traversal = dag.traverse(deptype=spack.nolink, deptype_query='run')
+ self.assertEqual([x.name for x in traversal], names)
diff --git a/lib/spack/spack/test/stage.py b/lib/spack/spack/test/stage.py
index 6d8c3ac67c..d3e3bf1383 100644
--- a/lib/spack/spack/test/stage.py
+++ b/lib/spack/spack/test/stage.py
@@ -35,8 +35,8 @@ from llnl.util.filesystem import *
from spack.stage import Stage
from spack.util.executable import which
-test_files_dir = join_path(spack.stage_path, '.test')
-test_tmp_path = join_path(test_files_dir, 'tmp')
+test_files_dir = os.path.realpath(join_path(spack.stage_path, '.test'))
+test_tmp_path = os.path.realpath(join_path(test_files_dir, 'tmp'))
archive_dir = 'test-files'
archive_name = archive_dir + '.tar.gz'
diff --git a/lib/spack/spack/test/versions.py b/lib/spack/spack/test/versions.py
index 4624f901c8..41d72e7c34 100644
--- a/lib/spack/spack/test/versions.py
+++ b/lib/spack/spack/test/versions.py
@@ -92,6 +92,9 @@ class VersionsTest(unittest.TestCase):
self.assert_ver_eq('1.0', '1.0')
self.assert_ver_lt('1.0', '2.0')
self.assert_ver_gt('2.0', '1.0')
+ self.assert_ver_eq('develop', 'develop')
+ self.assert_ver_lt('1.0', 'develop')
+ self.assert_ver_gt('develop', '1.0')
def test_three_segments(self):
self.assert_ver_eq('2.0.1', '2.0.1')
@@ -389,3 +392,39 @@ class VersionsTest(unittest.TestCase):
self.assertEqual(v.dotted, '1.2.3')
self.assertEqual(v.dashed, '1-2-3')
self.assertEqual(v.underscored, '1_2_3')
+
+ def test_repr_and_str(self):
+
+ def check_repr_and_str(vrs):
+ a = Version(vrs)
+ self.assertEqual(repr(a), 'Version(\'' + vrs + '\')')
+ b = eval(repr(a))
+ self.assertEqual(a, b)
+ self.assertEqual(str(a), vrs)
+ self.assertEqual(str(a), str(b))
+
+ check_repr_and_str('1.2.3')
+ check_repr_and_str('R2016a')
+ check_repr_and_str('R2016a.2-3_4')
+
+ def test_get_item(self):
+ a = Version('0.1_2-3')
+ self.assertTrue(isinstance(a[1], int))
+ # Test slicing
+ b = a[0:2]
+ self.assertTrue(isinstance(b, Version))
+ self.assertEqual(b, Version('0.1'))
+ self.assertEqual(repr(b), 'Version(\'0.1\')')
+ self.assertEqual(str(b), '0.1')
+ b = a[0:3]
+ self.assertTrue(isinstance(b, Version))
+ self.assertEqual(b, Version('0.1_2'))
+ self.assertEqual(repr(b), 'Version(\'0.1_2\')')
+ self.assertEqual(str(b), '0.1_2')
+ b = a[1:]
+ self.assertTrue(isinstance(b, Version))
+ self.assertEqual(b, Version('1_2-3'))
+ self.assertEqual(repr(b), 'Version(\'1_2-3\')')
+ self.assertEqual(str(b), '1_2-3')
+ # Raise TypeError on tuples
+ self.assertRaises(TypeError, b.__getitem__, 1, 2)
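The behavior these tests exercise, as a short hedged sketch:

    from spack.version import Version

    Version('develop') > Version('1.0')   # True: 'develop' sorts above releases
    Version('1.0') < Version('develop')   # True

    v = Version('0.1_2-3')
    v[1]          # 1, a plain int
    v[0:2]        # Version('0.1') -- slicing preserves the original separators
    str(v[1:])    # '1_2-3'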
diff --git a/lib/spack/spack/version.py b/lib/spack/spack/version.py
index 858d581472..6f6c83b3d8 100644
--- a/lib/spack/spack/version.py
+++ b/lib/spack/spack/version.py
@@ -44,6 +44,7 @@ be called on any of the types::
concrete
"""
import re
+import numbers
from bisect import bisect_left
from functools import wraps
@@ -194,10 +195,24 @@ class Version(object):
return iter(self.version)
def __getitem__(self, idx):
- return tuple(self.version[idx])
+ cls = type(self)
+ if isinstance(idx, numbers.Integral):
+ return self.version[idx]
+ elif isinstance(idx, slice):
+ # Currently len(self.separators) == len(self.version) - 1
+            extended_separators = self.separators + ('',)
+ string_arg = []
+            for token, sep in zip(self.version, extended_separators)[idx]:
+ string_arg.append(str(token))
+ string_arg.append(str(sep))
+ string_arg.pop() # We don't need the last separator
+ string_arg = ''.join(string_arg)
+ return cls(string_arg)
+ message = '{cls.__name__} indices must be integers'
+ raise TypeError(message.format(cls=cls))
def __repr__(self):
- return self.string
+ return 'Version(' + repr(self.string) + ')'
def __str__(self):
return self.string
@@ -221,6 +236,14 @@ class Version(object):
if self.version == other.version:
return False
+ # dev is __gt__ than anything but itself.
+ if other.string == 'develop':
+ return True
+
+ # If lhs is dev then it can't be < than anything
+ if self.string == 'develop':
+ return False
+
for a, b in zip(self.version, other.version):
if a == b:
continue