authorPeter Scheibel <scheibel1@llnl.gov>2016-06-20 10:16:02 -0700
committerPeter Scheibel <scheibel1@llnl.gov>2016-06-20 10:16:02 -0700
commit1dc62e8a19eb709c14bbc584c6fe04395da6ff67 (patch)
treed26c9773461c020ad7be344bb6c491e3c611f075 /lib
parent3b71d78f3cddeb1b622b9a49146adadbd1b1d9dc (diff)
parent12b9d6da104925841d731d910c6423ee81a5464f (diff)
Merge branch 'develop' into features/cache-archives
Diffstat (limited to 'lib')
-rw-r--r--lib/spack/docs/basic_usage.rst270
-rw-r--r--lib/spack/docs/configuration.rst12
-rw-r--r--lib/spack/docs/features.rst17
-rw-r--r--lib/spack/docs/packaging_guide.rst48
-rwxr-xr-xlib/spack/env/cc48
-rw-r--r--lib/spack/llnl/util/tty/__init__.py4
-rw-r--r--lib/spack/llnl/util/tty/colify.py9
-rw-r--r--lib/spack/spack/__init__.py4
-rw-r--r--lib/spack/spack/abi.py5
-rw-r--r--lib/spack/spack/architecture.py506
-rw-r--r--lib/spack/spack/build_environment.py90
-rw-r--r--lib/spack/spack/cmd/arch.py6
-rw-r--r--lib/spack/spack/cmd/compiler.py18
-rw-r--r--lib/spack/spack/cmd/find.py61
-rw-r--r--lib/spack/spack/cmd/uninstall.py13
-rw-r--r--lib/spack/spack/cmd/view.py295
-rw-r--r--lib/spack/spack/compiler.py83
-rw-r--r--lib/spack/spack/compilers/__init__.py262
-rw-r--r--lib/spack/spack/compilers/clang.py2
-rw-r--r--lib/spack/spack/compilers/craype.py58
-rw-r--r--lib/spack/spack/compilers/gcc.py7
-rw-r--r--lib/spack/spack/compilers/intel.py3
-rw-r--r--lib/spack/spack/compilers/pgi.py7
-rw-r--r--lib/spack/spack/compilers/xl.py4
-rw-r--r--lib/spack/spack/concretize.py115
-rw-r--r--lib/spack/spack/config.py62
-rw-r--r--lib/spack/spack/database.py16
-rw-r--r--lib/spack/spack/directives.py2
-rw-r--r--lib/spack/spack/directory_layout.py5
-rw-r--r--lib/spack/spack/fetch_strategy.py33
-rw-r--r--lib/spack/spack/hooks/licensing.py5
-rw-r--r--lib/spack/spack/operating_systems/__init__.py0
-rw-r--r--lib/spack/spack/operating_systems/cnl.py62
-rw-r--r--lib/spack/spack/operating_systems/linux_distro.py22
-rw-r--r--lib/spack/spack/operating_systems/mac_os.py29
-rw-r--r--lib/spack/spack/package.py18
-rw-r--r--lib/spack/spack/platforms/__init__.py0
-rw-r--r--lib/spack/spack/platforms/bgq.py18
-rw-r--r--lib/spack/spack/platforms/cray_xc.py46
-rw-r--r--lib/spack/spack/platforms/darwin.py26
-rw-r--r--lib/spack/spack/platforms/linux.py24
-rw-r--r--lib/spack/spack/platforms/test.py28
-rw-r--r--lib/spack/spack/spec.py299
-rw-r--r--lib/spack/spack/test/__init__.py7
-rw-r--r--lib/spack/spack/test/architecture.py112
-rw-r--r--lib/spack/spack/test/cmd/find.py60
-rw-r--r--lib/spack/spack/test/cmd/test_compiler_cmd.py81
-rw-r--r--lib/spack/spack/test/concretize.py14
-rw-r--r--lib/spack/spack/test/config.py124
-rw-r--r--lib/spack/spack/test/environment.py5
-rw-r--r--lib/spack/spack/test/mock_packages_test.py119
-rw-r--r--lib/spack/spack/test/modules.py19
-rw-r--r--lib/spack/spack/test/multimethod.py27
-rw-r--r--lib/spack/spack/test/operating_system.py55
-rw-r--r--lib/spack/spack/test/spec_dag.py7
-rw-r--r--lib/spack/spack/test/spec_semantics.py69
-rw-r--r--lib/spack/spack/test/spec_syntax.py3
-rw-r--r--lib/spack/spack/test/versions.py54
-rw-r--r--lib/spack/spack/util/executable.py1
-rw-r--r--lib/spack/spack/version.py158
-rw-r--r--lib/spack/spack/yaml_version_check.py55
61 files changed, 2867 insertions, 745 deletions
diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst
index 6efed83621..50c48b802b 100644
--- a/lib/spack/docs/basic_usage.rst
+++ b/lib/spack/docs/basic_usage.rst
@@ -102,8 +102,8 @@ that the packages is installed:
==> adept-utils is already installed in /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/adept-utils@1.0-5adef8da.
==> Trying to fetch from https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz
######################################################################## 100.0%
- ==> Staging archive: /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7=chaos_5_x86_64_ib-59f6ad23/mpileaks-1.0.tar.gz
- ==> Created stage in /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7=chaos_5_x86_64_ib-59f6ad23.
+ ==> Staging archive: /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7 arch=chaos_5_x86_64_ib-59f6ad23/mpileaks-1.0.tar.gz
+ ==> Created stage in /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7 arch=chaos_5_x86_64_ib-59f6ad23.
==> No patches needed for mpileaks.
==> Building mpileaks.
@@ -132,10 +132,10 @@ sites, as installing a version that one user needs will not disrupt
existing installations for other users.
In addition to different versions, Spack can customize the compiler,
-compile-time options (variants), and platform (for cross compiles) of
-an installation. Spack is unique in that it can also configure the
-*dependencies* a package is built with. For example, two
-configurations of the same version of a package, one built with boost
+compile-time options (variants), compiler flags, and platform (for
+cross compiles) of an installation. Spack is unique in that it can
+also configure the *dependencies* a package is built with. For example,
+two configurations of the same version of a package, one built with boost
1.39.0, and the other version built with version 1.43.0, can coexist.
This can all be done on the command line using the *spec* syntax.
@@ -334,9 +334,15 @@ of libelf would look like this:
-- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
libdwarf@20130729-d9b90962
+We can also search for packages that have a certain attribute. For example,
+``spack find -l libdwarf +debug`` will show only installations of libdwarf
+with the 'debug' compile-time option enabled, while ``spack find -l +debug``
+will find every installed package with a 'debug' compile-time option enabled.
+
The full spec syntax is discussed in detail in :ref:`sec-specs`.
+
Compiler configuration
-----------------------------------
@@ -463,6 +469,26 @@ For compilers, like ``clang``, that do not support Fortran, put
Once you save the file, the configured compilers will show up in the
list displayed by ``spack compilers``.
+You can also add compiler flags to manually configured compilers. The
+valid flags are ``cflags``, ``cxxflags``, ``fflags``, ``cppflags``,
+``ldflags``, and ``ldlibs``. For example::
+
+ ...
+ chaos_5_x86_64_ib:
+ ...
+ intel@15.0.0:
+ cc: /usr/local/bin/icc-15.0.024-beta
+ cxx: /usr/local/bin/icpc-15.0.024-beta
+ f77: /usr/local/bin/ifort-15.0.024-beta
+ fc: /usr/local/bin/ifort-15.0.024-beta
+ cppflags: -O3 -fPIC
+ ...
+
+These flags will be treated by Spack as if they were entered from
+the command line each time this compiler is used. The compiler wrappers
+then inject those flags into the compiler command. Compiler flags
+entered from the command line will be discussed in more detail in the
+following section.
.. _sec-specs:
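Editor's note: as a rough sketch (not part of this commit), the flags entry shown in the hunk above is plain YAML and becomes an ordinary string once the file is parsed. The snippet below assumes PyYAML and reproduces only the fragment from the example (the surrounding ``compilers.yaml`` structure is elided); paths and versions are the hypothetical ones from the text.

.. code-block:: python

    import textwrap
    import yaml

    # Hypothetical fragment mirroring the compilers.yaml entry above.
    text = textwrap.dedent("""\
        chaos_5_x86_64_ib:
          intel@15.0.0:
            cc: /usr/local/bin/icc-15.0.024-beta
            cxx: /usr/local/bin/icpc-15.0.024-beta
            cppflags: -O3 -fPIC
        """)

    conf = yaml.safe_load(text)
    # The flags sit alongside the compiler paths as a single string.
    print(conf['chaos_5_x86_64_ib']['intel@15.0.0']['cppflags'])
    # -> -O3 -fPIC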
@@ -480,7 +506,7 @@ the full syntax of specs.
Here is an example of a much longer spec than we've seen thus far::
- mpileaks @1.2:1.4 %gcc@4.7.5 +debug -qt =bgqos_0 ^callpath @1.1 %gcc@4.7.2
+ mpileaks @1.2:1.4 %gcc@4.7.5 +debug -qt arch=bgq_os ^callpath @1.1 %gcc@4.7.2
If provided to ``spack install``, this will install the ``mpileaks``
library at some version between ``1.2`` and ``1.4`` (inclusive),
@@ -498,8 +524,12 @@ More formally, a spec consists of the following pieces:
* ``%`` Optional compiler specifier, with an optional compiler version
(``gcc`` or ``gcc@4.7.3``)
* ``+`` or ``-`` or ``~`` Optional variant specifiers (``+debug``,
- ``-qt``, or ``~qt``)
-* ``=`` Optional architecture specifier (``bgqos_0``)
+ ``-qt``, or ``~qt``) for boolean variants
+* ``name=<value>`` Optional variant specifiers that are not restricted to
+  boolean variants
+* ``name=<value>`` Optional compiler flag specifiers. Valid flag names are
+  ``cflags``, ``cxxflags``, ``fflags``, ``cppflags``, ``ldflags``, and ``ldlibs``.
+* ``arch=<value>`` Optional architecture specifier (``arch=bgq_os``)
* ``^`` Dependency specs (``^callpath@1.1``)
There are two things to notice here. The first is that specs are
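Editor's note: as a sketch only (not part of this commit), the pieces listed above can also be exercised programmatically through ``spack.spec.Spec``, assuming a Spack checkout with this merged parser is on ``PYTHONPATH``. The spec string is the example from the text, with ``~qt`` used instead of ``-qt`` to sidestep the ambiguity discussed later; nothing is installed here.

.. code-block:: python

    from spack.spec import Spec

    # Parse the example spec into a Spec object.
    s = Spec('mpileaks @1.2:1.4 %gcc@4.7.5 +debug ~qt arch=bgq_os '
             '^callpath @1.1 %gcc@4.7.2')

    print(s.name)        # mpileaks
    print(s.versions)    # the 1.2:1.4 version range
    print(s.compiler)    # gcc@4.7.5
    print(s)             # the whole spec, re-printed in normalized form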
@@ -579,7 +609,7 @@ compilers, variants, and architectures just like any other spec.
Specifiers are associated with the nearest package name to their left.
For example, above, ``@1.1`` and ``%gcc@4.7.2`` associates with the
``callpath`` package, while ``@1.2:1.4``, ``%gcc@4.7.5``, ``+debug``,
-``-qt``, and ``=bgqos_0`` all associate with the ``mpileaks`` package.
+``-qt``, and ``arch=bgq_os`` all associate with the ``mpileaks`` package.
In the diagram above, ``mpileaks`` depends on ``mpich`` with an
unspecified version, but packages can depend on other packages with
@@ -635,22 +665,25 @@ based on site policies.
Variants
~~~~~~~~~~~~~~~~~~~~~~~
-.. Note::
-
- Variants are not yet supported, but will be in the next Spack
- release (0.9), due in Q2 2015.
-
-Variants are named options associated with a particular package, and
-they can be turned on or off. For example, above, supplying
-``+debug`` causes ``mpileaks`` to be built with debug flags. The
-names of particular variants available for a package depend on what
-was provided by the package author. ``spack info <package>`` will
+Variants are named options associated with a particular package. They are
+optional, as each package must provide default values for each variant it
+makes available. Variants can be specified using
+a flexible parameter syntax ``name=<value>``. For example,
+``spack install libelf debug=True`` will install libelf built with debug
+flags. The names of particular variants available for a package depend on
+what was provided by the package author. ``spack info <package>`` will
provide information on what build variants are available.
-Depending on the package a variant may be on or off by default. For
-``mpileaks`` here, ``debug`` is off by default, and we turned it on
-with ``+debug``. If a package is on by default you can turn it off by
-either adding ``-name`` or ``~name`` to the spec.
+For compatibility with earlier versions, variants which happen to be
+boolean in nature can be specified by a syntax that represents turning
+options on and off. For example, in the previous spec we could have
+supplied ``libelf +debug`` with the same effect of enabling the debug
+compile time option for the libelf package.
+
+Depending on the package, a variant may have any default value. For
+``libelf`` here, ``debug`` is ``False`` by default, and we turned it on
+with ``debug=True`` or ``+debug``. If a variant is ``True`` by default
+you can turn it off by either adding ``-name`` or ``~name`` to the spec.
There are two syntaxes here because, depending on context, ``~`` and
``-`` may mean different things. In most shells, the following will
@@ -662,7 +695,7 @@ result in the shell performing home directory substitution:
mpileaks~debug # use this instead
If there is a user called ``debug``, the ``~`` will be incorrectly
-expanded. In this situation, you would want to write ``mpileaks
+expanded. In this situation, you would want to write ``libelf
-debug``. However, ``-`` can be ambiguous when included after a
package name without spaces:
@@ -677,12 +710,35 @@ package, not a request for ``mpileaks`` built without ``debug``
options. In this scenario, you should write ``mpileaks~debug`` to
avoid ambiguity.
-When spack normalizes specs, it prints them out with no spaces and
-uses only ``~`` for disabled variants. We allow ``-`` and spaces on
-the command line is provided for convenience and legibility.
+When spack normalizes specs, it prints them out with no spaces, prints
+boolean variants using the backwards-compatibility syntax, and uses only
+``~`` for disabled boolean variants. ``-`` and spaces are allowed on the
+command line for convenience and legibility.
+
+Compiler Flags
+~~~~~~~~~~~~~~~~~~~~~~~
+
+Compiler flags are specified using the same syntax as non-boolean variants,
+but fulfill a different purpose. While the function of a variant is set by
+the package, compiler flags are used by the compiler wrappers to inject
+flags into the compile line of the build. Additionally, compiler flags are
+inherited by dependencies. ``spack install libdwarf cppflags=\"-g\"`` will
+install both libdwarf and libelf with the ``-g`` flag injected into their
+compile line.
+
+Notice that the value of the compiler flags must be escape-quoted on the
+command line. From within python files, the same spec would be specified
+as ``libdwarf cppflags="-g"``. This is necessary because of how the shell
+handles the quote symbols.
-Architecture specifier
+The six compiler flags are injected in the order of the implicit make
+commands in GNU Autotools. If all flags are set, the order is
+``$cppflags $cflags|$cxxflags $ldflags command $ldlibs`` for C and C++ and
+``$fflags $cppflags $ldflags command $ldlibs`` for Fortran.
+
+
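Editor's note: for illustration only, the ordering described above can be pictured with the toy snippet below; the flag values are hypothetical and ``command`` simply stands in for the wrapped compiler invocation.

.. code-block:: python

    # Assemble a C compile line in the documented order:
    #   $cppflags $cflags $ldflags command $ldlibs
    cppflags = '-DNDEBUG'
    cflags   = '-O3'
    ldflags  = '-L/opt/hypothetical/lib'
    ldlibs   = '-lm'
    command  = 'cc -c foo.c -o foo.o'   # stand-in for the real compiler call

    print(' '.join([cppflags, cflags, ldflags, command, ldlibs]))
    # -DNDEBUG -O3 -L/opt/hypothetical/lib cc -c foo.c -o foo.o -lm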
+Architecture specifiers
~~~~~~~~~~~~~~~~~~~~~~~
.. Note::
@@ -690,12 +746,9 @@ Architecture specifier
Architecture specifiers are part of specs but are not yet
functional. They will be in Spack version 1.0, due in Q3 2015.
-The architecture specifier starts with a ``=`` and also comes after
-some package name within a spec. It allows a user to specify a
-particular architecture for the package to be built. This is mostly
-used for architectures that need cross-compilation, and in most cases,
-users will not need to specify the architecture when they install a
-package.
+The architecture specifier looks identical to a variant specifier for a
+non-boolean variant. The architecture can be specified only using the
+reserved name ``arch`` (``arch=bgq_os``).
.. _sec-virtual-dependencies:
@@ -773,6 +826,23 @@ any MPI implementation will do. If another package depends on
error. Likewise, if you try to plug in some package that doesn't
provide MPI, Spack will raise an error.
+Specifying Specs by Hash
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Complicated specs can become cumbersome to enter on the command line,
+especially when many of the qualifications are necessary to
+distinguish between similar installs, for example when using the
+``uninstall`` command. To avoid this, when referencing an existing spec,
+Spack allows you to reference specs by their hash. We previously
+discussed the spec hash that Spack computes. In place of a spec in any
+command, substitute ``/<hash>`` where ``<hash>`` is any prefix taken from
+the beginning of a spec hash. If the given hash prefix is sufficient
+to be unique, Spack will replace the reference with the spec to which
+it refers. Otherwise, it will prompt for a more qualified hash.
+
+Note that this will not work to reinstall a dependency uninstalled by
+``spack uninstall -f``.
+
.. _spack-providers:
``spack providers``
@@ -1002,8 +1072,8 @@ than one installed package matches it), then Spack will warn you:
$ spack load libelf
==> Error: Multiple matches for spec libelf. Choose one:
- libelf@0.8.13%gcc@4.4.7=chaos_5_x86_64_ib
- libelf@0.8.13%intel@15.0.0=chaos_5_x86_64_ib
+ libelf@0.8.13%gcc@4.4.7 arch=chaos_5_x86_64_ib
+ libelf@0.8.13%intel@15.0.0 arch=chaos_5_x86_64_ib
You can either type the ``spack load`` command again with a fully
qualified argument, or you can add just enough extra constraints to
@@ -1251,6 +1321,120 @@ regenerate all module and dotkit files from scratch:
.. _extensions:
+Filesystem Views
+-------------------------------
+
+.. Maybe this is not the right location for this documentation.
+
+The Spack installation area allows for many package installation trees
+to coexist and gives the user choices as to what versions and variants
+of packages to use. To use them, the user must rely on a way to
+aggregate a subset of those packages. The section on Environment
+Modules gives one good way to do that which relies on setting various
+environment variables. An alternative way to aggregate is through
+**filesystem views**.
+
+A filesystem view is a single directory tree which is the union of the
+directory hierarchies of the individual package installation trees
+that have been included. The files of the view's installed packages
+are brought into the view by symbolic or hard links back to their
+location in the original Spack installation area. As the view is
+formed, any clashes due to a file having the exact same path in its
+package installation tree are handled on a first-come-first-served
+basis and a warning is printed. Packages and their dependencies can
+be both added and removed. During removal, empty directories will be
+purged. These operations can be limited to pertain to just the
+packages listed by the user or to exclude specific dependencies and
+they allow for software installed outside of Spack to coexist inside
+the filesystem view tree.
+
+By its nature, a filesystem view represents a particular choice of one
+set of packages among all the versions and variants that are available
+in the Spack installation area. It is thus equivalent to the
+directory hierarchy that might exist under ``/usr/local``. While this
+limits a view to including only one version/variant of any package, it
+provides the benefits of having a simpler and traditional layout which
+may be used without any particular knowledge that its packages were
+built by Spack.
+
+Views can be used for a variety of purposes including:
+
+- A central installation in a traditional layout, e.g. ``/usr/local``, maintained over time by the sysadmin.
+- A self-contained installation area which may form the basis of a top-level atomic versioning scheme, e.g. ``/opt/pro`` vs. ``/opt/dev``.
+- Providing an atomic and monolithic binary distribution, e.g. for delivery as a single tarball.
+- Producing ephemeral testing or development environments.
+
+Using Filesystem Views
+~~~~~~~~~~~~~~~~~~~~~~
+
+A filesystem view is created and packages are linked in by the ``spack
+view`` command's ``symlink`` and ``hardlink`` sub-commands. The
+``spack view remove`` command can be used to unlink some or all of the
+filesystem view.
+
+The following example creates a filesystem view based
+on an installed ``cmake`` package and then removes from the view the
+files in the ``cmake`` package while retaining its dependencies.
+
+.. code-block:: sh
+
+
+ $ spack view -v symlink myview cmake@3.5.2
+ ==> Linking package: "ncurses"
+ ==> Linking package: "zlib"
+ ==> Linking package: "openssl"
+ ==> Linking package: "cmake"
+
+ $ ls myview/
+ bin doc etc include lib share
+
+ $ ls myview/bin/
+ captoinfo clear cpack ctest infotocap openssl tabs toe tset
+ ccmake cmake c_rehash infocmp ncurses6-config reset tic tput
+
+ $ spack view -v -d false rm myview cmake@3.5.2
+ ==> Removing package: "cmake"
+
+ $ ls myview/bin/
+ captoinfo c_rehash infotocap openssl tabs toe tset
+ clear infocmp ncurses6-config reset tic tput
+
+
+Limitations of Filesystem Views
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+This section describes some limitations that should be considered in
+using filesystem views.
+
+Filesystem views are merely organizational. The binary executable
+programs, shared libraries and other build products found in a view
+are mere links into the "real" Spack installation area. If a view is
+built with symbolic links it requires the Spack-installed package to
+be kept in place. Building a view with hardlinks removes this
+requirement, but any internal paths (e.g. rpath or ``#!`` interpreter
+specifications) will still require the Spack-installed package files
+to be in place.
+
+.. FIXME: reference the relocation work of Hegner and Gartung.
+
+As described above, when a view is built only a single instance of a
+file may exist in the unified filesystem tree. If more than one
+package provides a file at the same path (relative to its own root)
+then it is the first package added to the view that "wins". A warning
+is printed and it is up to the user to determine if the conflict
+matters.
+
+It is up to the user to ensure a consistent view is produced. In
+particular, if the user excludes packages, limits the following of
+dependencies, or removes packages, the view may become inconsistent. For
+example, if two packages require the same sub-tree of dependencies,
+removing one package (recursively) will remove its dependencies and
+leave the other package broken.
+
+
+
+
+
Extensions & Python support
------------------------------------
@@ -1282,7 +1466,7 @@ You can find extensions for your Python installation like this:
.. code-block:: sh
$ spack extensions python
- ==> python@2.7.8%gcc@4.4.7=chaos_5_x86_64_ib-703c7a96
+ ==> python@2.7.8%gcc@4.4.7 arch=chaos_5_x86_64_ib-703c7a96
==> 36 extensions:
geos py-ipython py-pexpect py-pyside py-sip
py-basemap py-libxml2 py-pil py-pytz py-six
@@ -1372,9 +1556,9 @@ installation:
.. code-block:: sh
$ spack activate py-numpy
- ==> Activated extension py-setuptools@11.3.1%gcc@4.4.7=chaos_5_x86_64_ib-3c74eb69 for python@2.7.8%gcc@4.4.7.
- ==> Activated extension py-nose@1.3.4%gcc@4.4.7=chaos_5_x86_64_ib-5f70f816 for python@2.7.8%gcc@4.4.7.
- ==> Activated extension py-numpy@1.9.1%gcc@4.4.7=chaos_5_x86_64_ib-66733244 for python@2.7.8%gcc@4.4.7.
+ ==> Activated extension py-setuptools@11.3.1%gcc@4.4.7 arch=chaos_5_x86_64_ib-3c74eb69 for python@2.7.8%gcc@4.4.7.
+ ==> Activated extension py-nose@1.3.4%gcc@4.4.7 arch=chaos_5_x86_64_ib-5f70f816 for python@2.7.8%gcc@4.4.7.
+ ==> Activated extension py-numpy@1.9.1%gcc@4.4.7 arch=chaos_5_x86_64_ib-66733244 for python@2.7.8%gcc@4.4.7.
Several things have happened here. The user requested that
``py-numpy`` be activated in the ``python`` installation it was built
@@ -1389,7 +1573,7 @@ packages listed as activated:
.. code-block:: sh
$ spack extensions python
- ==> python@2.7.8%gcc@4.4.7=chaos_5_x86_64_ib-703c7a96
+ ==> python@2.7.8%gcc@4.4.7 arch=chaos_5_x86_64_ib-703c7a96
==> 36 extensions:
geos py-ipython py-pexpect py-pyside py-sip
py-basemap py-libxml2 py-pil py-pytz py-six
@@ -1437,7 +1621,7 @@ dependencies, you can use ``spack activate -f``:
.. code-block:: sh
$ spack activate -f py-numpy
- ==> Activated extension py-numpy@1.9.1%gcc@4.4.7=chaos_5_x86_64_ib-66733244 for python@2.7.8%gcc@4.4.7.
+ ==> Activated extension py-numpy@1.9.1%gcc@4.4.7 arch=chaos_5_x86_64_ib-66733244 for python@2.7.8%gcc@4.4.7.
.. _spack-deactivate:
diff --git a/lib/spack/docs/configuration.rst b/lib/spack/docs/configuration.rst
index c0b79a7f44..c613071c65 100644
--- a/lib/spack/docs/configuration.rst
+++ b/lib/spack/docs/configuration.rst
@@ -70,9 +70,9 @@ directory. Here's an example of an external configuration:
packages:
openmpi:
paths:
- openmpi@1.4.3%gcc@4.4.7=chaos_5_x86_64_ib: /opt/openmpi-1.4.3
- openmpi@1.4.3%gcc@4.4.7=chaos_5_x86_64_ib+debug: /opt/openmpi-1.4.3-debug
- openmpi@1.6.5%intel@10.1=chaos_5_x86_64_ib: /opt/openmpi-1.6.5-intel
+ openmpi@1.4.3%gcc@4.4.7 arch=chaos_5_x86_64_ib: /opt/openmpi-1.4.3
+ openmpi@1.4.3%gcc@4.4.7 arch=chaos_5_x86_64_ib+debug: /opt/openmpi-1.4.3-debug
+ openmpi@1.6.5%intel@10.1 arch=chaos_5_x86_64_ib: /opt/openmpi-1.6.5-intel
This example lists three installations of OpenMPI, one built with gcc,
one built with gcc and debug information, and another built with Intel.
@@ -108,9 +108,9 @@ be:
packages:
openmpi:
paths:
- openmpi@1.4.3%gcc@4.4.7=chaos_5_x86_64_ib: /opt/openmpi-1.4.3
- openmpi@1.4.3%gcc@4.4.7=chaos_5_x86_64_ib+debug: /opt/openmpi-1.4.3-debug
- openmpi@1.6.5%intel@10.1=chaos_5_x86_64_ib: /opt/openmpi-1.6.5-intel
+ openmpi@1.4.3%gcc@4.4.7 arch=chaos_5_x86_64_ib: /opt/openmpi-1.4.3
+ openmpi@1.4.3%gcc@4.4.7 arch=chaos_5_x86_64_ib+debug: /opt/openmpi-1.4.3-debug
+ openmpi@1.6.5%intel@10.1 arch=chaos_5_x86_64_ib: /opt/openmpi-1.6.5-intel
buildable: False
The addition of the ``buildable`` flag tells Spack that it should never build
diff --git a/lib/spack/docs/features.rst b/lib/spack/docs/features.rst
index 0998ba8da4..27a3b4b435 100644
--- a/lib/spack/docs/features.rst
+++ b/lib/spack/docs/features.rst
@@ -31,14 +31,21 @@ platform, all on the command line.
# Specify a compiler (and its version), with %
$ spack install mpileaks@1.1.2 %gcc@4.7.3
- # Add special compile-time options with +
+ # Add special compile-time options by name
+ $ spack install mpileaks@1.1.2 %gcc@4.7.3 debug=True
+
+ # Add special boolean compile-time options with +
$ spack install mpileaks@1.1.2 %gcc@4.7.3 +debug
- # Cross-compile for a different architecture with =
- $ spack install mpileaks@1.1.2 =bgqos_0
+ # Add compiler flags using the conventional names
+ $ spack install mpileaks@1.1.2 %gcc@4.7.3 cppflags=\"-O3 -floop-block\"
+
+ # Cross-compile for a different architecture with arch=
+ $ spack install mpileaks@1.1.2 arch=bgqos_0
-Users can specify as many or few options as they care about. Spack
-will fill in the unspecified values with sensible defaults.
+Users can specify as many or few options as they care about. Spack
+will fill in the unspecified values with sensible defaults. The two listed
+syntaxes for variants are identical when the value is boolean.
Customize dependencies
diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst
index 63c411ffb5..1f83f611b0 100644
--- a/lib/spack/docs/packaging_guide.rst
+++ b/lib/spack/docs/packaging_guide.rst
@@ -1221,11 +1221,13 @@ just as easily provide a version range:
depends_on("libelf@0.8.2:0.8.4:")
-Or a requirement for a particular variant:
+Or a requirement for a particular variant or compiler flags:
.. code-block:: python
depends_on("libelf@0.8+debug")
+ depends_on('libelf debug=True')
+ depends_on('libelf cppflags="-fPIC"')
Both users *and* package authors can use the same spec syntax to refer
to different package configurations. Users use the spec syntax on the
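Editor's note: a minimal sketch (not part of this commit) of how such constraints sit inside a ``package.py``; the package, URL, and checksum below are placeholders, only the ``depends_on`` line reflects the syntax documented above.

.. code-block:: python

    from spack import *

    class Mydwarfuser(Package):
        """Hypothetical package constraining a dependency's variant and flags."""
        homepage = "http://www.example.com/mydwarfuser"                 # placeholder
        url      = "http://www.example.com/mydwarfuser-1.0.tar.gz"      # placeholder

        version('1.0', '0123456789abcdef0123456789abcdef')              # placeholder md5

        # Require libelf built with its debug variant and -fPIC preprocessor flags.
        depends_on('libelf debug=True cppflags="-fPIC"')

        def install(self, spec, prefix):
            configure('--prefix=%s' % prefix)
            make()
            make('install')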
@@ -1623,21 +1625,21 @@ the user runs ``spack install`` and the time the ``install()`` method
is called. The concretized version of the spec above might look like
this::
- mpileaks@2.3%gcc@4.7.3=linux-ppc64
- ^callpath@1.0%gcc@4.7.3+debug=linux-ppc64
- ^dyninst@8.1.2%gcc@4.7.3=linux-ppc64
- ^libdwarf@20130729%gcc@4.7.3=linux-ppc64
- ^libelf@0.8.11%gcc@4.7.3=linux-ppc64
- ^mpich@3.0.4%gcc@4.7.3=linux-ppc64
+ mpileaks@2.3%gcc@4.7.3 arch=linux-ppc64
+ ^callpath@1.0%gcc@4.7.3+debug arch=linux-ppc64
+ ^dyninst@8.1.2%gcc@4.7.3 arch=linux-ppc64
+ ^libdwarf@20130729%gcc@4.7.3 arch=linux-ppc64
+ ^libelf@0.8.11%gcc@4.7.3 arch=linux-ppc64
+ ^mpich@3.0.4%gcc@4.7.3 arch=linux-ppc64
.. graphviz::
digraph {
- "mpileaks@2.3\n%gcc@4.7.3\n=linux-ppc64" -> "mpich@3.0.4\n%gcc@4.7.3\n=linux-ppc64"
- "mpileaks@2.3\n%gcc@4.7.3\n=linux-ppc64" -> "callpath@1.0\n%gcc@4.7.3+debug\n=linux-ppc64" -> "mpich@3.0.4\n%gcc@4.7.3\n=linux-ppc64"
- "callpath@1.0\n%gcc@4.7.3+debug\n=linux-ppc64" -> "dyninst@8.1.2\n%gcc@4.7.3\n=linux-ppc64"
- "dyninst@8.1.2\n%gcc@4.7.3\n=linux-ppc64" -> "libdwarf@20130729\n%gcc@4.7.3\n=linux-ppc64" -> "libelf@0.8.11\n%gcc@4.7.3\n=linux-ppc64"
- "dyninst@8.1.2\n%gcc@4.7.3\n=linux-ppc64" -> "libelf@0.8.11\n%gcc@4.7.3\n=linux-ppc64"
+ "mpileaks@2.3\n%gcc@4.7.3\n arch=linux-ppc64" -> "mpich@3.0.4\n%gcc@4.7.3\n arch=linux-ppc64"
+ "mpileaks@2.3\n%gcc@4.7.3\n arch=linux-ppc64" -> "callpath@1.0\n%gcc@4.7.3+debug\n arch=linux-ppc64" -> "mpich@3.0.4\n%gcc@4.7.3\n arch=linux-ppc64"
+ "callpath@1.0\n%gcc@4.7.3+debug\n arch=linux-ppc64" -> "dyninst@8.1.2\n%gcc@4.7.3\n arch=linux-ppc64"
+ "dyninst@8.1.2\n%gcc@4.7.3\n arch=linux-ppc64" -> "libdwarf@20130729\n%gcc@4.7.3\n arch=linux-ppc64" -> "libelf@0.8.11\n%gcc@4.7.3\n arch=linux-ppc64"
+ "dyninst@8.1.2\n%gcc@4.7.3\n arch=linux-ppc64" -> "libelf@0.8.11\n%gcc@4.7.3\n arch=linux-ppc64"
}
Here, all versions, compilers, and platforms are filled in, and there
@@ -1666,9 +1668,9 @@ running ``spack spec``. For example:
^libdwarf
^libelf
- dyninst@8.0.1%gcc@4.7.3=linux-ppc64
- ^libdwarf@20130729%gcc@4.7.3=linux-ppc64
- ^libelf@0.8.13%gcc@4.7.3=linux-ppc64
+ dyninst@8.0.1%gcc@4.7.3 arch=linux-ppc64
+ ^libdwarf@20130729%gcc@4.7.3 arch=linux-ppc64
+ ^libelf@0.8.13%gcc@4.7.3 arch=linux-ppc64
This is useful when you want to know exactly what Spack will do when
you ask for a particular spec.
@@ -1908,6 +1910,12 @@ the command line.
``$rpath_flag`` can be overriden on a compiler specific basis in
``lib/spack/spack/compilers/$compiler.py``.
+The compiler wrappers also pass the compiler flags specified by the user from
+the command line (``cflags``, ``cxxflags``, ``fflags``, ``cppflags``, ``ldflags``,
+and/or ``ldlibs``). They do not override the canonical autotools flags with the
+same names (but in ALL-CAPS) that may be passed into the build by particularly
+challenging package scripts.
+
Compiler flags
~~~~~~~~~~~~~~
In rare circumstances such as compiling and running small unit tests, a package
@@ -2154,12 +2162,12 @@ example:
def install(self, prefix):
# Do default install
- @when('=chaos_5_x86_64_ib')
+ @when('arch=chaos_5_x86_64_ib')
def install(self, prefix):
# This will be executed instead of the default install if
# the package's sys_type() is chaos_5_x86_64_ib.
- @when('=bgqos_0")
+ @when('arch=bgqos_0")
def install(self, prefix):
# This will be executed if the package's sys_type is bgqos_0
@@ -2749,11 +2757,11 @@ build it:
$ spack stage libelf
==> Trying to fetch from http://www.mr511.de/software/libelf-0.8.13.tar.gz
######################################################################## 100.0%
- ==> Staging archive: /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3=linux-ppc64/libelf-0.8.13.tar.gz
- ==> Created stage in /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3=linux-ppc64.
+ ==> Staging archive: /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3 arch=linux-ppc64/libelf-0.8.13.tar.gz
+ ==> Created stage in /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3 arch=linux-ppc64.
$ spack cd libelf
$ pwd
- /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3=linux-ppc64/libelf-0.8.13
+ /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3 arch=linux-ppc64/libelf-0.8.13
``spack cd`` here changed he current working directory to the
directory containing the expanded ``libelf`` source code. There are a
diff --git a/lib/spack/env/cc b/lib/spack/env/cc
index 9758b74f37..bf98b4c354 100755
--- a/lib/spack/env/cc
+++ b/lib/spack/env/cc
@@ -174,6 +174,28 @@ if [[ -z $command ]]; then
die "ERROR: Compiler '$SPACK_COMPILER_SPEC' does not support compiling $language programs."
fi
+#
+# Filter '.' and Spack environment directories out of PATH so that
+# this script doesn't just call itself
+#
+IFS=':' read -ra env_path <<< "$PATH"
+IFS=':' read -ra spack_env_dirs <<< "$SPACK_ENV_PATH"
+spack_env_dirs+=("" ".")
+PATH=""
+for dir in "${env_path[@]}"; do
+ addpath=true
+ for env_dir in "${spack_env_dirs[@]}"; do
+ if [[ $dir == $env_dir ]]; then
+ addpath=false
+ break
+ fi
+ done
+ if $addpath; then
+ PATH="${PATH:+$PATH:}$dir"
+ fi
+done
+export PATH
+
if [[ $mode == vcheck ]]; then
exec ${command} "$@"
fi
@@ -286,28 +308,6 @@ unset LD_LIBRARY_PATH
unset LD_RUN_PATH
unset DYLD_LIBRARY_PATH
-#
-# Filter '.' and Spack environment directories out of PATH so that
-# this script doesn't just call itself
-#
-IFS=':' read -ra env_path <<< "$PATH"
-IFS=':' read -ra spack_env_dirs <<< "$SPACK_ENV_PATH"
-spack_env_dirs+=("" ".")
-PATH=""
-for dir in "${env_path[@]}"; do
- addpath=true
- for env_dir in "${spack_env_dirs[@]}"; do
- if [[ $dir == $env_dir ]]; then
- addpath=false
- break
- fi
- done
- if $addpath; then
- PATH="${PATH:+$PATH:}$dir"
- fi
-done
-export PATH
-
full_command=("$command" "${args[@]}")
# In test command mode, write out full command for Spack tests.
@@ -324,8 +324,8 @@ fi
if [[ $SPACK_DEBUG == TRUE ]]; then
input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_SHORT_SPEC.in.log"
output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_SHORT_SPEC.out.log"
- echo "[$mode] $command $input_command" >> $input_log
- echo "[$mode] ${full_command[@]}" >> $output_log
+ echo "[$mode] $command $input_command" >> "$input_log"
+ echo "[$mode] ${full_command[@]}" >> "$output_log"
fi
exec "${full_command[@]}"
diff --git a/lib/spack/llnl/util/tty/__init__.py b/lib/spack/llnl/util/tty/__init__.py
index c638b113fd..ee81e11a20 100644
--- a/lib/spack/llnl/util/tty/__init__.py
+++ b/lib/spack/llnl/util/tty/__init__.py
@@ -64,12 +64,14 @@ def info(message, *args, **kwargs):
format = kwargs.get('format', '*b')
stream = kwargs.get('stream', sys.stdout)
wrap = kwargs.get('wrap', False)
+ break_long_words = kwargs.get('break_long_words', False)
cprint("@%s{==>} %s" % (format, cescape(str(message))), stream=stream)
for arg in args:
if wrap:
lines = textwrap.wrap(
- str(arg), initial_indent=indent, subsequent_indent=indent)
+ str(arg), initial_indent=indent, subsequent_indent=indent,
+ break_long_words=break_long_words)
for line in lines:
stream.write(line + '\n')
else:
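Editor's note: the new ``break_long_words`` keyword is handed straight to ``textwrap.wrap``. A standalone illustration (the message text is hypothetical) of why this matters for long install paths and hashes:

.. code-block:: python

    import textwrap

    msg = ('Installed /hypothetical/prefix/opt/chaos_5_x86_64_ib/'
           'gcc-4.4.7/mpileaks-1.0-abcdef1234567890')

    # Default behaviour: the long path is broken mid-token.
    print(textwrap.wrap(msg, width=40))

    # With break_long_words=False (the default tty.info now passes),
    # the path stays on a single, possibly overlong line.
    print(textwrap.wrap(msg, width=40, break_long_words=False))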
diff --git a/lib/spack/llnl/util/tty/colify.py b/lib/spack/llnl/util/tty/colify.py
index 429ba45882..81a83691d7 100644
--- a/lib/spack/llnl/util/tty/colify.py
+++ b/lib/spack/llnl/util/tty/colify.py
@@ -198,8 +198,13 @@ def colify(elts, **options):
for col in xrange(cols):
elt = col * rows + row
width = config.widths[col] + cextra(elts[elt])
- fmt = '%%-%ds' % width
- output.write(fmt % elts[elt])
+ if col < cols - 1:
+ fmt = '%%-%ds' % width
+ output.write(fmt % elts[elt])
+ else:
+ # Don't pad the rightmost column (spaces can wrap on
+ # small terminals if one line is overlong)
+ output.write(elts[elt])
output.write("\n")
row += 1
diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py
index 927e4c1422..965e3a7f78 100644
--- a/lib/spack/spack/__init__.py
+++ b/lib/spack/spack/__init__.py
@@ -39,7 +39,9 @@ spack_file = join_path(spack_root, "bin", "spack")
lib_path = join_path(spack_root, "lib", "spack")
build_env_path = join_path(lib_path, "env")
module_path = join_path(lib_path, "spack")
+platform_path = join_path(module_path, 'platforms')
compilers_path = join_path(module_path, "compilers")
+operating_system_path = join_path(module_path, 'operating_systems')
test_path = join_path(module_path, "test")
hooks_path = join_path(module_path, "hooks")
var_path = join_path(spack_root, "var", "spack")
@@ -109,7 +111,7 @@ concretizer = DefaultConcretizer()
# Version information
from spack.version import Version
-spack_version = Version("0.9")
+spack_version = Version("0.9.1")
#
# Executables used by Spack
diff --git a/lib/spack/spack/abi.py b/lib/spack/spack/abi.py
index 91d1d2003d..38cff62af4 100644
--- a/lib/spack/spack/abi.py
+++ b/lib/spack/spack/abi.py
@@ -35,8 +35,9 @@ class ABI(object):
The current implementation is rather rough and could be improved."""
def architecture_compatible(self, parent, child):
- """Returns true iff the parent and child specs have ABI compatible architectures."""
- return not parent.architecture or not child.architecture or parent.architecture == child.architecture
+ """Returns true iff the parent and child specs have ABI compatible targets."""
+ return not parent.architecture or not child.architecture \
+ or parent.architecture == child.architecture
@memoized
diff --git a/lib/spack/spack/architecture.py b/lib/spack/spack/architecture.py
index b14cb2bea2..cbac7b41d6 100644
--- a/lib/spack/spack/architecture.py
+++ b/lib/spack/spack/architecture.py
@@ -22,68 +22,498 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+"""
+This module contains all the elements that are required to create an
+architecture object. These include the target processor, the operating
+system, and the architecture platform (i.e. cray, darwin, linux, bgq, etc.) classes.
+
+On a multiple architecture machine, the architecture spec field can be set to
+build a package against any target and operating system that is present on the
+platform. On Cray platforms or any other architecture that has different front
+and back end environments, the operating system will determine the method
+of compiler detection.
+
+There are two different types of compiler detection:
+ 1. Through the $PATH env variable (front-end detection)
+ 2. Through the tcl module system. (back-end detection)
+
+Depending on which operating system is specified, the compiler will be detected
+using one of those methods.
+
+For platforms such as linux and darwin, the operating system is autodetected
+and the target is set to be x86_64.
+
+The command line syntax for specifying an architecture is as follows:
+
+ target=<Target name> os=<OperatingSystem name>
+
+If the user wishes to use the defaults, either target or os can be left out of
+the command line and Spack will concretize using the default. These defaults
+are set in the 'platforms/' directory which contains the different subclasses
+for platforms. If the machine has multiple architectures, the user can
+also enter front-end, or fe or back-end or be. These settings will concretize
+to their respective front-end and back-end targets and operating systems.
+Additional platforms can be added by creating a subclass of Platform
+and adding it inside the platform directory.
+
+Platform is an abstract class that is extended by subclasses. If the user
+wants to add a new type of platform (such as cray_xe), they can create a
+subclass and set all the class attributes such as priority, front_end,
+back_end, front_os and back_os. Platforms also contain a priority class
+attribute. A lower number signifies higher priority. These numbers are
+arbitrarily set and can be changed, though often there isn't much need unless a
+new platform is added and the user wants it to be detected first.
+
+Targets are created inside the platform subclasses. Most architectures
+(like linux and darwin) will have only one target (x86_64), but in the case
+of Cray machines there are both front-end and back-end processors. The user
+can specify which targets are present on the front-end and back-end architecture.
+
+Depending on the platform, operating systems are either auto-detected or are
+set. The user can set the front-end and back-end operating systems via the
+class attributes front_os and back_os. The operating system, as described
+earlier, is responsible for compiler detection.
+"""
import os
-import re
-import platform
+import imp
+import inspect
-from llnl.util.lang import memoized
+from llnl.util.lang import memoized, list_modules, key_ordering
+from llnl.util.filesystem import join_path
+import llnl.util.tty as tty
import spack
+import spack.compilers
+from spack.util.naming import mod_to_class
+from spack.util.environment import get_path
+from spack.util.multiproc import parmap
import spack.error as serr
class InvalidSysTypeError(serr.SpackError):
def __init__(self, sys_type):
- super(InvalidSysTypeError,
- self).__init__("Invalid sys_type value for Spack: " + sys_type)
+ super(InvalidSysTypeError, self).__init__(
+ "Invalid sys_type value for Spack: " + sys_type)
class NoSysTypeError(serr.SpackError):
def __init__(self):
- super(NoSysTypeError,
- self).__init__("Could not determine sys_type for this machine.")
+ super(NoSysTypeError, self).__init__(
+ "Could not determine sys_type for this machine.")
+
+
+@key_ordering
+class Target(object):
+ """ Target is the processor of the host machine.
+ The host machine may have different front-end and back-end targets,
+ especially if it is a Cray machine. The target will have a name and
+ also the module_name (e.g. craype-ivybridge). Targets will also
+ recognize which platform they came from using the set_platform method.
+ Targets will have compiler finding strategies
+ """
+
+ def __init__(self, name, module_name=None):
+ self.name = name # e.g. "ivybridge" on cray; "x86_64" otherwise
+ self.module_name = module_name # craype-ivybridge
+
+ # Sets only the platform name to avoid recursiveness
+
+ def _cmp_key(self):
+ return (self.name, self.module_name)
+
+ def __repr__(self):
+ return self.__str__()
+
+ def __str__(self):
+ return self.name
+
+
+@key_ordering
+class Platform(object):
+ """ Abstract class that each type of Platform will subclass.
+ Will return a instance of it once it
+ is returned
+ """
+
+ priority = None # Subclass sets number. Controls detection order
+ front_end = None
+ back_end = None
+ default = None # The default back-end target (e.g. ivybridge on cray)
+
+ front_os = None
+ back_os = None
+ default_os = None
+
+ def __init__(self, name):
+ self.targets = {}
+ self.operating_sys = {}
+ self.name = name
+
+ def add_target(self, name, target):
+ """Used by the platform specific subclass to list available targets.
+ Raises an error if the platform specifies a name
+ that is reserved by spack as an alias.
+ """
+ if name in ['frontend', 'fe', 'backend', 'be', 'default_target']:
+ raise ValueError(
+ "%s is a spack reserved alias "
+ "and cannot be the name of a target" % name)
+ self.targets[name] = target
+
+ def target(self, name):
+ """This is a getter method for the target dictionary
+ that handles defaulting based on the values provided by default,
+ front-end, and back-end. This can be overwritten
+ by a subclass for which we want to provide further aliasing options.
+ """
+ if name == 'default_target':
+ name = self.default
+ elif name == 'frontend' or name == 'fe':
+ name = self.front_end
+ elif name == 'backend' or name == 'be':
+ name = self.back_end
+
+ return self.targets.get(name, None)
+
+ def add_operating_system(self, name, os_class):
+ """ Add the operating_system class object into the
+ platform.operating_sys dictionary
+ """
+ if name in ['frontend', 'fe', 'backend', 'be', 'default_os']:
+ raise ValueError(
+ "%s is a spack reserved alias "
+ "and cannot be the name of an OS" % name)
+ self.operating_sys[name] = os_class
+
+ def operating_system(self, name):
+ if name == 'default_os':
+ name = self.default_os
+ if name == 'frontend' or name == "fe":
+ name = self.front_os
+ if name == 'backend' or name == 'be':
+ name = self.back_os
+
+ return self.operating_sys.get(name, None)
+
+
+ @classmethod
+ def detect(self):
+ """ Subclass is responsible for implementing this method.
+ Returns True if the Platform class detects that
+ it is the current platform
+ and False if it's not.
+ """
+ raise NotImplementedError()
+
+
+ def __repr__(self):
+ return self.__str__()
+
+
+ def __str__(self):
+ return self.name
+
+
+ def _cmp_key(self):
+ t_keys = ''.join(str(t._cmp_key()) for t in
+ sorted(self.targets.values()))
+ o_keys = ''.join(str(o._cmp_key()) for o in
+ sorted(self.operating_sys.values()))
+ return (self.name,
+ self.default,
+ self.front_end,
+ self.back_end,
+ self.default_os,
+ self.front_os,
+ self.back_os,
+ t_keys,
+ o_keys)
+
+
+@key_ordering
+class OperatingSystem(object):
+ """ Operating System will be like a class similar to platform extended
+ by subclasses for the specifics. Operating System will contain the
+ compiler finding logic. Instead of calling two separate methods to
+ find compilers we call find_compilers method for each operating system
+ """
+
+ def __init__(self, name, version):
+ self.name = name
+ self.version = version
+
+ def __str__(self):
+ return self.name + self.version
+
+ def __repr__(self):
+ return self.__str__()
+
+ def _cmp_key(self):
+ return (self.name, self.version)
+ def find_compilers(self, *paths):
+ """
+ Return a list of compilers found in the supplied paths.
+ This invokes the find() method for each Compiler class,
+ and appends the compilers detected to a list.
+ """
+ if not paths:
+ paths = get_path('PATH')
+ # Make sure path elements exist, and include /bin directories
+ # under prefixes.
+ filtered_path = []
+ for p in paths:
+ # Eliminate symlinks and just take the real directories.
+ p = os.path.realpath(p)
+ if not os.path.isdir(p):
+ continue
+ filtered_path.append(p)
-def get_sys_type_from_spack_globals():
- """Return the SYS_TYPE from spack globals, or None if it isn't set."""
- if not hasattr(spack, "sys_type"):
- return None
- elif hasattr(spack.sys_type, "__call__"):
- return spack.sys_type()
+ # Check for a bin directory, add it if it exists
+ bin = join_path(p, 'bin')
+ if os.path.isdir(bin):
+ filtered_path.append(os.path.realpath(bin))
+
+ # Once the paths are cleaned up, do a search for each type of
+ # compiler. We can spawn a bunch of parallel searches to reduce
+ # the overhead of spelunking all these directories.
+ types = spack.compilers.all_compiler_types()
+ compiler_lists = parmap(lambda cmp_cls:
+ self.find_compiler(cmp_cls, *filtered_path),
+ types)
+
+ # ensure all the version calls we made are cached in the parent
+ # process, as well. This speeds up Spack a lot.
+ clist = reduce(lambda x, y: x+y, compiler_lists)
+ return clist
+
+ def find_compiler(self, cmp_cls, *path):
+ """Try to find the given type of compiler in the user's
+ environment. For each set of compilers found, this returns
+ compiler objects with the cc, cxx, f77, fc paths and the
+ version filled in.
+
+ This will search for compilers with the names in cc_names,
+ cxx_names, etc. and it will group them if they have common
+ prefixes, suffixes, and versions. e.g., gcc-mp-4.7 would
+ be grouped with g++-mp-4.7 and gfortran-mp-4.7.
+ """
+ dicts = parmap(
+ lambda t: cmp_cls._find_matches_in_path(*t),
+ [(cmp_cls.cc_names, cmp_cls.cc_version) + tuple(path),
+ (cmp_cls.cxx_names, cmp_cls.cxx_version) + tuple(path),
+ (cmp_cls.f77_names, cmp_cls.f77_version) + tuple(path),
+ (cmp_cls.fc_names, cmp_cls.fc_version) + tuple(path)])
+
+ all_keys = set()
+ for d in dicts:
+ all_keys.update(d)
+
+ compilers = {}
+ for k in all_keys:
+ ver, pre, suf = k
+
+ # Skip compilers with unknown version.
+ if ver == 'unknown':
+ continue
+
+ paths = tuple(pn[k] if k in pn else None for pn in dicts)
+ spec = spack.spec.CompilerSpec(cmp_cls.name, ver)
+
+ if ver in compilers:
+ prev = compilers[ver]
+
+ # prefer the one with more compilers.
+ prev_paths = [prev.cc, prev.cxx, prev.f77, prev.fc]
+ newcount = len([p for p in paths if p is not None])
+ prevcount = len([p for p in prev_paths if p is not None])
+
+ # Don't add if it's not an improvement over prev compiler.
+ if newcount <= prevcount:
+ continue
+
+ compilers[ver] = cmp_cls(spec, self, paths)
+
+ return list(compilers.values())
+
+ def to_dict(self):
+ d = {}
+ d['name'] = self.name
+ d['version'] = self.version
+ return d
+
+@key_ordering
+class Arch(object):
+ "Architecture is now a class to help with setting attributes"
+
+ def __init__(self, platform=None, platform_os=None, target=None):
+ self.platform = platform
+ if platform and platform_os:
+ platform_os = self.platform.operating_system(platform_os)
+ self.platform_os = platform_os
+ if platform and target:
+ target = self.platform.target(target)
+ self.target = target
+
+ # Hooks for parser to use when platform is set after target or os
+ self.target_string = None
+ self.os_string = None
+
+ @property
+ def concrete(self):
+ return all((self.platform is not None,
+ isinstance(self.platform, Platform),
+ self.platform_os is not None,
+ isinstance(self.platform_os, OperatingSystem),
+ self.target is not None, isinstance(self.target, Target)))
+
+ def __str__(self):
+ if self.platform or self.platform_os or self.target:
+ if self.platform.name == 'darwin':
+ os_name = self.platform_os.name if self.platform_os else "None"
+ else:
+ os_name = str(self.platform_os)
+
+ return (str(self.platform) + "-" +
+ os_name + "-" + str(self.target))
+ else:
+ return ''
+
+
+ def __contains__(self, string):
+ return string in str(self)
+
+
+ def _cmp_key(self):
+ if isinstance(self.platform, Platform):
+ platform = self.platform.name
+ else:
+ platform = self.platform
+ if isinstance(self.platform_os, OperatingSystem):
+ platform_os = self.platform_os.name
+ else:
+ platform_os = self.platform_os
+ if isinstance(self.target, Target):
+ target = self.target.name
+ else:
+ target = self.target
+ return (platform, platform_os, target)
+
+ def to_dict(self):
+ d = {}
+ d['platform'] = str(self.platform) if self.platform else None
+ d['platform_os'] = str(self.platform_os) if self.platform_os else None
+ d['target'] = str(self.target) if self.target else None
+
+ return d
+
+
+def _target_from_dict(target_name, platform=None):
+ """ Creates new instance of target and assigns all the attributes of
+ that target from the dictionary
+ """
+ if not platform:
+ platform = sys_type()
+ return platform.target(target_name)
+
+
+def _operating_system_from_dict(os_name, platform=None):
+ """ uses platform's operating system method to grab the constructed
+ operating systems that are valid on the platform.
+ """
+ if not platform:
+ platform = sys_type()
+ if isinstance(os_name, dict):
+ name = os_name['name']
+ version = os_name['version']
+ return platform.operating_system(name+version)
else:
- return spack.sys_type
+ return platform.operating_system(os_name)
+
+def _platform_from_dict(platform_name):
+ """ Constructs a platform from a dictionary. """
+ platform_list = all_platforms()
+ for p in platform_list:
+ if platform_name.replace("_", "").lower() == p.__name__.lower():
+ return p()
-def get_sys_type_from_environment():
- """Return $SYS_TYPE or None if it's not defined."""
- return os.environ.get('SYS_TYPE')
+def arch_from_dict(d):
+ """ Uses _platform_from_dict, _operating_system_from_dict, _target_from_dict
+ helper methods to recreate the arch tuple from the dictionary read from
+ a yaml file
+ """
+ arch = Arch()
-def get_sys_type_from_platform():
- """Return the architecture from Python's platform module."""
- sys_type = platform.system() + '-' + platform.machine()
- sys_type = re.sub(r'[^\w-]', '_', sys_type)
- return sys_type.lower()
+ if isinstance(d, basestring):
+ # We have an old spec using a string for the architecture
+ arch.platform = Platform('spack_compatibility')
+ arch.platform_os = OperatingSystem('unknown', '')
+ arch.target = Target(d)
+
+ arch.os_string = None
+ arch.target_string = None
+ else:
+ if d is None:
+ return None
+ platform_name = d['platform']
+ os_name = d['platform_os']
+ target_name = d['target']
+
+ if platform_name:
+ arch.platform = _platform_from_dict(platform_name)
+ else:
+ arch.platform = None
+ if target_name:
+ arch.target = _target_from_dict(target_name, arch.platform)
+ else:
+ arch.target = None
+ if os_name:
+ arch.platform_os = _operating_system_from_dict(os_name,
+ arch.platform)
+ else:
+ arch.platform_os = None
+
+ arch.os_string = None
+ arch.target_string = None
+
+ return arch
@memoized
-def sys_type():
- """Returns a SysType for the current machine."""
- methods = [get_sys_type_from_spack_globals, get_sys_type_from_environment,
- get_sys_type_from_platform]
+def all_platforms():
+ classes = []
+ mod_path = spack.platform_path
+ parent_module = "spack.platforms"
+
+ for name in list_modules(mod_path):
+ mod_name = '%s.%s' % (parent_module, name)
+ class_name = mod_to_class(name)
+ mod = __import__(mod_name, fromlist=[class_name])
+ if not hasattr(mod, class_name):
+ tty.die('No class %s defined in %s' % (class_name, mod_name))
+ cls = getattr(mod, class_name)
+ if not inspect.isclass(cls):
+ tty.die('%s.%s is not a class' % (mod_name, class_name))
+
+ classes.append(cls)
- # search for a method that doesn't return None
- sys_type = None
- for method in methods:
- sys_type = method()
- if sys_type:
- break
+ return classes
- # Couldn't determine the sys_type for this machine.
- if sys_type is None:
- return "unknown_arch"
- if not isinstance(sys_type, basestring):
- raise InvalidSysTypeError(sys_type)
+@memoized
+def sys_type():
+ """ Gather a list of all available subclasses of platforms.
+ Sorts the list according to their priority. Priority is
+ an arbitrarily set number. Detects the platform either using uname or
+ a file path (/opt/cray...)
+ """
+ # Try to create a Platform object using the config file FIRST
+ platform_list = all_platforms()
+ platform_list.sort(key=lambda a: a.priority)
- return sys_type
+ for platform in platform_list:
+ if platform.detect():
+ return platform()
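Editor's note: to make the new classes concrete, here is a minimal usage sketch (not part of the commit; the ``MyCluster`` platform, its targets, and operating systems are invented) showing how a Platform subclass, Target, OperatingSystem, and ``Arch`` fit together, assuming the Spack tree above is importable.

.. code-block:: python

    import spack.architecture as arch

    class MyCluster(arch.Platform):
        priority   = 90            # lower numbers are detected first
        front_end  = 'x86_64'
        back_end   = 'ivybridge'
        default    = 'ivybridge'   # default back-end target
        front_os   = 'SuSE11'
        back_os    = 'CNL10'
        default_os = 'CNL10'

        def __init__(self):
            super(MyCluster, self).__init__('mycluster')
            self.add_target('x86_64', arch.Target('x86_64'))
            self.add_target('ivybridge',
                            arch.Target('ivybridge', 'craype-ivybridge'))
            self.add_operating_system('SuSE11', arch.OperatingSystem('SuSE', '11'))
            self.add_operating_system('CNL10', arch.OperatingSystem('CNL', '10'))

        @classmethod
        def detect(cls):
            return False           # real platforms inspect uname or file paths

    platform = MyCluster()
    a = arch.Arch(platform, 'default_os', 'default_target')
    print(a)            # mycluster-CNL10-ivybridge
    print(a.to_dict())  # the form arch_from_dict() reads back from yaml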
diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py
index d87aaa6285..7c65091d49 100644
--- a/lib/spack/spack/build_environment.py
+++ b/lib/spack/spack/build_environment.py
@@ -113,9 +113,66 @@ class MakeExecutable(Executable):
return super(MakeExecutable, self).__call__(*args, **kwargs)
+def load_module(mod):
+ """Takes a module name and removes modules until it is possible to
+ load that module. It then loads the provided module. Depends on the
+ modulecmd implementation of modules used in cray and lmod.
+ """
+ #Create an executable of the module command that will output python code
+ modulecmd = which('modulecmd')
+ modulecmd.add_default_arg('python')
+
+ # Read the module and remove any conflicting modules
+ # We do this without checking that they are already installed
+ # for ease of programming because unloading a module that is not
+ # loaded does nothing.
+ text = modulecmd('show', mod, output=str, error=str).split()
+ for i, word in enumerate(text):
+ if word == 'conflict':
+ exec(compile(modulecmd('unload', text[i+1], output=str, error=str), '<string>', 'exec'))
+ # Load the module now that there are no conflicts
+ load = modulecmd('load', mod, output=str, error=str)
+ exec(compile(load, '<string>', 'exec'))
+
+def get_path_from_module(mod):
+ """Inspects a TCL module for entries that indicate the absolute path
+ at which the library supported by said module can be found.
+ """
+ # Create a modulecmd executable
+ modulecmd = which('modulecmd')
+ modulecmd.add_default_arg('python')
+
+ # Read the module
+ text = modulecmd('show', mod, output=str, error=str).split('\n')
+ # If it lists its package directory, return that
+ for line in text:
+ if line.find(mod.upper()+'_DIR') >= 0:
+ words = line.split()
+ return words[2]
+
+ # If it lists a -rpath instruction, use that
+ for line in text:
+ rpath = line.find('-rpath/')
+ if rpath >= 0:
+ return line[rpath+6:line.find('/lib')]
+
+ # If it lists a -L instruction, use that
+ for line in text:
+ L = line.find('-L/')
+ if L >= 0:
+ return line[L+2:line.find('/lib')]
+
+ # If it sets the LD_LIBRARY_PATH or CRAY_LD_LIBRARY_PATH, use that
+ for line in text:
+ if line.find('LD_LIBRARY_PATH') >= 0:
+ words = line.split()
+ path = words[2]
+ return path[:path.find('/lib')]
+ # Unable to find module path
+ return None
def set_compiler_environment_variables(pkg, env):
- assert pkg.spec.concrete
+ assert(pkg.spec.concrete)
compiler = pkg.compiler
flags = pkg.spec.compiler_flags
@@ -154,6 +211,10 @@ def set_compiler_environment_variables(pkg, env):
env.set('SPACK_' + flag.upper(), ' '.join(f for f in flags[flag]))
env.set('SPACK_COMPILER_SPEC', str(pkg.spec.compiler))
+
+ for mod in compiler.modules:
+ load_module(mod)
+
return env
@@ -212,13 +273,15 @@ def set_build_environment_variables(pkg, env):
env.set(SPACK_DEBUG_LOG_DIR, spack.spack_working_dir)
# Add any pkgconfig directories to PKG_CONFIG_PATH
- pkg_config_dirs = []
- for p in dep_prefixes:
- for maybe in ('lib', 'lib64', 'share'):
- pcdir = join_path(p, maybe, 'pkgconfig')
+ for pre in dep_prefixes:
+ for directory in ('lib', 'lib64', 'share'):
+ pcdir = join_path(pre, directory, 'pkgconfig')
if os.path.isdir(pcdir):
- pkg_config_dirs.append(pcdir)
- env.set_path('PKG_CONFIG_PATH', pkg_config_dirs)
+ env.prepend_path('PKG_CONFIG_PATH', pcdir)
+
+ if pkg.spec.architecture.target.module_name:
+ load_module(pkg.spec.architecture.target.module_name)
return env
@@ -301,6 +364,10 @@ def get_rpaths(pkg):
if os.path.isdir(d.prefix.lib))
rpaths.extend(d.prefix.lib64 for d in pkg.spec.dependencies.values()
if os.path.isdir(d.prefix.lib64))
+ # Second module is our compiler mod name. We use that to get rpaths from
+ # module show output.
+ if pkg.compiler.modules and len(pkg.compiler.modules) > 1:
+ rpaths.append(get_path_from_module(pkg.compiler.modules[1]))
return rpaths
@@ -317,6 +384,13 @@ def parent_class_modules(cls):
return result
+def load_external_modules(pkg):
+ """ traverse the spec list and find any specs that have external modules.
+ """
+ for dep in list(pkg.spec.traverse()):
+ if dep.external_module:
+ load_module(dep.external_module)
+
def setup_package(pkg):
"""Execute all environment setup routines."""
spack_env = EnvironmentModifications()
@@ -340,7 +414,7 @@ def setup_package(pkg):
set_compiler_environment_variables(pkg, spack_env)
set_build_environment_variables(pkg, spack_env)
-
+ load_external_modules(pkg)
# traverse in postorder so package can use vars from its dependencies
spec = pkg.spec
for dspec in pkg.spec.traverse(order='post', root=False):
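Editor's note: as an illustration of what the new ``get_path_from_module`` helper scans for, the snippet below mimics its ``LD_LIBRARY_PATH`` case on invented module text; ``modulecmd`` itself is not invoked here and the module name and paths are hypothetical.

.. code-block:: python

    # Hypothetical output of `modulecmd python show cray-mathlib`; the helper
    # takes the path that sets LD_LIBRARY_PATH and trims it at '/lib'.
    text = """\
    conflict cray-mathlib
    prepend-path LD_LIBRARY_PATH /opt/hypothetical/mathlib/13.0.1/gnu/49/x86_64/lib
    """.split('\n')

    for line in text:
        if line.find('LD_LIBRARY_PATH') >= 0:
            path = line.split()[2]
            print(path[:path.find('/lib')])
            # -> /opt/hypothetical/mathlib/13.0.1/gnu/49/x86_64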
diff --git a/lib/spack/spack/cmd/arch.py b/lib/spack/spack/cmd/arch.py
index dc96dd0faa..cf2f96fd21 100644
--- a/lib/spack/spack/cmd/arch.py
+++ b/lib/spack/spack/cmd/arch.py
@@ -28,8 +28,4 @@ import spack.architecture as architecture
description = "Print the architecture for this machine"
def arch(parser, args):
- configured_sys_type = architecture.get_sys_type_from_spack_globals()
- if not configured_sys_type:
- configured_sys_type = "autodetect"
- print "Configured sys_type: %s" % configured_sys_type
- print "Autodetected default sys_type: %s" % architecture.sys_type()
+ print architecture.sys_type()
diff --git a/lib/spack/spack/cmd/compiler.py b/lib/spack/spack/cmd/compiler.py
index dc7731a290..c95045ef85 100644
--- a/lib/spack/spack/cmd/compiler.py
+++ b/lib/spack/spack/cmd/compiler.py
@@ -70,7 +70,7 @@ def setup_parser(subparser):
def compiler_find(args):
- """Search either $PATH or a list of paths for compilers and add them
+ """Search either $PATH or a list of paths OR MODULES for compilers and add them
to Spack's configuration."""
paths = args.add_paths
if not paths:
@@ -78,7 +78,6 @@ def compiler_find(args):
compilers = [c for c in spack.compilers.find_compilers(*args.add_paths)
if c.spec not in spack.compilers.all_compilers(scope=args.scope)]
-
if compilers:
spack.compilers.add_compilers_to_config(compilers, scope=args.scope)
n = len(compilers)
@@ -93,7 +92,6 @@ def compiler_find(args):
def compiler_remove(args):
cspec = CompilerSpec(args.compiler_spec)
compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope)
-
if not compilers:
tty.die("No compilers match spec %s" % cspec)
elif not args.all and len(compilers) > 1:
@@ -121,6 +119,8 @@ def compiler_info(args):
print "\tcxx = %s" % c.cxx
print "\tf77 = %s" % c.f77
print "\tfc = %s" % c.fc
+ print "\tmodules = %s" % c.modules
+ print "\toperating system = %s" % c.operating_system
def compiler_list(args):
@@ -135,10 +135,10 @@ def compiler_list(args):
def compiler(parser, args):
- action = { 'add' : compiler_find,
- 'find' : compiler_find,
- 'remove' : compiler_remove,
- 'rm' : compiler_remove,
- 'info' : compiler_info,
- 'list' : compiler_list }
+ action = {'add' : compiler_find,
+ 'find' : compiler_find,
+ 'remove' : compiler_remove,
+ 'rm' : compiler_remove,
+ 'info' : compiler_info,
+ 'list' : compiler_list }
action[args.compiler_command](args)
diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py
index c22268d534..3ec671f93f 100644
--- a/lib/spack/spack/cmd/find.py
+++ b/lib/spack/spack/cmd/find.py
@@ -31,6 +31,7 @@ import spack.spec
from llnl.util.lang import *
from llnl.util.tty.colify import *
from llnl.util.tty.color import *
+from llnl.util.lang import *
description = "Find installed spack packages"
@@ -85,6 +86,11 @@ def setup_parser(subparser):
action='store_true',
dest='missing',
help='Show missing dependencies as well as installed specs.')
+ subparser.add_argument(
+ '-v', '--variants',
+ action='store_true',
+ dest='variants',
+ help='Show variants in output (can be long)')
subparser.add_argument('-M', '--only-missing',
action='store_true',
dest='only_missing',
@@ -106,6 +112,8 @@ def display_specs(specs, **kwargs):
mode = kwargs.get('mode', 'short')
hashes = kwargs.get('long', False)
namespace = kwargs.get('namespace', False)
+ flags = kwargs.get('show_flags', False)
+ variants = kwargs.get('variants', False)
hlen = 7
if kwargs.get('very_long', False):
@@ -113,10 +121,9 @@ def display_specs(specs, **kwargs):
hlen = None
nfmt = '.' if namespace else '_'
- format_string = '$%s$@$+' % nfmt
- flags = kwargs.get('show_flags', False)
- if flags:
- format_string = '$%s$@$%%+$+' % nfmt
+ ffmt = '$%+' if flags else ''
+ vfmt = '$+' if variants else ''
+ format_string = '$%s$@%s%s' % (nfmt, ffmt, vfmt)
# Make a dict with specs keyed by architecture and compiler.
index = index_by(specs, ('architecture', 'compiler'))
@@ -162,7 +169,7 @@ def display_specs(specs, **kwargs):
string = ""
if hashes:
string += gray_hash(s, hlen) + ' '
- string += s.format('$-%s$@$+' % nfmt, color=True)
+ string += s.format('$-%s$@%s' % (nfmt, vfmt), color=True)
return string
@@ -180,6 +187,29 @@ def display_specs(specs, **kwargs):
"deps, short)." % mode) # NOQA: ignore=E501
+def query_arguments(args):
+ # Check arguments
+ if args.explicit and args.implicit:
+ tty.error('You can\'t pass -E and -e options simultaneously.')
+ raise SystemExit(1)
+
+ # Set up query arguments.
+ installed, known = True, any
+ if args.only_missing:
+ installed = False
+ elif args.missing:
+ installed = any
+ if args.unknown:
+ known = False
+ explicit = any
+ if args.explicit:
+ explicit = True
+ if args.implicit:
+ explicit = False
+ q_args = {'installed': installed, 'known': known, "explicit": explicit}
+ return q_args
+
+
def find(parser, args):
# Filter out specs that don't exist.
query_specs = spack.cmd.parse_specs(args.query_specs)
@@ -194,22 +224,7 @@ def find(parser, args):
if not query_specs:
return
- # Set up query arguments.
- installed, known = True, any
- if args.only_missing:
- installed = False
- elif args.missing:
- installed = any
- if args.unknown:
- known = False
-
- explicit = any
- if args.explicit:
- explicit = False
- if args.implicit:
- explicit = True
-
- q_args = {'installed': installed, 'known': known, "explicit": explicit}
+ q_args = query_arguments(args)
# Get all the specs the user asked for
if not query_specs:
@@ -228,4 +243,6 @@ def find(parser, args):
mode=args.mode,
long=args.long,
very_long=args.very_long,
- show_flags=args.show_flags)
+ show_flags=args.show_flags,
+ namespace=args.namespace,
+ variants=args.variants)
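A minimal sketch of what the extracted query_arguments() helper returns for the options defined above (attribute names as set in setup_parser):

q_args = query_arguments(args)
#   no flags            -> {'installed': True, 'known': any, 'explicit': any}
#   args.only_missing   -> installed = False   (args.missing -> installed = any)
#   args.unknown        -> known = False
#   args.explicit       -> explicit = True     (args.implicit -> explicit = False)
#   passing -E and -e together is rejected with an error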
diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py
index 9fdf3045b2..a6f08d09ed 100644
--- a/lib/spack/spack/cmd/uninstall.py
+++ b/lib/spack/spack/cmd/uninstall.py
@@ -39,6 +39,13 @@ error_message = """You can either:
b) use spack uninstall -a to uninstall ALL matching specs.
"""
+# Arguments for display_specs when we find ambiguity
+display_args = {
+ 'long': True,
+ 'show_flags': True,
+ 'variants':True
+}
+
def ask_for_confirmation(message):
while True:
@@ -92,7 +99,7 @@ def concretize_specs(specs, allow_multiple_matches=False, force=False):
if not allow_multiple_matches and len(matching) > 1:
tty.error("%s matches multiple packages:" % spec)
print()
- display_specs(matching, long=True, show_flags=True)
+ display_specs(matching, **display_args)
print()
has_errors = True
@@ -172,7 +179,7 @@ def uninstall(parser, args):
tty.error("Will not uninstall %s" % spec.format("$_$@$%@$#", color=True))
print('')
print("The following packages depend on it:")
- display_specs(lst, long=True)
+ display_specs(lst, **display_args)
print('')
has_error = True
elif args.dependents:
@@ -186,7 +193,7 @@ def uninstall(parser, args):
if not args.yes_to_all:
tty.msg("The following packages will be uninstalled : ")
print('')
- display_specs(uninstall_list, long=True, show_flags=True)
+ display_specs(uninstall_list, **display_args)
print('')
ask_for_confirmation('Do you want to proceed ? ')
diff --git a/lib/spack/spack/cmd/view.py b/lib/spack/spack/cmd/view.py
new file mode 100644
index 0000000000..8f1fc9be74
--- /dev/null
+++ b/lib/spack/spack/cmd/view.py
@@ -0,0 +1,295 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+'''Produce a "view" of a Spack DAG.
+
+A "view" is file hierarchy representing the union of a number of
+Spack-installed package file hierarchies. The union is formed from:
+
+- specs resolved from the package names given by the user (the seeds)
+
+- all dependencies of the seeds unless the user passes `--dependencies false`
+
+- less any specs with names matching the regular expressions given by
+ `--exclude`
+
+The `view` can be built and torn down via a number of methods (the "actions"):
+
+- symlink :: a file system view which is a directory hierarchy that is
+ the union of the hierarchies of the installed packages in the DAG
+ where installed files are referenced via symlinks.
+
+- hardlink :: like the symlink view but hardlinks are used.
+
+- statlink :: a view producing a status report of a symlink or
+ hardlink view.
+
+The file system view concept is inspired by Nix, implemented by
+brett.viren@gmail.com ca 2016.
+
+'''
+# Implementation notes:
+#
+# This is implemented as a visitor pattern on the set of package specs.
+#
+# The command line ACTION maps to a visitor_*() function which takes
+# the set of package specs and any args which may be specific to the
+# ACTION.
+#
+# To add a new view:
+# 1. add a new cmd line args sub parser ACTION
+# 2. add any action-specific options/arguments, most likely a list of specs.
+# 3. add a visitor_MYACTION() function
+# 4. add any visitor_MYALIAS assignments to match any command line aliases
+
+import os
+import re
+import spack
+import spack.cmd
+import llnl.util.tty as tty
+
+description = "Produce a single-rooted directory view of a spec."
+
+
+def setup_parser(sp):
+ setup_parser.parser = sp
+
+ sp.add_argument(
+ '-v', '--verbose', action='store_true', default=False,
+ help="Display verbose output.")
+ sp.add_argument(
+ '-e', '--exclude', action='append', default=[],
+ help="Exclude packages with names matching the given regex pattern.")
+ sp.add_argument(
+ '-d', '--dependencies', choices=['true', 'false', 'yes', 'no'],
+ default='true',
+ help="Follow dependencies.")
+
+ ssp = sp.add_subparsers(metavar='ACTION', dest='action')
+
+ specs_opts = dict(metavar='spec', nargs='+',
+ help="Seed specs of the packages to view.")
+
+ # The action parameterizes the command but in keeping with Spack
+ # patterns we make it a subcommand.
+ file_system_view_actions = [
+ ssp.add_parser(
+ 'symlink', aliases=['add', 'soft'],
+ help='Add package files to a filesystem view via symbolic links.'),
+ ssp.add_parser(
+ 'hardlink', aliases=['hard'],
+ help='Add package files to a filesystem view via hard links.'),
+ ssp.add_parser(
+ 'remove', aliases=['rm'],
+ help='Remove packages from a filesystem view.'),
+ ssp.add_parser(
+ 'statlink', aliases=['status', 'check'],
+ help='Check status of packages in a filesystem view.')
+ ]
+ # All these options and arguments are common to every action.
+ for act in file_system_view_actions:
+ act.add_argument('path', nargs=1,
+ help="Path to file system view directory.")
+ act.add_argument('specs', **specs_opts)
+
+ return
+
+
+def assuredir(path):
+ 'Assure path exists as a directory'
+ if not os.path.exists(path):
+ os.makedirs(path)
+
+
+def relative_to(prefix, path):
+ 'Return end of `path` relative to `prefix`'
+ assert 0 == path.find(prefix)
+ reldir = path[len(prefix):]
+ if reldir.startswith('/'):
+ reldir = reldir[1:]
+ return reldir
+
+
+def transform_path(spec, path, prefix=None):
+ 'Return the relative path corresponding to the given path under spec.prefix'
+ if os.path.isabs(path):
+ path = relative_to(spec.prefix, path)
+ subdirs = path.split(os.path.sep)
+ if subdirs[0] == '.spack':
+ lst = ['.spack', spec.name] + subdirs[1:]
+ path = os.path.join(*lst)
+ if prefix:
+ path = os.path.join(prefix, path)
+ return path
+
+
+def purge_empty_directories(path):
+ '''Ascend from the leaves accessible from `path`
+ and remove empty directories.'''
+ for dirpath, subdirs, files in os.walk(path, topdown=False):
+ for sd in subdirs:
+ sdp = os.path.join(dirpath, sd)
+ try:
+ os.rmdir(sdp)
+ except OSError:
+ pass
+
+
+def filter_exclude(specs, exclude):
+ 'Filter out specs whose names match any of the exclude regexes'
+ to_exclude = [re.compile(e) for e in exclude]
+
+ def exclude(spec):
+ for e in to_exclude:
+ if e.match(spec.name):
+ return True
+ return False
+ return [s for s in specs if not exclude(s)]
+
+
+def flatten(seeds, descend=True):
+ 'Normalize and flatten seed specs, optionally descending the hierarchy'
+ flat = set()
+ for spec in seeds:
+ if not descend:
+ flat.add(spec)
+ continue
+ flat.update(spec.normalized().traverse())
+ return flat
+
+
+def check_one(spec, path, verbose=False):
+ 'Check status of view in path against spec'
+ dotspack = os.path.join(path, '.spack', spec.name)
+ if os.path.exists(os.path.join(dotspack)):
+ tty.info('Package in view: "%s"' % spec.name)
+ return
+ tty.info('Package not in view: "%s"' % spec.name)
+ return
+
+
+def remove_one(spec, path, verbose=False):
+ 'Remove any files found in `spec` from `path` and purge empty directories.'
+
+ if not os.path.exists(path):
+ return # done, short circuit
+
+ dotspack = transform_path(spec, '.spack', path)
+ if not os.path.exists(dotspack):
+ if verbose:
+ tty.info('Skipping nonexistent package: "%s"' % spec.name)
+ return
+
+ if verbose:
+ tty.info('Removing package: "%s"' % spec.name)
+ for dirpath, dirnames, filenames in os.walk(spec.prefix):
+ if not filenames:
+ continue
+ targdir = transform_path(spec, dirpath, path)
+ for fname in filenames:
+ dst = os.path.join(targdir, fname)
+ if not os.path.exists(dst):
+ continue
+ os.unlink(dst)
+
+
+def link_one(spec, path, link=os.symlink, verbose=False):
+ 'Link all files in `spec` into directory `path`.'
+
+ dotspack = transform_path(spec, '.spack', path)
+ if os.path.exists(dotspack):
+ tty.warn('Skipping existing package: "%s"' % spec.name)
+ return
+
+ if verbose:
+ tty.info('Linking package: "%s"' % spec.name)
+ for dirpath, dirnames, filenames in os.walk(spec.prefix):
+ if not filenames:
+ continue # avoid explicitly making empty dirs
+
+ targdir = transform_path(spec, dirpath, path)
+ assuredir(targdir)
+
+ for fname in filenames:
+ src = os.path.join(dirpath, fname)
+ dst = os.path.join(targdir, fname)
+ if os.path.exists(dst):
+ if '.spack' in dst.split(os.path.sep):
+ continue # silence these
+ tty.warn("Skipping existing file: %s" % dst)
+ continue
+ link(src, dst)
+
+
+def visitor_symlink(specs, args):
+ 'Symlink all files found in specs'
+ path = args.path[0]
+ assuredir(path)
+ for spec in specs:
+ link_one(spec, path, verbose=args.verbose)
+visitor_add = visitor_symlink
+visitor_soft = visitor_symlink
+
+
+def visitor_hardlink(specs, args):
+ 'Hardlink all files found in specs'
+ path = args.path[0]
+ assuredir(path)
+ for spec in specs:
+ link_one(spec, path, os.link, verbose=args.verbose)
+visitor_hard = visitor_hardlink
+
+
+def visitor_remove(specs, args):
+ 'Remove all files and directories found in specs from args.path'
+ path = args.path[0]
+ for spec in specs:
+ remove_one(spec, path, verbose=args.verbose)
+ purge_empty_directories(path)
+visitor_rm = visitor_remove
+
+
+def visitor_statlink(specs, args):
+ 'Give status of view in args.path relative to specs'
+ path = args.path[0]
+ for spec in specs:
+ check_one(spec, path, verbose=args.verbose)
+visitor_status = visitor_statlink
+visitor_check = visitor_statlink
+
+
+def view(parser, args):
+ 'Produce a view of a set of packages.'
+
+ # Process common args
+ seeds = [spack.cmd.disambiguate_spec(s) for s in args.specs]
+ specs = flatten(seeds, args.dependencies.lower() in ['yes', 'true'])
+ specs = filter_exclude(specs, args.exclude)
+
+ # Execute the visitation.
+ try:
+ visitor = globals()['visitor_' + args.action]
+ except KeyError:
+ tty.die('Unknown action: "%s"' % args.action)
+ visitor(specs, args)
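A brief usage sketch for the new command; the view path and package names below are illustrative, not taken from the change:

# spack view symlink  ~/myview libelf           # link libelf and its dependencies
# spack view -d false hardlink ~/myview libelf  # seed spec only, hard links
# spack view statlink ~/myview libelf           # report which packages are in the view
# spack view -e '^libdwarf' remove ~/myview libelf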
diff --git a/lib/spack/spack/compiler.py b/lib/spack/spack/compiler.py
index 2ae305f201..ce4555bc56 100644
--- a/lib/spack/spack/compiler.py
+++ b/lib/spack/spack/compiler.py
@@ -33,6 +33,7 @@ from llnl.util.filesystem import join_path
import spack.error
import spack.spec
+import spack.architecture
from spack.util.multiproc import parmap
from spack.util.executable import *
from spack.util.environment import get_path
@@ -107,19 +108,32 @@ class Compiler(object):
@property
def fc_rpath_arg(self):
return '-Wl,-rpath,'
+ # Cray PrgEnv name that can be used to load this compiler
+ PrgEnv = None
+ # Name of module used to switch versions of this compiler
+ PrgEnv_compiler = None
-
- def __init__(self, cspec, cc, cxx, f77, fc, **kwargs):
+ def __init__(self, cspec, operating_system,
+ paths, modules=[], alias=None, **kwargs):
def check(exe):
if exe is None:
return None
_verify_executables(exe)
return exe
- self.cc = check(cc)
- self.cxx = check(cxx)
- self.f77 = check(f77)
- self.fc = check(fc)
+ self.cc = check(paths[0])
+ self.cxx = check(paths[1])
+ if len(paths) > 2:
+ self.f77 = check(paths[2])
+ if len(paths) == 3:
+ self.fc = self.f77
+ else:
+ self.fc = check(paths[3])
+
+ #self.cc = check(cc)
+ #self.cxx = check(cxx)
+ #self.f77 = check(f77)
+ #self.fc = check(fc)
# Unfortunately have to make sure these params are accepted
# in the same order they are returned by sorted(flags)
@@ -130,8 +144,10 @@ class Compiler(object):
if value is not None:
self.flags[flag] = value.split()
+ self.operating_system = operating_system
self.spec = cspec
-
+ self.modules = modules
+ self.alias = alias
@property
def version(self):
@@ -258,57 +274,6 @@ class Compiler(object):
successful.reverse()
return dict(((v, p, s), path) for v, p, s, path in successful)
- @classmethod
- def find(cls, *path):
- """Try to find this type of compiler in the user's
- environment. For each set of compilers found, this returns
- compiler objects with the cc, cxx, f77, fc paths and the
- version filled in.
-
- This will search for compilers with the names in cc_names,
- cxx_names, etc. and it will group them if they have common
- prefixes, suffixes, and versions. e.g., gcc-mp-4.7 would
- be grouped with g++-mp-4.7 and gfortran-mp-4.7.
- """
- dicts = parmap(
- lambda t: cls._find_matches_in_path(*t),
- [(cls.cc_names, cls.cc_version) + tuple(path),
- (cls.cxx_names, cls.cxx_version) + tuple(path),
- (cls.f77_names, cls.f77_version) + tuple(path),
- (cls.fc_names, cls.fc_version) + tuple(path)])
-
- all_keys = set()
- for d in dicts:
- all_keys.update(d)
-
- compilers = {}
- for k in all_keys:
- ver, pre, suf = k
-
- # Skip compilers with unknown version.
- if ver == 'unknown':
- continue
-
- paths = tuple(pn[k] if k in pn else None for pn in dicts)
- spec = spack.spec.CompilerSpec(cls.name, ver)
-
- if ver in compilers:
- prev = compilers[ver]
-
- # prefer the one with more compilers.
- prev_paths = [prev.cc, prev.cxx, prev.f77, prev.fc]
- newcount = len([p for p in paths if p is not None])
- prevcount = len([p for p in prev_paths if p is not None])
-
- # Don't add if it's not an improvement over prev compiler.
- if newcount <= prevcount:
- continue
-
- compilers[ver] = cls(spec, *paths)
-
- return list(compilers.values())
-
-
def __repr__(self):
"""Return a string representation of the compiler toolchain."""
return self.__str__()
@@ -317,7 +282,7 @@ class Compiler(object):
def __str__(self):
"""Return a string representation of the compiler toolchain."""
return "%s(%s)" % (
- self.name, '\n '.join((str(s) for s in (self.cc, self.cxx, self.f77, self.fc))))
+ self.name, '\n '.join((str(s) for s in (self.cc, self.cxx, self.f77, self.fc, self.modules, str(self.operating_system)))))
class CompilerAccessError(spack.error.SpackError):
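A minimal sketch of the reworked constructor under its new signature; the paths, version, and operating system below are illustrative only:

import spack.spec
from spack.compilers.gcc import Gcc
from spack.operating_systems.linux_distro import LinuxDistro

gcc = Gcc(spack.spec.CompilerSpec('gcc@4.9.2'),     # cspec
          LinuxDistro(),                            # operating_system
          ['/usr/bin/gcc', '/usr/bin/g++',          # paths: cc, cxx,
           '/usr/bin/gfortran'],                    # f77 (fc falls back to f77)
          modules=[], alias=None)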
diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py
index 7c951ae8bc..ae72b743b2 100644
--- a/lib/spack/spack/compilers/__init__.py
+++ b/lib/spack/spack/compilers/__init__.py
@@ -28,6 +28,11 @@ system and configuring Spack to use multiple compilers.
import imp
import os
import platform
+import copy
+import hashlib
+import base64
+import yaml
+import sys
from llnl.util.lang import memoized, list_modules
from llnl.util.filesystem import join_path
@@ -45,7 +50,9 @@ from spack.util.naming import mod_to_class
from spack.util.environment import get_path
_imported_compilers_module = 'spack.compilers'
-_required_instance_vars = ['cc', 'cxx', 'f77', 'fc']
+_path_instance_vars = ['cc', 'cxx', 'f77', 'fc']
+_other_instance_vars = ['modules', 'operating_system']
+_cache_config_file = []
# TODO: customize order in config file
if platform.system() == 'Darwin':
@@ -64,107 +71,105 @@ def _auto_compiler_spec(function):
def _to_dict(compiler):
"""Return a dict version of compiler suitable to insert in YAML."""
- return {
- str(compiler.spec) : dict(
- (attr, getattr(compiler, attr, None))
- for attr in _required_instance_vars)
- }
+ d = {}
+ d['spec'] = str(compiler.spec)
+ d['paths'] = dict( (attr, getattr(compiler, attr, None)) for attr in _path_instance_vars )
+ d['operating_system'] = str(compiler.operating_system)
+ d['modules'] = compiler.modules if compiler.modules else []
+ if compiler.alias:
+ d['alias'] = compiler.alias
-def get_compiler_config(arch=None, scope=None):
+ return {'compiler': d}
+
+
+def get_compiler_config(scope=None):
"""Return the compiler configuration for the specified architecture.
"""
- # Check whether we're on a front-end (native) architecture.
- my_arch = spack.architecture.sys_type()
- if arch is None:
- arch = my_arch
-
def init_compiler_config():
"""Compiler search used when Spack has no compilers."""
- config[arch] = {}
- compilers = find_compilers(*get_path('PATH'))
+ compilers = find_compilers()
+ compilers_dict = []
for compiler in compilers:
- config[arch].update(_to_dict(compiler))
- spack.config.update_config('compilers', config, scope=scope)
+ compilers_dict.append(_to_dict(compiler))
+ spack.config.update_config('compilers', compilers_dict, scope=scope)
config = spack.config.get_config('compilers', scope=scope)
-
# Update the configuration if there are currently no compilers
# configured. Avoid updating automatically if there ARE site
# compilers configured but no user ones.
- if arch == my_arch and arch not in config:
+ if not config:
if scope is None:
# We know no compilers were configured in any scope.
init_compiler_config()
+ config = spack.config.get_config('compilers', scope=scope)
elif scope == 'user':
# Check the site config and update the user config if
# nothing is configured at the site level.
site_config = spack.config.get_config('compilers', scope='site')
if not site_config:
init_compiler_config()
-
- return config[arch] if arch in config else {}
+ config = spack.config.get_config('compilers', scope=scope)
+ return config
+ elif config:
+ return config
+ else:
+ return [] # Return empty list which we will later append to.
-def add_compilers_to_config(compilers, arch=None, scope=None):
+def add_compilers_to_config(compilers, scope=None):
"""Add compilers to the config for the specified architecture.
Arguments:
- compilers: a list of Compiler objects.
- - arch: arch to add compilers for.
- scope: configuration scope to modify.
"""
- if arch is None:
- arch = spack.architecture.sys_type()
-
- compiler_config = get_compiler_config(arch, scope)
+ compiler_config = get_compiler_config(scope)
for compiler in compilers:
- compiler_config[str(compiler.spec)] = dict(
- (c, getattr(compiler, c, "None"))
- for c in _required_instance_vars)
-
- update = { arch : compiler_config }
- spack.config.update_config('compilers', update, scope)
+ compiler_config.append(_to_dict(compiler))
+ global _cache_config_file
+ _cache_config_file = compiler_config
+ spack.config.update_config('compilers', compiler_config, scope)
@_auto_compiler_spec
-def remove_compiler_from_config(compiler_spec, arch=None, scope=None):
+def remove_compiler_from_config(compiler_spec, scope=None):
"""Remove compilers from the config, by spec.
Arguments:
- compiler_specs: a list of CompilerSpec objects.
- - arch: arch to add compilers for.
- scope: configuration scope to modify.
"""
- if arch is None:
- arch = spack.architecture.sys_type()
-
- compiler_config = get_compiler_config(arch, scope)
- del compiler_config[str(compiler_spec)]
- update = { arch : compiler_config }
-
- spack.config.update_config('compilers', update, scope)
-
-
-def all_compilers_config(arch=None, scope=None):
+ compiler_config = get_compiler_config(scope)
+ config_length = len(compiler_config)
+
+ filtered_compiler_config = [comp for comp in compiler_config
+ if spack.spec.CompilerSpec(comp['compiler']['spec']) != compiler_spec]
+ # Need a better way for this
+ global _cache_config_file
+ _cache_config_file = filtered_compiler_config # Update the cache for changes
+ if len(filtered_compiler_config) == config_length: # No items removed
+ raise CompilerSpecInsufficientlySpecificError(compiler_spec)
+ spack.config.update_config('compilers', filtered_compiler_config, scope)
+
+
+def all_compilers_config(scope=None):
"""Return a set of specs for all the compiler versions currently
available to build with. These are instances of CompilerSpec.
"""
# Get compilers for this architecture.
- arch_config = get_compiler_config(arch, scope)
-
- # Merge 'all' compilers with arch-specific ones.
- # Arch-specific compilers have higher precedence.
- merged_config = get_compiler_config('all', scope=scope)
- merged_config = spack.config._merge_yaml(merged_config, arch_config)
-
- return merged_config
+ global _cache_config_file  # Cache the config file so we don't have to load it every time.
+ if not _cache_config_file:
+ _cache_config_file = get_compiler_config(scope)
+ return _cache_config_file
+ else:
+ return _cache_config_file
-def all_compilers(arch=None, scope=None):
+def all_compilers(scope=None):
# Return compiler specs from the merged config.
- return [spack.spec.CompilerSpec(s)
- for s in all_compilers_config(arch, scope)]
+ return [spack.spec.CompilerSpec(s['compiler']['spec'])
+ for s in all_compilers_config(scope)]
def default_compiler():
@@ -179,36 +184,18 @@ def default_compiler():
return sorted(versions)[-1]
-def find_compilers(*path):
+def find_compilers(*paths):
"""Return a list of compilers found in the suppied paths.
- This invokes the find() method for each Compiler class,
- and appends the compilers detected to a list.
+ This invokes the find_compilers() method for each operating
+ system associated with the host platform, and appends
+ the compilers detected to a list.
"""
- # Make sure path elements exist, and include /bin directories
- # under prefixes.
- filtered_path = []
- for p in path:
- # Eliminate symlinks and just take the real directories.
- p = os.path.realpath(p)
- if not os.path.isdir(p):
- continue
- filtered_path.append(p)
-
- # Check for a bin directory, add it if it exists
- bin = join_path(p, 'bin')
- if os.path.isdir(bin):
- filtered_path.append(os.path.realpath(bin))
-
- # Once the paths are cleaned up, do a search for each type of
- # compiler. We can spawn a bunch of parallel searches to reduce
- # the overhead of spelunking all these directories.
- types = all_compiler_types()
- compiler_lists = parmap(lambda cls: cls.find(*filtered_path), types)
-
- # ensure all the version calls we made are cached in the parent
- # process, as well. This speeds up Spack a lot.
- clist = reduce(lambda x,y: x+y, compiler_lists)
- return clist
+ # Find compilers for each operating system class
+ oss = all_os_classes()
+ compiler_lists = []
+ for o in oss:
+ compiler_lists.extend(o.find_compilers(*paths))
+ return compiler_lists
def supported_compilers():
@@ -227,51 +214,83 @@ def supported(compiler_spec):
@_auto_compiler_spec
-def find(compiler_spec, arch=None, scope=None):
+def find(compiler_spec, scope=None):
"""Return specs of available compilers that match the supplied
compiler spec. Return an list if nothing found."""
- return [c for c in all_compilers(arch, scope) if c.satisfies(compiler_spec)]
+ return [c for c in all_compilers(scope) if c.satisfies(compiler_spec)]
@_auto_compiler_spec
-def compilers_for_spec(compiler_spec, arch=None, scope=None):
+def compilers_for_spec(compiler_spec, scope=None, **kwargs):
"""This gets all compilers that satisfy the supplied CompilerSpec.
Returns an empty list if none are found.
"""
- config = all_compilers_config(arch, scope)
+ platform = kwargs.get("platform", None)
+ config = all_compilers_config(scope)
+
+ def get_compilers(cspec):
+ compilers = []
- def get_compiler(cspec):
- items = config[str(cspec)]
+ for items in config:
+ if items['compiler']['spec'] != str(cspec):
+ continue
+ items = items['compiler']
- if not all(n in items for n in _required_instance_vars):
- raise InvalidCompilerConfigurationError(cspec)
+ if not ('paths' in items and all(n in items['paths'] for n in _path_instance_vars)):
+ raise InvalidCompilerConfigurationError(cspec)
- cls = class_for_compiler_name(cspec.name)
- compiler_paths = []
- for c in _required_instance_vars:
- compiler_path = items[c]
- if compiler_path != "None":
- compiler_paths.append(compiler_path)
+ cls = class_for_compiler_name(cspec.name)
+
+ compiler_paths = []
+ for c in _path_instance_vars:
+ compiler_path = items['paths'][c]
+ if compiler_path != "None":
+ compiler_paths.append(compiler_path)
+ else:
+ compiler_paths.append(None)
+
+ mods = items.get('modules')
+ if mods == 'None':
+ mods = []
+
+ if 'operating_system' in items:
+ operating_system = spack.architecture._operating_system_from_dict(items['operating_system'], platform)
else:
- compiler_paths.append(None)
+ operating_system = None
+
- flags = {}
- for f in spack.spec.FlagMap.valid_compiler_flags():
- if f in items:
- flags[f] = items[f]
- return cls(cspec, *compiler_paths, **flags)
+ alias = items['alias'] if 'alias' in items else None
- matches = find(compiler_spec, arch, scope)
- return [get_compiler(cspec) for cspec in matches]
+ flags = {}
+ for f in spack.spec.FlagMap.valid_compiler_flags():
+ if f in items:
+ flags[f] = items[f]
+
+ compilers.append(cls(cspec, operating_system, compiler_paths, mods, alias, **flags))
+
+ return compilers
+
+ matches = set(find(compiler_spec, scope))
+ compilers = []
+ for cspec in matches:
+ compilers.extend(get_compilers(cspec))
+ return compilers
+# return [get_compilers(cspec) for cspec in matches]
@_auto_compiler_spec
-def compiler_for_spec(compiler_spec):
+def compiler_for_spec(compiler_spec, arch):
"""Get the compiler that satisfies compiler_spec. compiler_spec must
be concrete."""
+ operating_system = arch.platform_os
assert(compiler_spec.concrete)
- compilers = compilers_for_spec(compiler_spec)
- assert(len(compilers) == 1)
+
+ compilers = [c for c in compilers_for_spec(compiler_spec, platform=arch.platform)
+ if c.operating_system == operating_system]
+ if len(compilers) < 1:
+ raise NoCompilerForSpecError(compiler_spec, operating_system)
+ if len(compilers) > 1:
+ raise CompilerSpecInsufficientlySpecificError(compiler_spec)
return compilers[0]
@@ -289,6 +308,19 @@ def class_for_compiler_name(compiler_name):
return cls
+def all_os_classes():
+ """
+ Return the list of classes for all operating systems available on
+ this platform
+ """
+ classes = []
+
+ platform = spack.architecture.sys_type()
+ for os_class in platform.operating_sys.values():
+ classes.append(os_class)
+
+ return classes
+
def all_compiler_types():
return [class_for_compiler_name(c) for c in supported_compilers()]
@@ -298,9 +330,19 @@ class InvalidCompilerConfigurationError(spack.error.SpackError):
super(InvalidCompilerConfigurationError, self).__init__(
"Invalid configuration for [compiler \"%s\"]: " % compiler_spec,
"Compiler configuration must contain entries for all compilers: %s"
- % _required_instance_vars)
+ % _path_instance_vars)
class NoCompilersError(spack.error.SpackError):
def __init__(self):
super(NoCompilersError, self).__init__("Spack could not find any compilers!")
+
+class NoCompilerForSpecError(spack.error.SpackError):
+ def __init__(self, compiler_spec, target):
+ super(NoCompilerForSpecError, self).__init__("No compilers for operating system %s satisfy spec %s" % (
+ target, compiler_spec))
+
+class CompilerSpecInsufficientlySpecificError(spack.error.SpackError):
+ def __init__(self, compiler_spec):
+ super(CompilerSpecInsufficientlySpecificError, self).__init__("Multiple compilers satisfy spec %s" % compiler_spec)
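For reference, a sketch of the per-compiler dictionary _to_dict() now emits; the 'compilers' section of compilers.yaml becomes a list of such entries (all values here are illustrative):

entry = {'compiler': {
    'spec': 'gcc@4.9.2',
    'operating_system': 'CNL10',   # str() of the OperatingSystem object
    'modules': ['PrgEnv-gnu', 'gcc/4.9.2'],
    'paths': {'cc': 'cc', 'cxx': 'CC', 'f77': 'ftn', 'fc': 'ftn'}}}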
diff --git a/lib/spack/spack/compilers/clang.py b/lib/spack/spack/compilers/clang.py
index 072bcd065f..00b406d820 100644
--- a/lib/spack/spack/compilers/clang.py
+++ b/lib/spack/spack/compilers/clang.py
@@ -73,7 +73,7 @@ class Clang(Compiler):
return "-std=c++11"
@classmethod
- def default_version(self, comp):
+ def default_version(cls, comp):
"""The '--version' option works for clang compilers.
On most platforms, output looks like this::
diff --git a/lib/spack/spack/compilers/craype.py b/lib/spack/spack/compilers/craype.py
new file mode 100644
index 0000000000..4ba8b110ec
--- /dev/null
+++ b/lib/spack/spack/compilers/craype.py
@@ -0,0 +1,58 @@
+##############################################################################
+# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://scalability-llnl.github.io/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import llnl.util.tty as tty
+
+#from spack.build_environment import load_module
+from spack.compiler import *
+#from spack.version import ver
+
+class Craype(Compiler):
+ # Subclasses use possible names of C compiler
+ cc_names = ['cc']
+
+ # Subclasses use possible names of C++ compiler
+ cxx_names = ['CC']
+
+ # Subclasses use possible names of Fortran 77 compiler
+ f77_names = ['ftn']
+
+ # Subclasses use possible names of Fortran 90 compiler
+ fc_names = ['ftn']
+
+ # MacPorts builds gcc versions with prefixes and -mp-X.Y suffixes.
+ suffixes = [r'-mp-\d\.\d']
+
+ PrgEnv = 'PrgEnv-cray'
+ PrgEnv_compiler = 'craype'
+
+ link_paths = { 'cc' : 'cc',
+ 'cxx' : 'c++',
+ 'f77' : 'f77',
+ 'fc' : 'fc'}
+
+ @classmethod
+ def default_version(cls, comp):
+ return get_compiler_version(comp, r'([Vv]ersion).*(\d+(\.\d+)+)')
+
diff --git a/lib/spack/spack/compilers/gcc.py b/lib/spack/spack/compilers/gcc.py
index 164bddeb3f..3f552eaece 100644
--- a/lib/spack/spack/compilers/gcc.py
+++ b/lib/spack/spack/compilers/gcc.py
@@ -49,6 +49,9 @@ class Gcc(Compiler):
'f77' : 'gcc/gfortran',
'fc' : 'gcc/gfortran' }
+ PrgEnv = 'PrgEnv-gnu'
+ PrgEnv_compiler = 'gcc'
+
@property
def openmp_flag(self):
return "-fopenmp"
@@ -74,9 +77,9 @@ class Gcc(Compiler):
return get_compiler_version(
fc, '-dumpversion',
# older gfortran versions don't have simple dumpversion output.
- r'(?:GNU Fortran \(GCC\))?(\d+\.\d+(?:\.\d+)?)')
+ r'(?:GNU Fortran \(GCC\))?(\d+\.\d+(?:\.\d+)?)', module)
@classmethod
def f77_version(cls, f77):
- return cls.fc_version(f77)
+ return cls.fc_version(f77, module)
diff --git a/lib/spack/spack/compilers/intel.py b/lib/spack/spack/compilers/intel.py
index 5007ece645..6cad03ff47 100644
--- a/lib/spack/spack/compilers/intel.py
+++ b/lib/spack/spack/compilers/intel.py
@@ -45,6 +45,9 @@ class Intel(Compiler):
'f77' : 'intel/ifort',
'fc' : 'intel/ifort' }
+ PrgEnv = 'PrgEnv-intel'
+ PrgEnv_compiler = 'intel'
+
@property
def openmp_flag(self):
if self.version < ver('16.0'):
diff --git a/lib/spack/spack/compilers/pgi.py b/lib/spack/spack/compilers/pgi.py
index d42148dc49..6d36d8bfa6 100644
--- a/lib/spack/spack/compilers/pgi.py
+++ b/lib/spack/spack/compilers/pgi.py
@@ -44,6 +44,12 @@ class Pgi(Compiler):
'f77' : 'pgi/pgfortran',
'fc' : 'pgi/pgfortran' }
+
+
+ PrgEnv = 'PrgEnv-pgi'
+ PrgEnv_compiler = 'pgi'
+
+
@property
def openmp_flag(self):
return "-mp"
@@ -52,7 +58,6 @@ class Pgi(Compiler):
def cxx11_flag(self):
return "-std=c++11"
-
@classmethod
def default_version(cls, comp):
"""The '-V' option works for all the PGI compilers.
diff --git a/lib/spack/spack/compilers/xl.py b/lib/spack/spack/compilers/xl.py
index bda2de4b87..b1431436ad 100644
--- a/lib/spack/spack/compilers/xl.py
+++ b/lib/spack/spack/compilers/xl.py
@@ -56,8 +56,9 @@ class Xl(Compiler):
else:
return "-qlanglvl=extended0x"
+
@classmethod
- def default_version(self, comp):
+ def default_version(cls, comp):
"""The '-qversion' is the standard option fo XL compilers.
Output looks like this::
@@ -83,6 +84,7 @@ class Xl(Compiler):
return get_compiler_version(
comp, '-qversion',r'([0-9]?[0-9]\.[0-9])')
+
@classmethod
def fc_version(cls, fc):
"""The fortran and C/C++ versions of the XL compiler are always two units apart.
diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py
index 4f78bfc347..1f37455c77 100644
--- a/lib/spack/spack/concretize.py
+++ b/lib/spack/spack/concretize.py
@@ -84,7 +84,8 @@ class DefaultConcretizer(object):
raise NoBuildError(spec)
def cmp_externals(a, b):
- if a.name != b.name:
+ if a.name != b.name and (not a.external or a.external_module and
+ not b.external and b.external_module):
# We're choosing between different providers, so
# maintain order from provider sort
return candidates.index(a) - candidates.index(b)
@@ -187,31 +188,64 @@ class DefaultConcretizer(object):
return True # Things changed
+ def _concretize_operating_system(self, spec):
+ platform = spec.architecture.platform
+ if spec.architecture.platform_os is not None and isinstance(
+ spec.architecture.platform_os,spack.architecture.OperatingSystem):
+ return False
- def concretize_architecture(self, spec):
- """If the spec already had an architecture, return. Otherwise if
- the root of the DAG has an architecture, then use that.
- Otherwise take the system's default architecture.
-
- Intuition: Architectures won't be set a lot, and generally you
- want the host system's architecture. When architectures are
- mised in a spec, it is likely because the tool requries a
- cross-compiled component, e.g. for tools that run on BlueGene
- or Cray machines. These constraints will likely come directly
- from packages, so require the user to be explicit if they want
- to mess with the architecture, and revert to the default when
- they're not explicit.
- """
- if spec.architecture is not None:
+ if spec.root.architecture and spec.root.architecture.platform_os:
+ if isinstance(spec.root.architecture.platform_os,spack.architecture.OperatingSystem):
+ spec.architecture.platform_os = spec.root.architecture.platform_os
+ else:
+ spec.architecture.platform_os = spec.architecture.platform.operating_system('default_os')
+ return True #changed
+
+ def _concretize_target(self, spec):
+ platform = spec.architecture.platform
+ if spec.architecture.target is not None and isinstance(
+ spec.architecture.target, spack.architecture.Target):
return False
+ if spec.root.architecture and spec.root.architecture.target:
+ if isinstance(spec.root.architecture.target,spack.architecture.Target):
+ spec.architecture.target = spec.root.architecture.target
+ else:
+ spec.architecture.target = spec.architecture.platform.target('default_target')
+ return True #changed
- if spec.root.architecture:
- spec.architecture = spec.root.architecture
+ def _concretize_platform(self, spec):
+ if spec.architecture.platform is not None and isinstance(
+ spec.architecture.platform, spack.architecture.Platform):
+ return False
+ if spec.root.architecture and spec.root.architecture.platform:
+ if isinstance(spec.root.architecture.platform,spack.architecture.Platform):
+ spec.architecture.platform = spec.root.architecture.platform
else:
- spec.architecture = spack.architecture.sys_type()
+ spec.architecture.platform = spack.architecture.sys_type()
+ return True #changed?
+
+ def concretize_architecture(self, spec):
+ """If the spec is empty provide the defaults of the platform. If the
+ architecture is not a basestring, then check if either the platform,
+ target or operating system are concretized. If any of the fields are
+ changed then return True. If everything is concretized (i.e the
+ architecture attribute is a namedtuple of classes) then return False.
+ If the target is a string type, then convert the string into a
+ concretized architecture. If it has no architecture and the root of the
+ DAG has an architecture, then use the root otherwise use the defaults
+ on the platform.
+ """
+ if spec.architecture is None:
+ # Set the architecture to all defaults
+ spec.architecture = spack.architecture.Arch()
+ return True
+
+ # Concretize the operating_system and target based of the spec
+ ret = any((self._concretize_platform(spec),
+ self._concretize_operating_system(spec),
+ self._concretize_target(spec)))
+ return ret
- assert(spec.architecture is not None)
- return True # changed
def concretize_variants(self, spec):
@@ -238,6 +272,23 @@ class DefaultConcretizer(object):
build with the compiler that will be used by libraries that
link to this one, to maximize compatibility.
"""
+ # Pass on concretizing the compiler if the operating system is not yet determined
+ if not spec.architecture.platform_os:
+ # Returning True usually indicates a change; here it means we are awaiting other changes
+ return True
+
+ # Only use a matching compiler if it is of the proper style
+ # Takes advantage of the proper logic already existing in compiler_for_spec
+ # Should think whether this can be more efficient
+ def _proper_compiler_style(cspec, arch):
+ platform = arch.platform
+ compilers = spack.compilers.compilers_for_spec(cspec,
+ platform=platform)
+ return filter(lambda c: c.operating_system ==
+ arch.platform_os, compilers)
+ #return compilers
+
+
all_compilers = spack.compilers.all_compilers()
if (spec.compiler and
@@ -247,6 +298,7 @@ class DefaultConcretizer(object):
#Find the another spec that has a compiler, or the root if none do
other_spec = spec if spec.compiler else find_spec(spec, lambda(x) : x.compiler)
+
if not other_spec:
other_spec = spec.root
other_compiler = other_spec.compiler
@@ -265,7 +317,12 @@ class DefaultConcretizer(object):
raise UnavailableCompilerVersionError(other_compiler)
# copy concrete version into other_compiler
- spec.compiler = matches[0].copy()
+ index = 0
+ while not _proper_compiler_style(matches[index], spec.architecture):
+ index += 1
+ if index == len(matches) - 1:
+ raise NoValidVersionError(spec)
+ spec.compiler = matches[index].copy()
assert(spec.compiler.concrete)
return True # things changed.
@@ -276,15 +333,21 @@ class DefaultConcretizer(object):
compiler is used, defaulting to no compiler flags in the spec.
Default specs set at the compiler level will still be added later.
"""
+
+
+ if not spec.architecture.platform_os:
+ # Returning True usually indicates a change; here it means we are awaiting other changes
+ return True
+
ret = False
for flag in spack.spec.FlagMap.valid_compiler_flags():
try:
nearest = next(p for p in spec.traverse(direction='parents')
if ((p.compiler == spec.compiler and p is not spec)
and flag in p.compiler_flags))
- if ((not flag in spec.compiler_flags) or
- sorted(spec.compiler_flags[flag]) != sorted(nearest.compiler_flags[flag])):
- if flag in spec.compiler_flags:
+ if not flag in spec.compiler_flags or \
+ not (sorted(spec.compiler_flags[flag]) >= sorted(nearest.compiler_flags[flag])):
+ if flag in spec.compiler_flags:
spec.compiler_flags[flag] = list(set(spec.compiler_flags[flag]) |
set(nearest.compiler_flags[flag]))
else:
@@ -307,7 +370,7 @@ class DefaultConcretizer(object):
# Include the compiler flag defaults from the config files
# This ensures that spack will detect conflicts that stem from a change
# in default compiler flags.
- compiler = spack.compilers.compiler_for_spec(spec.compiler)
+ compiler = spack.compilers.compiler_for_spec(spec.compiler, spec.architecture)
for flag in compiler.flags:
if flag not in spec.compiler_flags:
spec.compiler_flags[flag] = compiler.flags[flag]
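A condensed sketch of how the split-up architecture concretization above now proceeds for a single spec:

# spec.architecture is None            -> spec.architecture = spack.architecture.Arch(); changed
# field already a Platform/OS/Target   -> left alone (no change reported)
# root spec carries a concrete value   -> copied onto this spec
# otherwise                            -> platform defaults: sys_type() for the platform,
#                                         operating_system('default_os'), target('default_target')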
diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py
index ec37bd290c..db0787edc6 100644
--- a/lib/spack/spack/config.py
+++ b/lib/spack/spack/config.py
@@ -135,7 +135,7 @@ from yaml.error import MarkedYAMLError
# Hacked yaml for configuration files preserves line numbers.
import spack.util.spack_yaml as syaml
-
+from spack.build_environment import get_path_from_module
"""Dict from section names -> schema for that section."""
section_schemas = {
@@ -146,18 +146,17 @@ section_schemas = {
'additionalProperties': False,
'patternProperties': {
'compilers:?': { # optional colon for overriding site config.
- 'type': 'object',
- 'default': {},
- 'additionalProperties': False,
- 'patternProperties': {
- r'\w[\w-]*': { # architecture
+ 'type': 'array',
+ 'items': {
+ 'compiler': {
'type': 'object',
'additionalProperties': False,
- 'patternProperties': {
- r'\w[\w-]*@\w[\w-]*': { # compiler spec
+ 'required': ['paths', 'spec', 'modules', 'operating_system'],
+ 'properties': {
+ 'paths': {
'type': 'object',
- 'additionalProperties': False,
'required': ['cc', 'cxx', 'f77', 'fc'],
+ 'additionalProperties': False,
'properties': {
'cc': { 'anyOf': [ {'type' : 'string' },
{'type' : 'null' }]},
@@ -167,8 +166,27 @@ section_schemas = {
{'type' : 'null' }]},
'fc': { 'anyOf': [ {'type' : 'string' },
{'type' : 'null' }]},
- },},},},},},},},
-
+ 'cflags': { 'anyOf': [ {'type' : 'string' },
+ {'type' : 'null' }]},
+ 'cxxflags': { 'anyOf': [ {'type' : 'string' },
+ {'type' : 'null' }]},
+ 'fflags': { 'anyOf': [ {'type' : 'string' },
+ {'type' : 'null' }]},
+ 'cppflags': { 'anyOf': [ {'type' : 'string' },
+ {'type' : 'null' }]},
+ 'ldflags': { 'anyOf': [ {'type' : 'string' },
+ {'type' : 'null' }]},
+ 'ldlibs': { 'anyOf': [ {'type' : 'string' },
+ {'type' : 'null' }]}}},
+ 'spec': { 'type': 'string'},
+ 'operating_system': { 'type': 'string'},
+ 'alias': { 'anyOf': [ {'type' : 'string'},
+ {'type' : 'null' }]},
+ 'modules': { 'anyOf': [ {'type' : 'string'},
+ {'type' : 'null' },
+ {'type': 'array'},
+ ]}
+ },},},},},},
'mirrors': {
'$schema': 'http://json-schema.org/schema#',
'title': 'Spack mirror configuration file schema',
@@ -194,7 +212,6 @@ section_schemas = {
'default': [],
'items': {
'type': 'string'},},},},
-
'packages': {
'$schema': 'http://json-schema.org/schema#',
'title': 'Spack package configuration file schema',
@@ -224,6 +241,10 @@ section_schemas = {
'type': 'boolean',
'default': True,
},
+ 'modules': {
+ 'type' : 'object',
+ 'default' : {},
+ },
'providers': {
'type': 'object',
'default': {},
@@ -563,8 +584,7 @@ def _merge_yaml(dest, source):
# Source list is prepended (for precedence)
if they_are(list):
- seen = set(source)
- dest[:] = source + [x for x in dest if x not in seen]
+ dest[:] = source + [x for x in dest if x not in source]
return dest
# Source dict is merged into dest.
@@ -667,7 +687,8 @@ def spec_externals(spec):
external_specs = []
pkg_paths = allpkgs.get(name, {}).get('paths', None)
- if not pkg_paths:
+ pkg_modules = allpkgs.get(name, {}).get('modules', None)
+ if (not pkg_paths) and (not pkg_modules):
return []
for external_spec, path in pkg_paths.iteritems():
@@ -678,6 +699,17 @@ def spec_externals(spec):
external_spec = spack.spec.Spec(external_spec, external=path)
if external_spec.satisfies(spec):
external_specs.append(external_spec)
+
+ for external_spec, module in pkg_modules.iteritems():
+ if not module:
+ continue
+
+ path = get_path_from_module(module)
+
+ external_spec = spack.spec.Spec(external_spec, external=path, external_module=module)
+ if external_spec.satisfies(spec):
+ external_specs.append(external_spec)
+
return external_specs
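A short sketch of the packages.yaml entry consumed by the new modules branch of spec_externals(); the package, spec, and module names are hypothetical:

# packages:
#   mpich:
#     modules:
#       mpich@3.0.4: cray-mpich/7.0.1
# For each pair, spec_externals() calls get_path_from_module('cray-mpich/7.0.1')
# to derive the prefix, then builds Spec('mpich@3.0.4', external=<prefix>,
#                                        external_module='cray-mpich/7.0.1').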
diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py
index e768ddf5fe..f941346bb1 100644
--- a/lib/spack/spack/database.py
+++ b/lib/spack/spack/database.py
@@ -214,9 +214,10 @@ class Database(object):
# Add dependencies from other records in the install DB to
# form a full spec.
- for dep_hash in spec_dict[spec.name]['dependencies'].values():
- child = self._read_spec_from_yaml(dep_hash, installs, hash_key)
- spec._add_dependency(child)
+ if 'dependencies' in spec_dict[spec.name]:
+ for dep_hash in spec_dict[spec.name]['dependencies'].values():
+ child = self._read_spec_from_yaml(dep_hash, installs, hash_key)
+ spec._add_dependency(child)
# Specs from the database need to be marked concrete because
# they represent actual installations.
@@ -289,7 +290,8 @@ class Database(object):
except Exception as e:
tty.warn("Invalid database reecord:",
"file: %s" % self._index_path,
- "hash: %s" % hash_key, "cause: %s" % str(e))
+ "hash: %s" % hash_key,
+ "cause: %s: %s" % (type(e).__name__, str(e)))
raise
self._data = data
@@ -309,7 +311,11 @@ class Database(object):
for spec in directory_layout.all_specs():
# Create a spec for each known package and add it.
path = directory_layout.path_for_spec(spec)
- self._add(spec, path, directory_layout)
+ old_info = old_data.get(spec.dag_hash())
+ explicit = False
+ if old_info is not None:
+ explicit = old_info.explicit
+ self._add(spec, path, directory_layout, explicit=explicit)
self._check_ref_counts()
diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py
index 51b26773e2..ca8f21dc08 100644
--- a/lib/spack/spack/directives.py
+++ b/lib/spack/spack/directives.py
@@ -257,7 +257,7 @@ def variant(pkg, name, default=False, description=""):
"""Define a variant for the package. Packager can specify a default
value (on or off) as well as a text description."""
- default = bool(default)
+ default = default
description = str(description).strip()
if not re.match(spack.spec.identifier_re, name):
diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py
index 32d27d7bd0..7e20365b0f 100644
--- a/lib/spack/spack/directory_layout.py
+++ b/lib/spack/spack/directory_layout.py
@@ -165,7 +165,7 @@ class DirectoryLayout(object):
class YamlDirectoryLayout(DirectoryLayout):
"""Lays out installation directories like this::
<install root>/
- <architecture>/
+ <target>/
<compiler>-<compiler version>/
<name>-<version>-<variants>-<hash>
@@ -207,8 +207,7 @@ class YamlDirectoryLayout(DirectoryLayout):
spec.version,
spec.dag_hash(self.hash_len))
- path = join_path(
- spec.architecture,
+ path = join_path(spec.architecture,
"%s-%s" % (spec.compiler.name, spec.compiler.version),
dir_name)
diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py
index 2607d0a7f4..6f28ec34b2 100644
--- a/lib/spack/spack/fetch_strategy.py
+++ b/lib/spack/spack/fetch_strategy.py
@@ -1,4 +1,4 @@
-##############################################################################
+#
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
@@ -21,7 +21,7 @@
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-##############################################################################
+#
"""
Fetch strategies are used to download source code into a staging area
in order to build it. They need to define the following methods:
@@ -75,11 +75,13 @@ def _needs_stage(fun):
class FetchStrategy(object):
+
"""Superclass of all fetch strategies."""
enabled = False # Non-abstract subclasses should be enabled.
required_attributes = None # Attributes required in version() args.
class __metaclass__(type):
+
"""This metaclass registers all fetch strategies in a list."""
def __init__(cls, name, bases, dict):
@@ -126,6 +128,7 @@ class FetchStrategy(object):
@pattern.composite(interface=FetchStrategy)
class FetchStrategyComposite(object):
+
"""
Composite for a FetchStrategy object. Implements the GoF composite pattern.
"""
@@ -134,6 +137,7 @@ class FetchStrategyComposite(object):
class URLFetchStrategy(FetchStrategy):
+
"""FetchStrategy that pulls source code from a URL for an archive,
checks the archive against a checksum,and decompresses the archive.
"""
@@ -235,12 +239,13 @@ class URLFetchStrategy(FetchStrategy):
# redirects properly.
content_types = re.findall(r'Content-Type:[^\r\n]+', headers)
if content_types and 'text/html' in content_types[-1]:
- tty.warn(
- "The contents of " + self.archive_file + " look like HTML.",
- "The checksum will likely be bad. If it is, you can use",
- "'spack clean <package>' to remove the bad archive, then fix",
- "your internet gateway issue and install again.")
-
+ tty.warn("The contents of ",
+ (self.archive_file if self.archive_file is not None
+ else "the archive"),
+ " look like HTML.",
+ "The checksum will likely be bad. If it is, you can use",
+ "'spack clean <package>' to remove the bad archive, then",
+ "fix your internet gateway issue and install again.")
if save_file:
os.rename(partial_file, save_file)
@@ -371,6 +376,7 @@ class CacheURLFetchStrategy(URLFetchStrategy):
class VCSFetchStrategy(FetchStrategy):
+
def __init__(self, name, *rev_types, **kwargs):
super(VCSFetchStrategy, self).__init__()
self.name = name
@@ -425,6 +431,7 @@ class VCSFetchStrategy(FetchStrategy):
class GoFetchStrategy(VCSFetchStrategy):
+
"""
Fetch strategy that employs the `go get` infrastructure
Use like this in a package:
@@ -484,6 +491,7 @@ class GoFetchStrategy(VCSFetchStrategy):
class GitFetchStrategy(VCSFetchStrategy):
+
"""
Fetch strategy that gets source code from a git repository.
Use like this in a package:
@@ -604,6 +612,7 @@ class GitFetchStrategy(VCSFetchStrategy):
class SvnFetchStrategy(VCSFetchStrategy):
+
"""Fetch strategy that gets source code from a subversion repository.
Use like this in a package:
@@ -680,6 +689,7 @@ class SvnFetchStrategy(VCSFetchStrategy):
class HgFetchStrategy(VCSFetchStrategy):
+
"""
Fetch strategy that gets source code from a Mercurial repository.
Use like this in a package:
@@ -850,11 +860,13 @@ class FsCache(object):
class FetchError(spack.error.SpackError):
+
def __init__(self, msg, long_msg=None):
super(FetchError, self).__init__(msg, long_msg)
class FailedDownloadError(FetchError):
+
"""Raised wen a download fails."""
def __init__(self, url, msg=""):
@@ -864,16 +876,19 @@ class FailedDownloadError(FetchError):
class NoArchiveFileError(FetchError):
+
def __init__(self, msg, long_msg):
super(NoArchiveFileError, self).__init__(msg, long_msg)
class NoDigestError(FetchError):
+
def __init__(self, msg, long_msg=None):
super(NoDigestError, self).__init__(msg, long_msg)
class InvalidArgsError(FetchError):
+
def __init__(self, pkg, version):
msg = ("Could not construct a fetch strategy for package %s at "
"version %s")
@@ -882,6 +897,7 @@ class InvalidArgsError(FetchError):
class ChecksumError(FetchError):
+
"""Raised when archive fails to checksum."""
def __init__(self, message, long_msg=None):
@@ -889,6 +905,7 @@ class ChecksumError(FetchError):
class NoStageError(FetchError):
+
"""Raised when fetch operations are called before set_stage()."""
def __init__(self, method):
diff --git a/lib/spack/spack/hooks/licensing.py b/lib/spack/spack/hooks/licensing.py
index 0f63b0e05a..9010b84154 100644
--- a/lib/spack/spack/hooks/licensing.py
+++ b/lib/spack/spack/hooks/licensing.py
@@ -26,7 +26,7 @@ import os
import spack
import llnl.util.tty as tty
-from llnl.util.filesystem import join_path
+from llnl.util.filesystem import join_path, mkdirp
def pre_install(pkg):
@@ -154,6 +154,9 @@ def symlink_license(pkg):
target = pkg.global_license_file
for filename in pkg.license_files:
link_name = join_path(pkg.prefix, filename)
+ license_dir = os.path.dirname(link_name)
+ if not os.path.exists(license_dir):
+ mkdirp(license_dir)
if os.path.exists(target):
os.symlink(target, link_name)
tty.msg("Added local symlink %s to global license file" %
diff --git a/lib/spack/spack/operating_systems/__init__.py b/lib/spack/spack/operating_systems/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/spack/spack/operating_systems/__init__.py
diff --git a/lib/spack/spack/operating_systems/cnl.py b/lib/spack/spack/operating_systems/cnl.py
new file mode 100644
index 0000000000..c160a60be8
--- /dev/null
+++ b/lib/spack/spack/operating_systems/cnl.py
@@ -0,0 +1,62 @@
+import re
+import os
+
+from spack.architecture import OperatingSystem
+from spack.util.executable import *
+import spack.spec
+from spack.util.multiproc import parmap
+import spack.compilers
+import llnl.util.tty as tty
+
+class Cnl(OperatingSystem):
+ """ Compute Node Linux (CNL) is the operating system used for the Cray XC
+ series super computers. It is a very stripped down version of GNU/Linux.
+ Any compilers found through this operating system will be used with
+ modules. If updated, user must make sure that version and name are
+ updated to indicate that OS has been upgraded (or downgraded)
+ """
+ def __init__(self):
+ name = 'CNL'
+ version = '10'
+ super(Cnl, self).__init__(name, version)
+
+
+ def find_compilers(self, *paths):
+ types = spack.compilers.all_compiler_types()
+ compiler_lists = parmap(lambda cmp_cls: self.find_compiler(cmp_cls, *paths), types)
+
+ # ensure all the version calls we made are cached in the parent
+ # process, as well. This speeds up Spack a lot.
+ clist = reduce(lambda x,y: x+y, compiler_lists)
+ return clist
+
+
+ def find_compiler(self, cmp_cls, *paths):
+ compilers = []
+ if cmp_cls.PrgEnv:
+ if not cmp_cls.PrgEnv_compiler:
+ tty.die('Must supply PrgEnv_compiler with PrgEnv')
+
+ modulecmd = which('modulecmd')
+ modulecmd.add_default_arg('python')
+
+ # Save the environment variable to restore later
+ old_modulepath = os.environ['MODULEPATH']
+ # if given any explicit paths, search them for module files too
+ if paths:
+ module_paths = ':' + ':'.join(p for p in paths)
+ os.environ['MODULEPATH'] = module_paths
+
+ output = modulecmd('avail', cmp_cls.PrgEnv_compiler, output=str, error=str)
+ matches = re.findall(r'(%s)/([\d\.]+[\d])' % cmp_cls.PrgEnv_compiler, output)
+ for name, version in matches:
+ v = version
+ comp = cmp_cls(spack.spec.CompilerSpec(name + '@' + v), self,
+ ['cc', 'CC', 'ftn'], [cmp_cls.PrgEnv, name +'/' + v])
+
+ compilers.append(comp)
+
+ # Restore modulepath environment variable
+ if paths:
+ os.environ['MODULEPATH'] = old_modulepath
+
+ return compilers
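find_compiler above scrapes PrgEnv compiler versions out of `modulecmd avail` output with a single regular expression. A standalone sketch of just that parsing step; the sample output string here is made up:

    import re

    # Made-up sample of what 'modulecmd avail gcc' might print.
    output = "gcc/4.9.3 gcc/5.3.0(default) gcc/6.1.0"

    # Same pattern as above: capture the compiler name and a dotted version
    # ending in a digit, so trailing '(default)' markers are not included.
    matches = re.findall(r'(%s)/([\d\.]+[\d])' % 'gcc', output)
    print(matches)  # [('gcc', '4.9.3'), ('gcc', '5.3.0'), ('gcc', '6.1.0')]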
diff --git a/lib/spack/spack/operating_systems/linux_distro.py b/lib/spack/spack/operating_systems/linux_distro.py
new file mode 100644
index 0000000000..2e3c72719b
--- /dev/null
+++ b/lib/spack/spack/operating_systems/linux_distro.py
@@ -0,0 +1,22 @@
+import re
+import platform as py_platform
+from spack.architecture import OperatingSystem
+
+class LinuxDistro(OperatingSystem):
+ """ This class will represent the autodetected operating system
+ for a Linux System. Since there are many different flavors of
+ Linux, this class will attempt to encompass them all through
+ autodetection using the python module platform and the method
+ platform.dist()
+ """
+ def __init__(self):
+ distname, version, _ = py_platform.linux_distribution(
+ full_distribution_name=False)
+
+ # Grabs major version from tuple on redhat; on other platforms
+ # grab the first legal identifier in the version field. On
+ # debian you get things like 'wheezy/sid'; sid means unstable.
+ # We just record 'wheezy' and don't get quite so detailed.
+ version = re.split(r'[^\w-]', version)[0]
+
+ super(LinuxDistro, self).__init__(distname, version)
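The version handling above keeps only the first identifier-like chunk of the distribution version, which is what turns Debian's 'wheezy/sid' into 'wheezy'. A standalone illustration of that split:

    import re

    def short_version(version):
        # Keep the first run of word characters/dashes; everything after the
        # first other character (e.g. '.' or '/') is dropped.
        return re.split(r'[^\w-]', version)[0]

    print(short_version('wheezy/sid'))  # 'wheezy' on Debian
    print(short_version('6.5'))         # '6' on a Red Hat style version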
diff --git a/lib/spack/spack/operating_systems/mac_os.py b/lib/spack/spack/operating_systems/mac_os.py
new file mode 100644
index 0000000000..f35b3ca577
--- /dev/null
+++ b/lib/spack/spack/operating_systems/mac_os.py
@@ -0,0 +1,29 @@
+import platform as py_platform
+from spack.architecture import OperatingSystem
+
+class MacOs(OperatingSystem):
+ """This class represents the macOS operating system. This will be
+ auto detected using the python platform.mac_ver. The macOS
+ platform will be represented using the major version operating
+ system name, i.e el capitan, yosemite...etc.
+ """
+
+ def __init__(self):
+ """ Autodetects the mac version from a dictionary. Goes back as
+ far as 10.6 snowleopard. If the user has an older mac then
+ the version will just be a generic mac_os.
+ """
+ mac_releases = {'10.6': "snowleopard",
+ "10.7": "lion",
+ "10.8": "mountainlion",
+ "10.9": "mavericks",
+ "10.10": "yosemite",
+ "10.11": "elcapitan",
+ "10.12": "sierra"}
+
+ mac_ver = py_platform.mac_ver()[0][:-2]
+ name = mac_releases.get(mac_ver, "macos")
+ super(MacOs, self).__init__(name, mac_ver)
+
+ def __str__(self):
+ return self.name
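The lookup key here is platform.mac_ver()'s release string with the last two characters (the patch level) sliced off, and unknown keys fall back to the generic name. A standalone sketch with an abbreviated release table:

    # Abbreviated copy of the release table above.
    mac_releases = {'10.10': 'yosemite', '10.11': 'elcapitan', '10.12': 'sierra'}

    def release_name(mac_ver_string):
        # '10.11.6' -> '10.11'; versions not in the table map to 'macos'.
        key = mac_ver_string[:-2]
        return mac_releases.get(key, 'macos')

    print(release_name('10.11.6'))  # 'elcapitan'
    print(release_name('10.5.8'))   # 'macos' (older than the table)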
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index cbf50e56f6..c5250d17c0 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -398,13 +398,19 @@ class Package(object):
spack.repo.get(self.extendee_spec)._check_extendable()
@property
+ def global_license_dir(self):
+ """Returns the directory where global license files for all
+ packages are stored."""
+ spack_root = ancestor(__file__, 4)
+ return join_path(spack_root, 'etc', 'spack', 'licenses')
+
+ @property
def global_license_file(self):
- """Returns the path where a global license file should be stored."""
+ """Returns the path where a global license file for this
+ particular package should be stored."""
if not self.license_files:
return
- spack_root = ancestor(__file__, 4)
- global_license_dir = join_path(spack_root, 'etc', 'spack', 'licenses')
- return join_path(global_license_dir, self.name,
+ return join_path(self.global_license_dir, self.name,
os.path.basename(self.license_files[0]))
@property
@@ -676,11 +682,13 @@ class Package(object):
return self.spec.prefix
@property
+ #TODO: Change this to architecture
def compiler(self):
"""Get the spack.compiler.Compiler object used to build this package"""
if not self.spec.concrete:
raise ValueError("Can only get a compiler for a concrete package.")
- return spack.compilers.compiler_for_spec(self.spec.compiler)
+ return spack.compilers.compiler_for_spec(self.spec.compiler,
+ self.spec.architecture)
def url_version(self, version):
"""
diff --git a/lib/spack/spack/platforms/__init__.py b/lib/spack/spack/platforms/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/spack/spack/platforms/__init__.py
diff --git a/lib/spack/spack/platforms/bgq.py b/lib/spack/spack/platforms/bgq.py
new file mode 100644
index 0000000000..e0eb76f336
--- /dev/null
+++ b/lib/spack/spack/platforms/bgq.py
@@ -0,0 +1,18 @@
+import os
+from spack.architecture import Platform, Target
+
+class Bgq(Platform):
+ priority = 30
+ front_end = 'power7'
+ back_end = 'powerpc'
+ default = 'powerpc'
+
+ def __init__(self):
+ super(Bgq, self).__init__('bgq')
+ self.add_target(self.front_end, Target(self.front_end))
+ self.add_target(self.back_end, Target(self.back_end))
+
+ @classmethod
+ def detect(self):
+ return os.path.exists('/bgsys')
+
diff --git a/lib/spack/spack/platforms/cray_xc.py b/lib/spack/spack/platforms/cray_xc.py
new file mode 100644
index 0000000000..e710303e23
--- /dev/null
+++ b/lib/spack/spack/platforms/cray_xc.py
@@ -0,0 +1,46 @@
+import os
+from spack.architecture import Platform, Target
+from spack.operating_systems.linux_distro import LinuxDistro
+from spack.operating_systems.cnl import Cnl
+
+class CrayXc(Platform):
+ priority = 20
+ front_end = 'sandybridge'
+ back_end = 'ivybridge'
+ default = 'ivybridge'
+
+ front_os = "SuSE11"
+ back_os = "CNL10"
+ default_os = "CNL10"
+
+ def __init__(self):
+ ''' Since Cori doesn't have ivybridge as a front end, it's better
+ to use CRAY_CPU_TARGET as the default. This ensures that the
+ target can still be detected whether we're on an XC-30 or an XC-40.
+ '''
+ super(CrayXc, self).__init__('cray_xc')
+
+ # Handle the default here so we can check for a key error
+ if 'CRAY_CPU_TARGET' in os.environ:
+ self.default = os.environ['CRAY_CPU_TARGET']
+
+ # Change the defaults to haswell if we're on an XC40
+ if self.default == 'haswell':
+ self.front_end = self.default
+ self.back_end = self.default
+
+ # Could switch to use modules and fe targets for front end
+ # Currently using compilers by path for front end.
+ self.add_target('sandybridge', Target('sandybridge'))
+ self.add_target('ivybridge',
+ Target('ivybridge', 'craype-ivybridge'))
+ self.add_target('haswell',
+ Target('haswell','craype-haswell'))
+
+ self.add_operating_system('SuSE11', LinuxDistro())
+ self.add_operating_system('CNL10', Cnl())
+
+ @classmethod
+ def detect(self):
+ return os.path.exists('/opt/cray/craype')
+
diff --git a/lib/spack/spack/platforms/darwin.py b/lib/spack/spack/platforms/darwin.py
new file mode 100644
index 0000000000..d47dd640f9
--- /dev/null
+++ b/lib/spack/spack/platforms/darwin.py
@@ -0,0 +1,26 @@
+import subprocess
+from spack.architecture import Platform, Target
+from spack.operating_systems.mac_os import MacOs
+
+class Darwin(Platform):
+ priority = 89
+ front_end = 'x86_64'
+ back_end = 'x86_64'
+ default = 'x86_64'
+
+ def __init__(self):
+ super(Darwin, self).__init__('darwin')
+ self.add_target(self.default, Target(self.default))
+ mac_os = MacOs()
+
+ self.default_os = str(mac_os)
+ self.front_os = str(mac_os)
+ self.back_os = str(mac_os)
+
+ self.add_operating_system(str(mac_os), mac_os)
+
+ @classmethod
+ def detect(self):
+ platform = subprocess.Popen(['uname', '-a'], stdout = subprocess.PIPE)
+ platform, _ = platform.communicate()
+ return 'darwin' in platform.strip().lower()
diff --git a/lib/spack/spack/platforms/linux.py b/lib/spack/spack/platforms/linux.py
new file mode 100644
index 0000000000..4d8adac384
--- /dev/null
+++ b/lib/spack/spack/platforms/linux.py
@@ -0,0 +1,24 @@
+import subprocess
+from spack.architecture import Platform, Target
+from spack.operating_systems.linux_distro import LinuxDistro
+
+class Linux(Platform):
+ priority = 90
+ front_end = 'x86_64'
+ back_end = 'x86_64'
+ default = 'x86_64'
+
+ def __init__(self):
+ super(Linux, self).__init__('linux')
+ self.add_target(self.default, Target(self.default))
+ linux_dist = LinuxDistro()
+ self.default_os = str(linux_dist)
+ self.front_os = self.default_os
+ self.back_os = self.default_os
+ self.add_operating_system(str(linux_dist), linux_dist)
+
+ @classmethod
+ def detect(self):
+ platform = subprocess.Popen(['uname', '-a'], stdout = subprocess.PIPE)
+ platform, _ = platform.communicate()
+ return 'linux' in platform.strip().lower()
diff --git a/lib/spack/spack/platforms/test.py b/lib/spack/spack/platforms/test.py
new file mode 100644
index 0000000000..8fa2585a7a
--- /dev/null
+++ b/lib/spack/spack/platforms/test.py
@@ -0,0 +1,28 @@
+import subprocess
+from spack.architecture import Platform, Target
+from spack.operating_systems.linux_distro import LinuxDistro
+from spack.operating_systems.cnl import Cnl
+
+
+class Test(Platform):
+ priority = 1000000
+ front_end = 'x86_32'
+ back_end = 'x86_64'
+ default = 'x86_64'
+
+ back_os = 'CNL10'
+ default_os = 'CNL10'
+
+ def __init__(self):
+ super(Test, self).__init__('test')
+ self.add_target(self.default, Target(self.default))
+ self.add_target(self.front_end, Target(self.front_end))
+
+ self.add_operating_system(self.default_os, Cnl())
+ linux_dist = LinuxDistro()
+ self.front_os = linux_dist.name
+ self.add_operating_system(self.front_os, linux_dist)
+
+ @classmethod
+ def detect(self):
+ return True
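Each platform class carries a detect() classmethod and a priority; the Test platform's very large priority keeps it from shadowing the real platforms. A hedged standalone sketch of the selection rule these values imply (this is not the actual spack.architecture code, and the class names are made up):

    class FakeLinux(object):
        priority = 90

        @classmethod
        def detect(cls):
            return True

    class FakeTest(object):
        priority = 1000000

        @classmethod
        def detect(cls):
            return True

    def pick_platform(platform_classes):
        # Of the platforms whose detect() returns True, take the one with
        # the lowest (i.e. strongest) priority value.
        candidates = [p for p in platform_classes if p.detect()]
        return min(candidates, key=lambda p: p.priority)

    print(pick_platform([FakeTest, FakeLinux]).__name__)  # FakeLinux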
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index 7c09af4c21..54219ec1b4 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -1,4 +1,4 @@
-#
+##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
@@ -18,10 +18,10 @@
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
"""
Spack allows very fine-grained control over how packages are installed and
over how they are built and configured. To make this easy, it has its own
@@ -96,8 +96,10 @@ specs to avoid ambiguity. Both are provided because ~ can cause shell
expansion when it is the first character in an id typed on the command line.
"""
import sys
+import itertools
import hashlib
import base64
+import imp
from StringIO import StringIO
from operator import attrgetter
import yaml
@@ -106,16 +108,22 @@ from yaml.error import MarkedYAMLError
import llnl.util.tty as tty
from llnl.util.lang import *
from llnl.util.tty.color import *
+from llnl.util.filesystem import join_path
import spack
+import spack.architecture
import spack.parse
import spack.error
import spack.compilers as compilers
+# TODO: move display_specs to some other location.
+from spack.cmd.find import display_specs
from spack.version import *
from spack.util.string import *
from spack.util.prefix import Prefix
+from spack.util.naming import mod_to_class
from spack.virtual import ProviderIndex
+from spack.build_environment import get_path_from_module, load_module
# Valid pattern for an identifier in Spack
identifier_re = r'\w[\w-]*'
@@ -165,7 +173,6 @@ def colorize_spec(spec):
"""Returns a spec colorized according to the colors specified in
color_formats."""
class insert_color:
-
def __init__(self):
self.last = None
@@ -183,11 +190,9 @@ def colorize_spec(spec):
@key_ordering
class CompilerSpec(object):
-
"""The CompilerSpec field represents the compiler or range of compiler
versions that a package should be built with. CompilerSpecs have a
name and a version list. """
-
def __init__(self, *args):
nargs = len(args)
if nargs == 1:
@@ -293,7 +298,6 @@ class VariantSpec(object):
on the particular package being built, and each named variant can
be enabled or disabled.
"""
-
def __init__(self, name, value):
self.name = name
self.value = value
@@ -488,7 +492,8 @@ class Spec(object):
self._concrete = kwargs.get('concrete', False)
# Allow a spec to be constructed with an external path.
- self.external = kwargs.get('external', None)
+ self.external = kwargs.get('external', None)
+ self.external_module = kwargs.get('external_module', None)
# This allows users to construct a spec DAG with literals.
# Note that given two specs a and b, Spec(a) copies a, but
@@ -520,8 +525,33 @@ class Spec(object):
Known flags currently include "arch"
"""
valid_flags = FlagMap.valid_compiler_flags()
- if name == 'arch':
- self._set_architecture(value)
+ if name == 'arch' or name == 'architecture':
+ parts = value.split('-')
+ if len(parts) == 3:
+ platform, op_sys, target = parts
+ else:
+ platform, op_sys, target = None, None, value
+
+ assert(self.architecture.platform is None)
+ assert(self.architecture.platform_os is None)
+ assert(self.architecture.target is None)
+ assert(self.architecture.os_string is None)
+ assert(self.architecture.target_string is None)
+ self._set_platform(platform)
+ self._set_os(op_sys)
+ self._set_target(target)
+ elif name == 'platform':
+ self._set_platform(value)
+ elif name == 'os' or name == 'operating_system':
+ if self.architecture.platform:
+ self._set_os(value)
+ else:
+ self.architecture.os_string = value
+ elif name == 'target':
+ if self.architecture.platform:
+ self._set_target(value)
+ else:
+ self.architecture.target_string = value
elif name in valid_flags:
assert(self.compiler_flags is not None)
self.compiler_flags[name] = value.split()
@@ -535,12 +565,49 @@ class Spec(object):
"Spec for '%s' cannot have two compilers." % self.name)
self.compiler = compiler
- def _set_architecture(self, architecture):
- """Called by the parser to set the architecture."""
- if self.architecture:
- raise DuplicateArchitectureError(
- "Spec for '%s' cannot have two architectures." % self.name)
- self.architecture = architecture
+ def _set_platform(self, value):
+ """Called by the parser to set the architecture platform"""
+ if isinstance(value, basestring):
+ mod_path = spack.platform_path
+ mod_string = 'spack.platforms.'
+ names = list_modules(mod_path)
+ if value in names:
+ # Create a platform object from the name
+ mod_name = mod_string + value
+ path = join_path(mod_path, value) + '.py'
+ mod = imp.load_source(mod_name, path)
+ class_name = mod_to_class(value)
+ if not hasattr(mod, class_name):
+ tty.die('No class %s defined in %s' % (class_name, mod_name))
+ cls = getattr(mod, class_name)
+ if not inspect.isclass(cls):
+ tty.die('%s.%s is not a class' % (mod_name, class_name))
+ platform = cls()
+ else:
+ tty.die("No platform class %s defined." % value)
+ else:
+ # The value is a platform
+ platform = value
+
+ self.architecture.platform = platform
+
+ # Set os and target if we previously got strings for them
+ if self.architecture.os_string:
+ self._set_os(self.architecture.os_string)
+ self.architecture.os_string = None
+ if self.architecture.target_string:
+ self._set_target(self.architecture.target_string)
+ self.architecture.target_string = None
+
+ def _set_os(self, value):
+ """Called by the parser to set the architecture operating system"""
+ if self.architecture.platform:
+ self.architecture.platform_os = self.architecture.platform.operating_system(value)
+
+ def _set_target(self, value):
+ """Called by the parser to set the architecture target"""
+ if self.architecture.platform:
+ self.architecture.target = self.architecture.platform.target(value)
def _add_dependency(self, spec):
"""Called by the parser to add another spec as a dependency."""
@@ -612,15 +679,15 @@ class Spec(object):
if self._concrete:
return True
- self._concrete = bool(not self.virtual and
- self.namespace is not None and
- self.versions.concrete and
- self.variants.concrete and
- self.architecture and
- self.compiler and
- self.compiler.concrete and
- self.compiler_flags.concrete and
- self.dependencies.concrete)
+ self._concrete = bool(not self.virtual
+ and self.namespace is not None
+ and self.versions.concrete
+ and self.variants.concrete
+ and self.architecture
+ and self.architecture.concrete
+ and self.compiler and self.compiler.concrete
+ and self.compiler_flags.concrete
+ and self.dependencies.concrete)
return self._concrete
def traverse(self, visited=None, d=0, **kwargs):
@@ -658,7 +725,7 @@ class Spec(object):
in the traversal.
root [=True]
- If false, this won't yield the root node, just its descendents.
+ If False, this won't yield the root node, just its descendants.
direction [=children|parents]
If 'children', does a traversal of this spec's children. If
@@ -753,10 +820,10 @@ class Spec(object):
params.update(dict((name, value)
for name, value in self.compiler_flags.items()))
d = {
- 'parameters': params,
- 'arch': self.architecture,
- 'dependencies': dict((d, self.dependencies[d].dag_hash())
- for d in sorted(self.dependencies)),
+ 'parameters' : params,
+ 'arch' : self.architecture,
+ 'dependencies' : dict((d, self.dependencies[d].dag_hash())
+ for d in sorted(self.dependencies))
}
# Older concrete specs do not have a namespace. Omit for
@@ -764,6 +831,13 @@ class Spec(object):
if not self.concrete or self.namespace:
d['namespace'] = self.namespace
+ if self.architecture:
+ # TODO: Fix the target.to_dict to account for the tuple
+ # Want it to be a dict of dicts
+ d['arch'] = self.architecture.to_dict()
+ else:
+ d['arch'] = None
+
if self.compiler:
d.update(self.compiler.to_dict())
else:
@@ -789,11 +863,12 @@ class Spec(object):
spec = Spec(name)
spec.namespace = node.get('namespace', None)
spec.versions = VersionList.from_dict(node)
- spec.architecture = node['arch']
if 'hash' in node:
spec._hash = node['hash']
+ spec.architecture = spack.architecture.arch_from_dict(node['arch'])
+
if node['compiler'] is None:
spec.compiler = None
else:
@@ -866,12 +941,10 @@ class Spec(object):
# Concretize deps first -- this is a bottom-up process.
for name in sorted(self.dependencies.keys()):
- changed |= self.dependencies[
- name]._concretize_helper(presets, visited)
+ changed |= self.dependencies[name]._concretize_helper(presets, visited)
if self.name in presets:
changed |= self.constrain(presets[self.name])
-
else:
# Concretize virtual dependencies last. Because they're added
# to presets below, their constraints will all be merged, but we'll
@@ -936,11 +1009,12 @@ class Spec(object):
"""
# Make an index of stuff this spec already provides
self_index = ProviderIndex(self.traverse(), restrict=True)
-
changed = False
done = False
+
while not done:
done = True
+
for spec in list(self.traverse()):
replacement = None
if spec.virtual:
@@ -979,24 +1053,25 @@ class Spec(object):
continue
# If replacement is external then trim the dependencies
- if replacement.external:
+ if replacement.external or replacement.external_module:
if (spec.dependencies):
changed = True
spec.dependencies = DependencyMap()
replacement.dependencies = DependencyMap()
+ replacement.architecture = self.architecture
# TODO: could this and the stuff in _dup be cleaned up?
def feq(cfield, sfield):
return (not cfield) or (cfield == sfield)
- if replacement is spec or (
- feq(replacement.name, spec.name) and
- feq(replacement.versions, spec.versions) and
- feq(replacement.compiler, spec.compiler) and
- feq(replacement.architecture, spec.architecture) and
- feq(replacement.dependencies, spec.dependencies) and
- feq(replacement.variants, spec.variants) and
- feq(replacement.external, spec.external)):
+ if replacement is spec or (feq(replacement.name, spec.name) and
+ feq(replacement.versions, spec.versions) and
+ feq(replacement.compiler, spec.compiler) and
+ feq(replacement.architecture, spec.architecture) and
+ feq(replacement.dependencies, spec.dependencies) and
+ feq(replacement.variants, spec.variants) and
+ feq(replacement.external, spec.external) and
+ feq(replacement.external_module, spec.external_module)):
continue
# Refine this spec to the candidate. This uses
# replace_with AND dup so that it can work in
@@ -1053,6 +1128,15 @@ class Spec(object):
if s.namespace is None:
s.namespace = spack.repo.repo_for_pkg(s.name).namespace
+
+ for s in self.traverse(root=False):
+ if s.external_module:
+ compiler = spack.compilers.compiler_for_spec(s.compiler, s.architecture)
+ for mod in compiler.modules:
+ load_module(mod)
+
+ s.external = get_path_from_module(s.external_module)
+
# Mark everything in the spec as concrete, as well.
self._mark_concrete()
@@ -1253,7 +1337,7 @@ class Spec(object):
# if we descend into a virtual spec, there's nothing more
# to normalize. Concretize will finish resolving it later.
- if self.virtual or self.external:
+ if self.virtual or self.external or self.external_module:
return False
# Combine constraints from package deps with constraints from
@@ -1300,7 +1384,6 @@ class Spec(object):
# Ensure first that all packages & compilers in the DAG exist.
self.validate_names()
-
# Get all the dependencies into one DependencyMap
spec_deps = self.flat_dependencies(copy=False)
@@ -1378,10 +1461,21 @@ class Spec(object):
raise UnsatisfiableVariantSpecError(self.variants[v],
other.variants[v])
+ # TODO: Check out the logic here
if self.architecture is not None and other.architecture is not None:
- if self.architecture != other.architecture:
- raise UnsatisfiableArchitectureSpecError(self.architecture,
- other.architecture)
+ if self.architecture.platform is not None and other.architecture.platform is not None:
+ if self.architecture.platform != other.architecture.platform:
+ raise UnsatisfiableArchitectureSpecError(self.architecture,
+ other.architecture)
+ if self.architecture.platform_os is not None and other.architecture.platform_os is not None:
+ if self.architecture.platform_os != other.architecture.platform_os:
+ raise UnsatisfiableArchitectureSpecError(self.architecture,
+ other.architecture)
+ if self.architecture.target is not None and other.architecture.target is not None:
+ if self.architecture.target != other.architecture.target:
+ raise UnsatisfiableArchitectureSpecError(self.architecture,
+ other.architecture)
+
changed = False
if self.compiler is not None and other.compiler is not None:
@@ -1395,9 +1489,17 @@ class Spec(object):
changed |= self.compiler_flags.constrain(other.compiler_flags)
- old = self.architecture
- self.architecture = self.architecture or other.architecture
- changed |= (self.architecture != old)
+ old = str(self.architecture)
+ if self.architecture is None or other.architecture is None:
+ self.architecture = self.architecture or other.architecture
+ else:
+ if self.architecture.platform is None or other.architecture.platform is None:
+ self.architecture.platform = self.architecture.platform or other.architecture.platform
+ if self.architecture.platform_os is None or other.architecture.platform_os is None:
+ self.architecture.platform_os = self.architecture.platform_os or other.architecture.platform_os
+ if self.architecture.target is None or other.architecture.target is None:
+ self.architecture.target = self.architecture.target or other.architecture.target
+ changed |= (str(self.architecture) != old)
if deps:
changed |= self._constrain_dependencies(other)
@@ -1524,9 +1626,14 @@ class Spec(object):
# Architecture satisfaction is currently just string equality.
# If not strict, None means unconstrained.
if self.architecture and other.architecture:
- if self.architecture != other.architecture:
+ if ((self.architecture.platform and other.architecture.platform and self.architecture.platform != other.architecture.platform) or
+ (self.architecture.platform_os and other.architecture.platform_os and self.architecture.platform_os != other.architecture.platform_os) or
+ (self.architecture.target and other.architecture.target and self.architecture.target != other.architecture.target)):
return False
- elif strict and (other.architecture and not self.architecture):
+ elif strict and ((other.architecture and not self.architecture) or
+ (other.architecture.platform and not self.architecture.platform) or
+ (other.architecture.platform_os and not self.architecture.platform_os) or
+ (other.architecture.target and not self.architecture.target)):
return False
if not self.compiler_flags.satisfies(
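The constrain and satisfies changes above compare platform, operating system, and target field by field: two architectures only conflict when a field is set on both sides with different values. A standalone sketch of that rule, with illustrative tuples rather than real Arch objects:

    def arch_fields_compatible(mine, theirs):
        # Each argument is a (platform, os, target) tuple; None means
        # 'unconstrained'. A conflict requires both sides set and unequal.
        return all(a is None or b is None or a == b
                   for a, b in zip(mine, theirs))

    print(arch_fields_compatible(('cray_xc', None, 'haswell'),
                                 ('cray_xc', 'CNL10', None)))  # True
    print(arch_fields_compatible((None, 'CNL10', None),
                                 (None, 'SuSE11', None)))      # False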
@@ -1601,20 +1708,17 @@ class Spec(object):
Options:
dependencies[=True]
- Whether deps should be copied too. Set to false to copy a
+ Whether deps should be copied too. Set to False to copy a
spec but not its dependencies.
"""
# We don't count dependencies as changes here
changed = True
if hasattr(self, 'name'):
- changed = (self.name != other.name and
- self.versions != other.versions and
- self.architecture != other.architecture and
- self.compiler != other.compiler and
- self.variants != other.variants and
- self._normal != other._normal and
- self.concrete != other.concrete and
- self.external != other.external)
+ changed = (self.name != other.name and self.versions != other.versions and \
+ self.architecture != other.architecture and self.compiler != other.compiler and \
+ self.variants != other.variants and self._normal != other._normal and \
+ self.concrete != other.concrete and self.external != other.external and \
+ self.external_module != other.external_module and self.compiler_flags != other.compiler_flags)
# Local node attributes get copied first.
self.name = other.name
@@ -1628,6 +1732,7 @@ class Spec(object):
self.variants = other.variants.copy()
self.variants.spec = self
self.external = other.external
+ self.external_module = other.external_module
self.namespace = other.namespace
self._hash = other._hash
@@ -1648,6 +1753,7 @@ class Spec(object):
self._normal = other._normal
self._concrete = other._concrete
self.external = other.external
+ self.external_module = other.external_module
return changed
def copy(self, **kwargs):
@@ -1752,6 +1858,7 @@ class Spec(object):
self.compiler,
self.compiler_flags)
+
def eq_node(self, other):
"""Equality with another spec, not including dependencies."""
return self._cmp_node() == other._cmp_node()
@@ -1862,7 +1969,7 @@ class Spec(object):
if self.variants:
write(fmt % str(self.variants), c)
elif c == '=':
- if self.architecture:
+ if self.architecture and str(self.architecture):
write(fmt % (' arch' + c + str(self.architecture)), c)
elif c == '#':
out.write('-' + fmt % (self.dag_hash(7)))
@@ -1920,8 +2027,8 @@ class Spec(object):
if self.variants:
write(fmt % str(self.variants), '+')
elif named_str == 'ARCHITECTURE':
- if self.architecture:
- write(fmt % str(self.architecture), '=')
+ if self.architecture and str(self.architecture):
+ write(fmt % str(self.architecture), ' arch=')
elif named_str == 'SHA1':
if self.dependencies:
out.write(fmt % str(self.dag_hash(7)))
@@ -1946,6 +2053,41 @@ class Spec(object):
def dep_string(self):
return ''.join("^" + dep.format() for dep in self.sorted_deps())
+
+ def __cmp__(self, other):
+ # Package name sort order is not configurable; it always goes alphabetically
+ if self.name != other.name:
+ return cmp(self.name, other.name)
+
+ # Package version is second in compare order
+ pkgname = self.name
+ if self.versions != other.versions:
+ return spack.pkgsort.version_compare(pkgname,
+ self.versions, other.versions)
+
+ # Compiler is third
+ if self.compiler != other.compiler:
+ return spack.pkgsort.compiler_compare(pkgname,
+ self.compiler, other.compiler)
+
+ # Variants
+ if self.variants != other.variants:
+ return spack.pkgsort.variant_compare(pkgname,
+ self.variants, other.variants)
+
+ # Architecture
+ if self.architecture != other.architecture:
+ return spack.pkgsort.architecture_compare(pkgname,
+ self.architecture, other.architecture)
+
+ # Dependency order is not configurable
+ if self.dependencies != other.dependencies:
+ return -1 if self.dependencies < other.dependencies else 1
+
+ # Equal specs
+ return 0
+
+
def __str__(self):
return self.format() + self.dep_string()
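__cmp__ above orders specs by name first and then walks a fixed tie-break chain: version, compiler, variants, architecture, and finally dependencies. A standalone sketch of that kind of chained comparison, without the configurable pkgsort hooks:

    def chained_cmp(pairs):
        # 'pairs' is an ordered list of (mine, theirs) fields; the first
        # unequal pair decides the ordering, otherwise the items are equal.
        for a, b in pairs:
            if a != b:
                return -1 if a < b else 1
        return 0

    print(chained_cmp([('libelf', 'libelf'), ('0.8.12', '0.8.13')]))  # -1
    print(chained_cmp([('zlib', 'libelf')]))                          # 1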
@@ -2015,15 +2157,17 @@ class SpecLexer(spack.parse.Lexer):
(r'\s+', lambda scanner, val: None)])
+# Lexer is always the same for every parser.
+_lexer = SpecLexer()
+
class SpecParser(spack.parse.Parser):
def __init__(self):
- super(SpecParser, self).__init__(SpecLexer())
+ super(SpecParser, self).__init__(_lexer)
self.previous = None
def do_parse(self):
specs = []
-
try:
while self.next:
# TODO: clean this parsing up a bit
@@ -2067,6 +2211,12 @@ class SpecParser(spack.parse.Parser):
except spack.parse.ParseError, e:
raise SpecParseError(e)
+
+ # If the spec has an os or a target and no platform, give it the default platform
+ for spec in specs:
+ for s in spec.traverse():
+ if s.architecture.os_string or s.architecture.target_string:
+ s._set_platform(spack.architecture.sys_type())
return specs
def parse_compiler(self, text):
@@ -2108,9 +2258,10 @@ class SpecParser(spack.parse.Parser):
spec.name = spec_name
spec.versions = VersionList()
spec.variants = VariantMap(spec)
- spec.architecture = None
+ spec.architecture = spack.architecture.Arch()
spec.compiler = None
spec.external = None
+ spec.external_module = None
spec.compiler_flags = FlagMap(spec)
spec.dependents = DependencyMap()
spec.dependencies = DependencyMap()
@@ -2186,12 +2337,6 @@ class SpecParser(spack.parse.Parser):
self.check_identifier()
return self.token.value
- def architecture(self):
- # TODO: Make this work properly as a subcase of variant (includes
- # adding names to grammar)
- self.expect(ID)
- return self.token.value
-
def version(self):
start = None
end = None
diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py
index 1668e271fa..fb91f24721 100644
--- a/lib/spack/spack/test/__init__.py
+++ b/lib/spack/spack/test/__init__.py
@@ -31,15 +31,16 @@ from llnl.util.filesystem import join_path
from llnl.util.tty.colify import colify
from spack.test.tally_plugin import Tally
"""Names of tests to be included in Spack's test suite"""
-test_names = ['versions', 'url_parse', 'url_substitution', 'packages', 'stage',
+
+test_names = ['architecture', 'versions', 'url_parse', 'url_substitution', 'packages', 'stage',
'spec_syntax', 'spec_semantics', 'spec_dag', 'concretize',
'multimethod', 'install', 'package_sanity', 'config',
'directory_layout', 'pattern', 'python_version', 'git_fetch',
'svn_fetch', 'hg_fetch', 'mirror', 'modules', 'url_extrapolate',
'cc', 'link_tree', 'spec_yaml', 'optional_deps',
'make_executable', 'configure_guess', 'lock', 'database',
- 'namespace_trie', 'yaml', 'sbang', 'environment',
- 'cmd.uninstall', 'cmd.test_install']
+ 'namespace_trie', 'yaml', 'sbang', 'environment', 'cmd.find',
+ 'cmd.uninstall', 'cmd.test_install', 'cmd.test_compiler_cmd']
def list_tests():
diff --git a/lib/spack/spack/test/architecture.py b/lib/spack/spack/test/architecture.py
new file mode 100644
index 0000000000..a6847c5744
--- /dev/null
+++ b/lib/spack/spack/test/architecture.py
@@ -0,0 +1,112 @@
+""" Test checks if the architecture class is created correctly and also that
+ the functions are looking for the correct architecture name
+"""
+import unittest
+import os
+import itertools
+import platform as py_platform
+import spack
+from spack.architecture import *
+from spack.spec import *
+from spack.platforms.cray_xc import CrayXc
+from spack.platforms.linux import Linux
+from spack.platforms.bgq import Bgq
+from spack.platforms.darwin import Darwin
+
+from spack.test.mock_packages_test import *
+
+#class ArchitectureTest(unittest.TestCase):
+class ArchitectureTest(MockPackagesTest):
+
+ def setUp(self):
+ super(ArchitectureTest, self).setUp()
+ self.platform = sys_type()
+
+ def tearDown(self):
+ super(ArchitectureTest, self).tearDown()
+
+ def test_dict_functions_for_architecture(self):
+ arch = Arch()
+ arch.platform = spack.architecture.sys_type()
+ arch.platform_os = arch.platform.operating_system('default_os')
+ arch.target = arch.platform.target('default_target')
+
+ d = arch.to_dict()
+
+ new_arch = spack.architecture.arch_from_dict(d)
+
+ self.assertEqual(arch, new_arch)
+
+ self.assertTrue( isinstance(arch, Arch) )
+ self.assertTrue( isinstance(arch.platform, Platform) )
+ self.assertTrue( isinstance(arch.platform_os, OperatingSystem) )
+ self.assertTrue( isinstance(arch.target, Target) )
+ self.assertTrue( isinstance(new_arch, Arch) )
+ self.assertTrue( isinstance(new_arch.platform, Platform) )
+ self.assertTrue( isinstance(new_arch.platform_os, OperatingSystem) )
+ self.assertTrue( isinstance(new_arch.target, Target) )
+
+
+ def test_sys_type(self):
+ output_platform_class = sys_type()
+ my_arch_class = None
+ if os.path.exists('/opt/cray/craype'):
+ my_platform_class = CrayXc()
+ elif os.path.exists('/bgsys'):
+ my_platform_class = Bgq()
+ elif 'Linux' in py_platform.system():
+ my_platform_class = Linux()
+ elif 'Darwin' in py_platform.system():
+ my_platform_class = Darwin()
+
+ self.assertEqual(str(output_platform_class), str(my_platform_class))
+
+ def test_user_front_end_input(self):
+ """Test when user inputs just frontend that both the frontend target
+ and frontend operating system match
+ """
+ frontend_os = self.platform.operating_system("frontend")
+ frontend_target = self.platform.target("frontend")
+ frontend_spec = Spec("libelf os=frontend target=frontend")
+ frontend_spec.concretize()
+ self.assertEqual(frontend_os, frontend_spec.architecture.platform_os)
+ self.assertEqual(frontend_target, frontend_spec.architecture.target)
+
+ def test_user_back_end_input(self):
+ """Test when user inputs backend that both the backend target and
+ backend operating system match
+ """
+ backend_os = self.platform.operating_system("backend")
+ backend_target = self.platform.target("backend")
+ backend_spec = Spec("libelf os=backend target=backend")
+ backend_spec.concretize()
+ self.assertEqual(backend_os, backend_spec.architecture.platform_os)
+ self.assertEqual(backend_target, backend_spec.architecture.target)
+
+ def test_user_defaults(self):
+ default_os = self.platform.operating_system("default_os")
+ default_target = self.platform.target("default_target")
+
+ default_spec = Spec("libelf") # default is no args
+ default_spec.concretize()
+ self.assertEqual(default_os, default_spec.architecture.platform_os)
+ self.assertEqual(default_target, default_spec.architecture.target)
+
+ def test_user_input_combination(self):
+ os_list = self.platform.operating_sys.keys()
+ target_list = self.platform.targets.keys()
+ additional = ["fe", "be", "frontend", "backend"]
+
+ os_list.extend(additional)
+ target_list.extend(additional)
+
+ combinations = itertools.product(os_list, target_list)
+ results = []
+ for arch in combinations:
+ o,t = arch
+ spec = Spec("libelf os=%s target=%s" % (o, t))
+ spec.concretize()
+ results.append(spec.architecture.platform_os == self.platform.operating_system(o))
+ results.append(spec.architecture.target == self.platform.target(t))
+ res = all(results)
+
+ self.assertTrue(res)
diff --git a/lib/spack/spack/test/cmd/find.py b/lib/spack/spack/test/cmd/find.py
new file mode 100644
index 0000000000..371e9650e0
--- /dev/null
+++ b/lib/spack/spack/test/cmd/find.py
@@ -0,0 +1,60 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+
+import spack.cmd.find
+import unittest
+
+
+class Bunch(object):
+
+ def __init__(self, **kwargs):
+ self.__dict__.update(kwargs)
+
+
+class FindTest(unittest.TestCase):
+
+ def test_query_arguments(self):
+ query_arguments = spack.cmd.find.query_arguments
+ # Default arguments
+ args = Bunch(only_missing=False, missing=False,
+ unknown=False, explicit=False, implicit=False)
+ q_args = query_arguments(args)
+ self.assertTrue('installed' in q_args)
+ self.assertTrue('known' in q_args)
+ self.assertTrue('explicit' in q_args)
+ self.assertEqual(q_args['installed'], True)
+ self.assertEqual(q_args['known'], any)
+ self.assertEqual(q_args['explicit'], any)
+ # Check that explicit works correctly
+ args.explicit = True
+ q_args = query_arguments(args)
+ self.assertEqual(q_args['explicit'], True)
+ args.explicit = False
+ args.implicit = True
+ q_args = query_arguments(args)
+ self.assertEqual(q_args['explicit'], False)
+ args.explicit = True
+ self.assertRaises(SystemExit, query_arguments, args)
diff --git a/lib/spack/spack/test/cmd/test_compiler_cmd.py b/lib/spack/spack/test/cmd/test_compiler_cmd.py
new file mode 100644
index 0000000000..d89814154b
--- /dev/null
+++ b/lib/spack/spack/test/cmd/test_compiler_cmd.py
@@ -0,0 +1,81 @@
+import os
+import shutil
+from tempfile import mkdtemp
+
+from llnl.util.filesystem import set_executable, mkdirp
+
+import spack.spec
+import spack.cmd.compiler
+import spack.compilers
+from spack.version import Version
+from spack.test.mock_packages_test import *
+
+test_version = '4.5-spacktest'
+
+class MockArgs(object):
+ def __init__(self, add_paths=[], scope=None, compiler_spec=None, all=None):
+ self.add_paths = add_paths
+ self.scope = scope
+ self.compiler_spec = compiler_spec
+ self.all = all
+
+
+def make_mock_compiler():
+ """Make a directory containing a fake, but detectable compiler."""
+ mock_compiler_dir = mkdtemp()
+ bin_dir = os.path.join(mock_compiler_dir, 'bin')
+ mkdirp(bin_dir)
+
+ gcc_path = os.path.join(bin_dir, 'gcc')
+ gxx_path = os.path.join(bin_dir, 'g++')
+ gfortran_path = os.path.join(bin_dir, 'gfortran')
+
+ with open(gcc_path, 'w') as f:
+ f.write("""\
+#!/bin/sh
+
+for arg in "$@"; do
+ if [ "$arg" = -dumpversion ]; then
+ echo '%s'
+ fi
+done
+""" % test_version)
+
+ # Create some mock compilers in the temporary directory
+ set_executable(gcc_path)
+ shutil.copy(gcc_path, gxx_path)
+ shutil.copy(gcc_path, gfortran_path)
+
+ return mock_compiler_dir
+
+
+class CompilerCmdTest(MockPackagesTest):
+ """ Test compiler commands for add and remove """
+
+
+ def test_compiler_remove(self):
+ args = MockArgs(all=True, compiler_spec='gcc@4.5.0')
+ spack.cmd.compiler.compiler_remove(args)
+ compilers = spack.compilers.all_compilers()
+ self.assertTrue(spack.spec.CompilerSpec("gcc@4.5.0") not in compilers)
+
+
+ def test_compiler_add(self):
+ # compilers available by default.
+ old_compilers = set(spack.compilers.all_compilers())
+
+ # add our new compiler and find again.
+ compiler_dir = make_mock_compiler()
+
+ try:
+ args = MockArgs(add_paths=[compiler_dir])
+ spack.cmd.compiler.compiler_find(args)
+
+ # ensure new compiler is in there
+ new_compilers = set(spack.compilers.all_compilers())
+ new_compiler = new_compilers - old_compilers
+ self.assertTrue(new_compiler)
+ self.assertTrue(new_compiler.pop().version == Version(test_version))
+
+ finally:
+ shutil.rmtree(compiler_dir, ignore_errors=True)
diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py
index 963481054e..ab201f406a 100644
--- a/lib/spack/spack/test/concretize.py
+++ b/lib/spack/spack/test/concretize.py
@@ -23,6 +23,7 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import spack
+import spack.architecture
from spack.spec import Spec, CompilerSpec
from spack.version import ver
from spack.concretize import find_spec
@@ -253,6 +254,19 @@ class ConcretizeTest(MockPackagesTest):
self.assertTrue(spec['externaltool'].compiler.satisfies('gcc'))
+ def test_external_package_module(self):
+ # No tcl modules on darwin/linux machines
+ # TODO: improved way to check for this.
+ if (spack.architecture.sys_type().name == 'darwin' or
+ spack.architecture.sys_type().name == 'linux'):
+ return
+
+ spec = Spec('externalmodule')
+ spec.concretize()
+ self.assertEqual(spec['externalmodule'].external_module, 'external-module')
+ self.assertFalse('externalprereq' in spec)
+ self.assertTrue(spec['externalmodule'].compiler.satisfies('gcc'))
+
def test_nobuild_package(self):
got_error = False
spec = Spec('externaltool%clang')
diff --git a/lib/spack/spack/test/config.py b/lib/spack/spack/test/config.py
index eff482f4c6..252d77e66b 100644
--- a/lib/spack/spack/test/config.py
+++ b/lib/spack/spack/test/config.py
@@ -32,45 +32,75 @@ from ordereddict_backport import OrderedDict
from spack.test.mock_packages_test import *
# Some sample compiler config data
-a_comps = {
- "x86_64_E5v2_IntelIB": {
- "gcc@4.7.3" : {
+a_comps = [
+ {'compiler': {
+ 'paths': {
"cc" : "/gcc473",
"cxx": "/g++473",
"f77": None,
- "fc" : None },
- "gcc@4.5.0" : {
+ "fc" : None
+ },
+ 'modules': None,
+ 'spec': 'gcc@4.7.3',
+ 'operating_system': 'CNL10'
+ }},
+ {'compiler': {
+ 'paths': {
"cc" : "/gcc450",
"cxx": "/g++450",
- "f77": "/gfortran",
- "fc" : "/gfortran" },
- "clang@3.3" : {
+ "f77": 'gfortran',
+ "fc" : 'gfortran'
+ },
+ 'modules': None,
+ 'spec': 'gcc@4.5.0',
+ 'operating_system': 'CNL10'
+ }},
+ {'compiler': {
+ 'paths': {
"cc" : "<overwritten>",
"cxx": "<overwritten>",
- "f77": "<overwritten>",
- "fc" : "<overwritten>" }
- }
-}
-
-b_comps = {
- "x86_64_E5v3": {
- "icc@10.0" : {
+ "f77": '<overwritten>',
+ "fc" : '<overwritten>' },
+ 'modules': None,
+ 'spec': 'clang@3.3',
+ 'operating_system': 'CNL10'
+ }}
+]
+
+b_comps = [
+ {'compiler': {
+ 'paths': {
"cc" : "/icc100",
- "cxx": "/icc100",
+ "cxx": "/icp100",
"f77": None,
- "fc" : None },
- "icc@11.1" : {
+ "fc" : None
+ },
+ 'modules': None,
+ 'spec': 'icc@10.0',
+ 'operating_system': 'CNL10'
+ }},
+ {'compiler': {
+ 'paths': {
"cc" : "/icc111",
"cxx": "/icp111",
- "f77": "/ifort",
- "fc" : "/ifort" },
- "clang@3.3" : {
- "cc" : "/clang",
- "cxx": "/clang++",
- "f77": None,
- "fc" : None}
- }
-}
+ "f77": 'ifort',
+ "fc" : 'ifort'
+ },
+ 'modules': None,
+ 'spec': 'icc@11.1',
+ 'operating_system': 'CNL10'
+ }},
+ {'compiler': {
+ 'paths': {
+ "cc" : "<overwritten>",
+ "cxx": "<overwritten>",
+ "f77": '<overwritten>',
+ "fc" : '<overwritten>' },
+ 'modules': None,
+ 'spec': 'clang@3.3',
+ 'operating_system': 'CNL10'
+ }}
+]
# Some Sample repo data
repos_low = [ "/some/path" ]
@@ -89,15 +119,28 @@ class ConfigTest(MockPackagesTest):
super(ConfigTest, self).tearDown()
shutil.rmtree(self.tmp_dir, True)
- def check_config(self, comps, arch, *compiler_names):
+
+ def check_config(self, comps, *compiler_names):
"""Check that named compilers in comps match Spack's config."""
config = spack.config.get_config('compilers')
compiler_list = ['cc', 'cxx', 'f77', 'fc']
- for key in compiler_names:
- for c in compiler_list:
- expected = comps[arch][key][c]
- actual = config[arch][key][c]
- self.assertEqual(expected, actual)
+ param_list = ['modules', 'paths', 'spec', 'operating_system']
+ for compiler in config:
+ conf = compiler['compiler']
+ if conf['spec'] in compiler_names:
+ comp = None
+ for c in comps:
+ if c['compiler']['spec'] == conf['spec']:
+ comp = c['compiler']
+ break
+ if not comp:
+ self.fail('Bad config spec')
+ for p in param_list:
+ self.assertEqual(conf[p], comp[p])
+ for c in compiler_list:
+ expected = comp['paths'][c]
+ actual = conf['paths'][c]
+ self.assertEqual(expected, actual)
def test_write_list_in_memory(self):
spack.config.update_config('repos', repos_low, 'test_low_priority')
@@ -111,8 +154,9 @@ class ConfigTest(MockPackagesTest):
spack.config.update_config('compilers', b_comps, 'test_high_priority')
# Make sure the config looks how we expect.
- self.check_config(a_comps, 'x86_64_E5v2_IntelIB', 'gcc@4.7.3', 'gcc@4.5.0')
- self.check_config(b_comps, 'x86_64_E5v3', 'icc@10.0', 'icc@11.1', 'clang@3.3')
+ self.check_config(a_comps, 'gcc@4.7.3', 'gcc@4.5.0')
+ self.check_config(b_comps, 'icc@10.0', 'icc@11.1', 'clang@3.3')
+
def test_write_key_to_disk(self):
# Write b_comps "on top of" a_comps.
@@ -123,8 +167,8 @@ class ConfigTest(MockPackagesTest):
spack.config.clear_config_caches()
# Same check again, to ensure consistency.
- self.check_config(a_comps, 'x86_64_E5v2_IntelIB', 'gcc@4.7.3', 'gcc@4.5.0')
- self.check_config(b_comps, 'x86_64_E5v3', 'icc@10.0', 'icc@11.1', 'clang@3.3')
+ self.check_config(a_comps, 'gcc@4.7.3', 'gcc@4.5.0')
+ self.check_config(b_comps, 'icc@10.0', 'icc@11.1', 'clang@3.3')
def test_write_to_same_priority_file(self):
# Write b_comps in the same file as a_comps.
@@ -135,5 +179,5 @@ class ConfigTest(MockPackagesTest):
spack.config.clear_config_caches()
# Same check again, to ensure consistency.
- self.check_config(a_comps, 'x86_64_E5v2_IntelIB', 'gcc@4.7.3', 'gcc@4.5.0')
- self.check_config(b_comps, 'x86_64_E5v3', 'icc@10.0', 'icc@11.1', 'clang@3.3')
+ self.check_config(a_comps, 'gcc@4.7.3', 'gcc@4.5.0')
+ self.check_config(b_comps, 'icc@10.0', 'icc@11.1', 'clang@3.3')
diff --git a/lib/spack/spack/test/environment.py b/lib/spack/spack/test/environment.py
index ded1539e18..a0d959db2f 100644
--- a/lib/spack/spack/test/environment.py
+++ b/lib/spack/spack/test/environment.py
@@ -24,17 +24,20 @@
##############################################################################
import unittest
import os
+import copy
from spack.environment import EnvironmentModifications
class EnvironmentTest(unittest.TestCase):
def setUp(self):
- os.environ.clear()
os.environ['UNSET_ME'] = 'foo'
os.environ['EMPTY_PATH_LIST'] = ''
os.environ['PATH_LIST'] = '/path/second:/path/third'
os.environ['REMOVE_PATH_LIST'] = '/a/b:/duplicate:/a/c:/remove/this:/a/d:/duplicate/:/f/g'
+ def tearDown(self):
+ pass
+
def test_set(self):
env = EnvironmentModifications()
env.set('A', 'dummy value')
diff --git a/lib/spack/spack/test/mock_packages_test.py b/lib/spack/spack/test/mock_packages_test.py
index 595667bf35..a56bd8ebdc 100644
--- a/lib/spack/spack/test/mock_packages_test.py
+++ b/lib/spack/spack/test/mock_packages_test.py
@@ -34,20 +34,127 @@ from ordereddict_backport import OrderedDict
from spack.repository import RepoPath
from spack.spec import Spec
+platform = spack.architecture.sys_type()
+
+linux_os_name = 'debian'
+linux_os_version = '6'
+
+if platform.name == 'linux':
+ linux_os = platform.operating_system("default_os")
+ linux_os_name = linux_os.name
+ linux_os_version = linux_os.version
+
mock_compiler_config = """\
compilers:
- all:
- clang@3.3:
+- compiler:
+ spec: clang@3.3
+ operating_system: {0}{1}
+ paths:
cc: /path/to/clang
cxx: /path/to/clang++
f77: None
fc: None
- gcc@4.5.0:
+ modules: 'None'
+- compiler:
+ spec: gcc@4.5.0
+ operating_system: {0}{1}
+ paths:
+ cc: /path/to/gcc
+ cxx: /path/to/g++
+ f77: None
+ fc: None
+ modules: 'None'
+- compiler:
+ spec: clang@3.3
+ operating_system: CNL10
+ paths:
+ cc: /path/to/clang
+ cxx: /path/to/clang++
+ f77: None
+ fc: None
+ modules: 'None'
+- compiler:
+ spec: clang@3.3
+ operating_system: SuSE11
+ paths:
+ cc: /path/to/clang
+ cxx: /path/to/clang++
+ f77: None
+ fc: None
+ modules: 'None'
+- compiler:
+ spec: clang@3.3
+ operating_system: redhat6
+ paths:
+ cc: /path/to/clang
+ cxx: /path/to/clang++
+ f77: None
+ fc: None
+ modules: 'None'
+- compiler:
+ spec: clang@3.3
+ operating_system: yosemite
+ paths:
+ cc: /path/to/clang
+ cxx: /path/to/clang++
+ f77: None
+ fc: None
+ modules: 'None'
+- compiler:
+ paths:
cc: /path/to/gcc
cxx: /path/to/g++
f77: /path/to/gfortran
fc: /path/to/gfortran
-"""
+ operating_system: CNL10
+ spec: gcc@4.5.0
+ modules: 'None'
+- compiler:
+ paths:
+ cc: /path/to/gcc
+ cxx: /path/to/g++
+ f77: /path/to/gfortran
+ fc: /path/to/gfortran
+ operating_system: SuSE11
+ spec: gcc@4.5.0
+ modules: 'None'
+- compiler:
+ paths:
+ cc: /path/to/gcc
+ cxx: /path/to/g++
+ f77: /path/to/gfortran
+ fc: /path/to/gfortran
+ operating_system: redhat6
+ spec: gcc@4.5.0
+ modules: 'None'
+- compiler:
+ paths:
+ cc: /path/to/gcc
+ cxx: /path/to/g++
+ f77: /path/to/gfortran
+ fc: /path/to/gfortran
+ operating_system: yosemite
+ spec: gcc@4.5.0
+ modules: 'None'
+- compiler:
+ paths:
+ cc: /path/to/gcc
+ cxx: /path/to/g++
+ f77: /path/to/gfortran
+ fc: /path/to/gfortran
+ operating_system: elcapitan
+ spec: gcc@4.5.0
+ modules: 'None'
+- compiler:
+ spec: clang@3.3
+ operating_system: elcapitan
+ paths:
+ cc: /path/to/clang
+ cxx: /path/to/clang++
+ f77: None
+ fc: None
+ modules: 'None'
+""".format(linux_os_name, linux_os_version)
mock_packages_config = """\
packages:
@@ -60,6 +167,10 @@ packages:
paths:
externalvirtual@2.0%clang@3.3: /path/to/external_virtual_clang
externalvirtual@1.0%gcc@4.5.0: /path/to/external_virtual_gcc
+ externalmodule:
+ buildable: False
+ modules:
+ externalmodule@1.0%gcc@4.5.0: external-module
"""
class MockPackagesTest(unittest.TestCase):
diff --git a/lib/spack/spack/test/modules.py b/lib/spack/spack/test/modules.py
index c73badf8f2..582e067860 100644
--- a/lib/spack/spack/test/modules.py
+++ b/lib/spack/spack/test/modules.py
@@ -73,7 +73,7 @@ configuration_alter_environment = {
'all': {
'filter': {'environment_blacklist': ['CMAKE_PREFIX_PATH']}
},
- 'arch=x86-linux': {
+ 'platform=test target=x86_64': {
'environment': {'set': {'FOO': 'foo'},
'unset': ['BAR']}
}
@@ -116,6 +116,7 @@ class TclTests(MockPackagesTest):
def get_modulefile_content(self, spec):
spec.concretize()
+ print spec, '&&&&&'
generator = spack.modules.TclModule(spec)
generator.write()
content = FILE_REGISTRY[generator.file_name].split('\n')
@@ -123,27 +124,28 @@ class TclTests(MockPackagesTest):
def test_simple_case(self):
spack.modules.CONFIGURATION = configuration_autoload_direct
- spec = spack.spec.Spec('mpich@3.0.4 arch=x86-linux')
+ spec = spack.spec.Spec('mpich@3.0.4')
content = self.get_modulefile_content(spec)
self.assertTrue('module-whatis "mpich @3.0.4"' in content)
def test_autoload(self):
spack.modules.CONFIGURATION = configuration_autoload_direct
- spec = spack.spec.Spec('mpileaks arch=x86-linux')
+ spec = spack.spec.Spec('mpileaks')
content = self.get_modulefile_content(spec)
self.assertEqual(len([x for x in content if 'is-loaded' in x]), 2)
self.assertEqual(len([x for x in content if 'module load ' in x]), 2)
spack.modules.CONFIGURATION = configuration_autoload_all
- spec = spack.spec.Spec('mpileaks arch=x86-linux')
+ spec = spack.spec.Spec('mpileaks')
content = self.get_modulefile_content(spec)
self.assertEqual(len([x for x in content if 'is-loaded' in x]), 5)
self.assertEqual(len([x for x in content if 'module load ' in x]), 5)
def test_alter_environment(self):
spack.modules.CONFIGURATION = configuration_alter_environment
- spec = spack.spec.Spec('mpileaks arch=x86-linux')
+ spec = spack.spec.Spec('mpileaks platform=test target=x86_64')
content = self.get_modulefile_content(spec)
+ print content
self.assertEqual(
len([x
for x in content
@@ -152,8 +154,9 @@ class TclTests(MockPackagesTest):
len([x for x in content if 'setenv FOO "foo"' in x]), 1)
self.assertEqual(len([x for x in content if 'unsetenv BAR' in x]), 1)
- spec = spack.spec.Spec('libdwarf arch=x64-linux')
+ spec = spack.spec.Spec('libdwarf %clang platform=test target=x86_32')
content = self.get_modulefile_content(spec)
+ print content
self.assertEqual(
len([x
for x in content
@@ -164,14 +167,14 @@ class TclTests(MockPackagesTest):
def test_blacklist(self):
spack.modules.CONFIGURATION = configuration_blacklist
- spec = spack.spec.Spec('mpileaks arch=x86-linux')
+ spec = spack.spec.Spec('mpileaks')
content = self.get_modulefile_content(spec)
self.assertEqual(len([x for x in content if 'is-loaded' in x]), 1)
self.assertEqual(len([x for x in content if 'module load ' in x]), 1)
def test_conflicts(self):
spack.modules.CONFIGURATION = configuration_conflicts
- spec = spack.spec.Spec('mpileaks arch=x86-linux')
+ spec = spack.spec.Spec('mpileaks')
content = self.get_modulefile_content(spec)
self.assertEqual(
len([x for x in content if x.startswith('conflict')]), 2)
diff --git a/lib/spack/spack/test/multimethod.py b/lib/spack/spack/test/multimethod.py
index a33656adcc..034e6b3923 100644
--- a/lib/spack/spack/test/multimethod.py
+++ b/lib/spack/spack/test/multimethod.py
@@ -92,21 +92,18 @@ class MultiMethodTest(MockPackagesTest):
self.assertEqual(pkg.has_a_default(), 'default')
- def test_architecture_match(self):
- pkg = spack.repo.get('multimethod arch=x86_64')
- self.assertEqual(pkg.different_by_architecture(), 'x86_64')
-
- pkg = spack.repo.get('multimethod arch=ppc64')
- self.assertEqual(pkg.different_by_architecture(), 'ppc64')
-
- pkg = spack.repo.get('multimethod arch=ppc32')
- self.assertEqual(pkg.different_by_architecture(), 'ppc32')
-
- pkg = spack.repo.get('multimethod arch=arm64')
- self.assertEqual(pkg.different_by_architecture(), 'arm64')
-
- pkg = spack.repo.get('multimethod arch=macos')
- self.assertRaises(NoSuchMethodError, pkg.different_by_architecture)
+ def test_target_match(self):
+ platform = spack.architecture.sys_type()
+ targets = platform.targets.values()
+ for target in targets[:-1]:
+ pkg = spack.repo.get('multimethod target='+target.name)
+ self.assertEqual(pkg.different_by_target(), target.name)
+
+ pkg = spack.repo.get('multimethod target='+targets[-1].name)
+ if len(targets) == 1:
+ self.assertEqual(pkg.different_by_target(), targets[-1].name)
+ else:
+ self.assertRaises(NoSuchMethodError, pkg.different_by_target)
def test_dependency_match(self):
diff --git a/lib/spack/spack/test/operating_system.py b/lib/spack/spack/test/operating_system.py
new file mode 100644
index 0000000000..ed5f6ff8ad
--- /dev/null
+++ b/lib/spack/spack/test/operating_system.py
@@ -0,0 +1,55 @@
+""" Test checks if the operating_system class is created correctly and that
+the functions are using the correct operating_system. Also checks whether
+the operating_system correctly uses the compiler_strategy
+"""
+
+import unittest
+import os
+import platform
+from spack.platforms.cray_xc import CrayXc
+from spack.platforms.linux import Linux
+from spack.platforms.darwin import Darwin
+from spack.operating_systems.linux_distro import LinuxDistro
+from spack.operating_systems.mac_os import MacOs
+from spack.operating_systems.cnl import Cnl
+
+class TestOperatingSystem(unittest.TestCase):
+
+ def setUp(self):
+ cray_xc = CrayXc()
+ linux = Linux()
+ darwin = Darwin()
+ self.cray_operating_sys = cray_xc.operating_system('front_os')
+ self.cray_default_os = cray_xc.operating_system('default_os')
+ self.cray_back_os = cray_xc.operating_system('back_os')
+ self.darwin_operating_sys = darwin.operating_system('default_os')
+ self.linux_operating_sys = linux.operating_system('default_os')
+
+ def test_cray_front_end_operating_system(self):
+ self.assertIsInstance(self.cray_operating_sys, LinuxDistro)
+
+ def test_cray_front_end_compiler_strategy(self):
+ self.assertEquals(self.cray_operating_sys.compiler_strategy, "PATH")
+
+ def test_cray_back_end_operating_system(self):
+ self.assertIsInstance(self.cray_back_os, ComputeNodeLinux)
+
+ def test_cray_back_end_compiler_strategy(self):
+ self.assertEquals(self.cray_back_os.compiler_strategy, "MODULES")
+
+ def test_linux_operating_system(self):
+ self.assertIsInstance(self.linux_operating_sys, LinuxDistro)
+
+ def test_linux_compiler_strategy(self):
+ self.assertEquals(self.linux_operating_sys.compiler_strategy, "PATH")
+
+
+ def test_cray_front_end_compiler_list(self):
+ """ Operating systems will now be in charge of finding compilers.
+ So, depending on which operating system you want to build for
+ or which operating system you are on, then you could detect
+ compilers in a certain way. Cray linux environment on the front
+ end is just a regular linux distro whereas the Cray linux compute
+ node is a stripped down version which modules are important
+ """
+ self.assertEquals(True, False)
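
A condensed sketch of what these new tests assert, assuming the platform and operating-system classes added elsewhere in this commit (an illustration of intent, not a definitive API reference):

    from spack.platforms.linux import Linux
    from spack.platforms.cray_xc import CrayXc

    # Each platform hands back an operating system by role name, and the
    # operating system carries the strategy used to detect compilers.
    front_os = Linux().operating_system('default_os')   # a LinuxDistro
    back_os = CrayXc().operating_system('back_os')      # a ComputeNodeLinux
    assert front_os.compiler_strategy == 'PATH'      # search compilers on $PATH
    assert back_os.compiler_strategy == 'MODULES'    # compilers found via modules
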
diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py
index 52f4f7395e..712f07ac4d 100644
--- a/lib/spack/spack/test/spec_dag.py
+++ b/lib/spack/spack/test/spec_dag.py
@@ -29,6 +29,7 @@ You can find the dummy packages here::
spack/lib/spack/spack/test/mock_packages
"""
import spack
+import spack.architecture
import spack.package
from llnl.util.lang import list_modules
@@ -241,8 +242,10 @@ class SpecDagTest(MockPackagesTest):
def test_unsatisfiable_architecture(self):
- self.set_pkg_dep('mpileaks', 'mpich arch=bgqos_0')
- spec = Spec('mpileaks ^mpich arch=sles_10_ppc64 ^callpath ^dyninst ^libelf ^libdwarf')
+ platform = spack.architecture.sys_type()
+
+ self.set_pkg_dep('mpileaks', 'mpich platform=test target=be')
+ spec = Spec('mpileaks ^mpich platform=test target=fe ^callpath ^dyninst ^libelf ^libdwarf')
self.assertRaises(spack.spec.UnsatisfiableArchitectureSpecError, spec.normalize)
diff --git a/lib/spack/spack/test/spec_semantics.py b/lib/spack/spack/test/spec_semantics.py
index 0cb78b90ed..9876bfd5a8 100644
--- a/lib/spack/spack/test/spec_semantics.py
+++ b/lib/spack/spack/test/spec_semantics.py
@@ -23,6 +23,7 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import unittest
+import spack.architecture
from spack.spec import *
from spack.test.mock_packages_test import *
@@ -107,7 +108,8 @@ class SpecSematicsTest(MockPackagesTest):
def test_satisfies_namespaced_dep(self):
- """Ensure spec from same or unspecified namespace satisfies namespace constraint."""
+ """Ensure spec from same or unspecified namespace satisfies namespace
+ constraint."""
self.check_satisfies('mpileaks ^builtin.mock.mpich', '^mpich')
self.check_satisfies('mpileaks ^builtin.mock.mpich', '^mpi')
@@ -139,11 +141,16 @@ class SpecSematicsTest(MockPackagesTest):
def test_satisfies_architecture(self):
- self.check_satisfies('foo arch=chaos_5_x86_64_ib', ' arch=chaos_5_x86_64_ib')
- self.check_satisfies('foo arch=bgqos_0', ' arch=bgqos_0')
-
- self.check_unsatisfiable('foo arch=bgqos_0', ' arch=chaos_5_x86_64_ib')
- self.check_unsatisfiable('foo arch=chaos_5_x86_64_ib', ' arch=bgqos_0')
+ platform = spack.architecture.sys_type()
+ self.check_satisfies(
+ 'foo platform=test target=frontend os=frontend',
+ 'platform=test target=frontend os=frontend')
+ self.check_satisfies(
+ 'foo platform=test target=backend os=backend',
+ 'platform=test target=backend')
+ self.check_satisfies(
+ 'foo platform=test target=backend os=backend',
+ 'platform=test os=backend')
+ self.check_satisfies(
+ 'foo platform=test target=default_target os=default_os',
+ 'platform=test target=default_target os=default_os')
def test_satisfies_dependencies(self):
@@ -158,10 +165,14 @@ class SpecSematicsTest(MockPackagesTest):
self.check_satisfies('mpileaks^mpich@2.0', '^mpich@1:3')
self.check_unsatisfiable('mpileaks^mpich@1.2', '^mpich@2.0')
- self.check_satisfies('mpileaks^mpich@2.0^callpath@1.5', '^mpich@1:3^callpath@1.4:1.6')
- self.check_unsatisfiable('mpileaks^mpich@4.0^callpath@1.5', '^mpich@1:3^callpath@1.4:1.6')
- self.check_unsatisfiable('mpileaks^mpich@2.0^callpath@1.7', '^mpich@1:3^callpath@1.4:1.6')
- self.check_unsatisfiable('mpileaks^mpich@4.0^callpath@1.7', '^mpich@1:3^callpath@1.4:1.6')
+ self.check_satisfies(
+ 'mpileaks^mpich@2.0^callpath@1.5', '^mpich@1:3^callpath@1.4:1.6')
+ self.check_unsatisfiable(
+ 'mpileaks^mpich@4.0^callpath@1.5', '^mpich@1:3^callpath@1.4:1.6')
+ self.check_unsatisfiable(
+ 'mpileaks^mpich@2.0^callpath@1.7', '^mpich@1:3^callpath@1.4:1.6')
+ self.check_unsatisfiable(
+ 'mpileaks^mpich@4.0^callpath@1.7', '^mpich@1:3^callpath@1.4:1.6')
def test_satisfies_virtual_dependencies(self):
@@ -350,10 +361,13 @@ class SpecSematicsTest(MockPackagesTest):
self.check_constrain('libelf cflags="-O3" cppflags="-Wall"', 'libelf cflags="-O3"', 'libelf cflags="-O3" cppflags="-Wall"')
- def test_constrain_arch(self):
- self.check_constrain('libelf arch=bgqos_0', 'libelf arch=bgqos_0', 'libelf arch=bgqos_0')
- self.check_constrain('libelf arch=bgqos_0', 'libelf', 'libelf arch=bgqos_0')
-
+ def test_constrain_architecture(self):
+ self.check_constrain('libelf target=default_target os=default_os',
+ 'libelf target=default_target os=default_os',
+ 'libelf target=default_target os=default_os')
+ self.check_constrain('libelf target=default_target os=default_os',
+ 'libelf',
+ 'libelf target=default_target os=default_os')
def test_constrain_compiler(self):
self.check_constrain('libelf %gcc@4.4.7', 'libelf %gcc@4.4.7', 'libelf %gcc@4.4.7')
@@ -369,9 +383,8 @@ class SpecSematicsTest(MockPackagesTest):
self.check_invalid_constraint('libelf debug=2', 'libelf debug=1')
self.check_invalid_constraint('libelf cppflags="-O3"', 'libelf cppflags="-O2"')
-
- self.check_invalid_constraint('libelf arch=bgqos_0', 'libelf arch=x86_54')
-
+ self.check_invalid_constraint('libelf platform=test target=be os=be',
+ 'libelf target=fe os=fe')
def test_constrain_changed(self):
self.check_constrain_changed('libelf', '@1.0')
@@ -382,7 +395,10 @@ class SpecSematicsTest(MockPackagesTest):
self.check_constrain_changed('libelf', '~debug')
self.check_constrain_changed('libelf', 'debug=2')
self.check_constrain_changed('libelf', 'cppflags="-O3"')
- self.check_constrain_changed('libelf', ' arch=bgqos_0')
+
+ platform = spack.architecture.sys_type()
+ self.check_constrain_changed('libelf', 'target='+platform.target('default_target').name)
+ self.check_constrain_changed('libelf', 'os='+platform.operating_system('default_os').name)
def test_constrain_not_changed(self):
@@ -395,9 +411,10 @@ class SpecSematicsTest(MockPackagesTest):
self.check_constrain_not_changed('libelf~debug', '~debug')
self.check_constrain_not_changed('libelf debug=2', 'debug=2')
self.check_constrain_not_changed('libelf cppflags="-O3"', 'cppflags="-O3"')
- self.check_constrain_not_changed('libelf arch=bgqos_0', ' arch=bgqos_0')
- self.check_constrain_not_changed('libelf^foo', 'libelf^foo')
- self.check_constrain_not_changed('libelf^foo^bar', 'libelf^foo^bar')
+
+ platform = spack.architecture.sys_type()
+ default_target = platform.target('default_target').name
+ self.check_constrain_not_changed('libelf target='+default_target, 'target='+default_target)
def test_constrain_dependency_changed(self):
@@ -407,8 +424,10 @@ class SpecSematicsTest(MockPackagesTest):
self.check_constrain_changed('libelf^foo%gcc', 'libelf^foo%gcc@4.5')
self.check_constrain_changed('libelf^foo', 'libelf^foo+debug')
self.check_constrain_changed('libelf^foo', 'libelf^foo~debug')
- self.check_constrain_changed('libelf^foo', 'libelf^foo cppflags="-O3"')
- self.check_constrain_changed('libelf^foo', 'libelf^foo arch=bgqos_0')
+
+ platform = spack.architecture.sys_type()
+ default_target = platform.target('default_target').name
+ self.check_constrain_changed('libelf^foo', 'libelf^foo target='+default_target)
def test_constrain_dependency_not_changed(self):
@@ -419,5 +438,7 @@ class SpecSematicsTest(MockPackagesTest):
self.check_constrain_not_changed('libelf^foo+debug', 'libelf^foo+debug')
self.check_constrain_not_changed('libelf^foo~debug', 'libelf^foo~debug')
self.check_constrain_not_changed('libelf^foo cppflags="-O3"', 'libelf^foo cppflags="-O3"')
- self.check_constrain_not_changed('libelf^foo arch=bgqos_0', 'libelf^foo arch=bgqos_0')
+ platform = spack.architecture.sys_type()
+ default_target = platform.target('default_target').name
+ self.check_constrain_not_changed('libelf^foo target='+default_target, 'libelf^foo target='+default_target)
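
For reference, a minimal sketch of the platform/target/os constraints these tests now use. It assumes the mock 'test' platform and that Spec.satisfies accepts an anonymous spec string, as the check_satisfies helper above relies on:

    from spack.spec import Spec

    # A spec pinned to the backend of the mock platform satisfies a looser
    # constraint naming only the platform and target...
    s = Spec('libelf platform=test target=be os=be')
    assert s.satisfies('platform=test target=be')
    # ...but not a constraint that asks for the frontend instead.
    assert not s.satisfies('target=fe os=fe')
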
diff --git a/lib/spack/spack/test/spec_syntax.py b/lib/spack/spack/test/spec_syntax.py
index c4e4c9cdfe..4a534d7b5c 100644
--- a/lib/spack/spack/test/spec_syntax.py
+++ b/lib/spack/spack/test/spec_syntax.py
@@ -58,7 +58,7 @@ class SpecSyntaxTest(unittest.TestCase):
# ================================================================================
# Parse checks
# ================================================================================
- def check_parse(self, expected, spec=None):
+ def check_parse(self, expected, spec=None, remove_arch=True):
"""Assert that the provided spec is able to be parsed.
If this is called with one argument, it assumes that the string is
canonical (i.e., no spaces and ~ instead of - for variants) and that it
@@ -70,6 +70,7 @@ class SpecSyntaxTest(unittest.TestCase):
if spec is None:
spec = expected
output = spack.spec.parse(spec)
+
parsed = (" ".join(str(spec) for spec in output))
self.assertEqual(expected, parsed)
diff --git a/lib/spack/spack/test/versions.py b/lib/spack/spack/test/versions.py
index a026403e2e..4624f901c8 100644
--- a/lib/spack/spack/test/versions.py
+++ b/lib/spack/spack/test/versions.py
@@ -43,7 +43,6 @@ class VersionsTest(unittest.TestCase):
self.assertFalse(a > b)
self.assertFalse(a >= b)
-
def assert_ver_gt(self, a, b):
a, b = ver(a), ver(b)
self.assertTrue(a > b)
@@ -53,7 +52,6 @@ class VersionsTest(unittest.TestCase):
self.assertFalse(a < b)
self.assertFalse(a <= b)
-
def assert_ver_eq(self, a, b):
a, b = ver(a), ver(b)
self.assertFalse(a > b)
@@ -63,55 +61,43 @@ class VersionsTest(unittest.TestCase):
self.assertFalse(a < b)
self.assertTrue(a <= b)
-
def assert_in(self, needle, haystack):
self.assertTrue(ver(needle) in ver(haystack))
-
def assert_not_in(self, needle, haystack):
self.assertFalse(ver(needle) in ver(haystack))
-
def assert_canonical(self, canonical_list, version_list):
self.assertEqual(ver(canonical_list), ver(version_list))
-
def assert_overlaps(self, v1, v2):
self.assertTrue(ver(v1).overlaps(ver(v2)))
-
def assert_no_overlap(self, v1, v2):
self.assertFalse(ver(v1).overlaps(ver(v2)))
-
def assert_satisfies(self, v1, v2):
self.assertTrue(ver(v1).satisfies(ver(v2)))
-
def assert_does_not_satisfy(self, v1, v2):
self.assertFalse(ver(v1).satisfies(ver(v2)))
-
def check_intersection(self, expected, a, b):
self.assertEqual(ver(expected), ver(a).intersection(ver(b)))
-
def check_union(self, expected, a, b):
self.assertEqual(ver(expected), ver(a).union(ver(b)))
-
def test_two_segments(self):
self.assert_ver_eq('1.0', '1.0')
self.assert_ver_lt('1.0', '2.0')
self.assert_ver_gt('2.0', '1.0')
-
def test_three_segments(self):
self.assert_ver_eq('2.0.1', '2.0.1')
self.assert_ver_lt('2.0', '2.0.1')
self.assert_ver_gt('2.0.1', '2.0')
-
def test_alpha(self):
# TODO: not sure whether I like this. 2.0.1a is *usually*
# TODO: less than 2.0.1, but special-casing it makes version
@@ -120,7 +106,6 @@ class VersionsTest(unittest.TestCase):
self.assert_ver_gt('2.0.1a', '2.0.1')
self.assert_ver_lt('2.0.1', '2.0.1a')
-
def test_patch(self):
self.assert_ver_eq('5.5p1', '5.5p1')
self.assert_ver_lt('5.5p1', '5.5p2')
@@ -129,7 +114,6 @@ class VersionsTest(unittest.TestCase):
self.assert_ver_lt('5.5p1', '5.5p10')
self.assert_ver_gt('5.5p10', '5.5p1')
-
def test_num_alpha_with_no_separator(self):
self.assert_ver_lt('10xyz', '10.1xyz')
self.assert_ver_gt('10.1xyz', '10xyz')
@@ -137,7 +121,6 @@ class VersionsTest(unittest.TestCase):
self.assert_ver_lt('xyz10', 'xyz10.1')
self.assert_ver_gt('xyz10.1', 'xyz10')
-
def test_alpha_with_dots(self):
self.assert_ver_eq('xyz.4', 'xyz.4')
self.assert_ver_lt('xyz.4', '8')
@@ -145,30 +128,25 @@ class VersionsTest(unittest.TestCase):
self.assert_ver_lt('xyz.4', '2')
self.assert_ver_gt('2', 'xyz.4')
-
def test_nums_and_patch(self):
self.assert_ver_lt('5.5p2', '5.6p1')
self.assert_ver_gt('5.6p1', '5.5p2')
self.assert_ver_lt('5.6p1', '6.5p1')
self.assert_ver_gt('6.5p1', '5.6p1')
-
def test_rc_versions(self):
self.assert_ver_gt('6.0.rc1', '6.0')
self.assert_ver_lt('6.0', '6.0.rc1')
-
def test_alpha_beta(self):
self.assert_ver_gt('10b2', '10a1')
self.assert_ver_lt('10a2', '10b2')
-
def test_double_alpha(self):
self.assert_ver_eq('1.0aa', '1.0aa')
self.assert_ver_lt('1.0a', '1.0aa')
self.assert_ver_gt('1.0aa', '1.0a')
-
def test_padded_numbers(self):
self.assert_ver_eq('10.0001', '10.0001')
self.assert_ver_eq('10.0001', '10.1')
@@ -176,24 +154,20 @@ class VersionsTest(unittest.TestCase):
self.assert_ver_lt('10.0001', '10.0039')
self.assert_ver_gt('10.0039', '10.0001')
-
def test_close_numbers(self):
self.assert_ver_lt('4.999.9', '5.0')
self.assert_ver_gt('5.0', '4.999.9')
-
def test_date_stamps(self):
self.assert_ver_eq('20101121', '20101121')
self.assert_ver_lt('20101121', '20101122')
self.assert_ver_gt('20101122', '20101121')
-
def test_underscores(self):
self.assert_ver_eq('2_0', '2_0')
self.assert_ver_eq('2.0', '2_0')
self.assert_ver_eq('2_0', '2.0')
-
def test_rpm_oddities(self):
self.assert_ver_eq('1b.fc17', '1b.fc17')
self.assert_ver_lt('1b.fc17', '1.fc17')
@@ -202,7 +176,6 @@ class VersionsTest(unittest.TestCase):
self.assert_ver_gt('1g.fc17', '1.fc17')
self.assert_ver_lt('1.fc17', '1g.fc17')
-
# Stuff below here is not taken from RPM's tests and is
# unique to spack
def test_version_ranges(self):
@@ -214,7 +187,6 @@ class VersionsTest(unittest.TestCase):
self.assert_ver_lt('1.2:1.4', '1.5:1.6')
self.assert_ver_gt('1.5:1.6', '1.2:1.4')
-
def test_contains(self):
self.assert_in('1.3', '1.2:1.4')
self.assert_in('1.2.5', '1.2:1.4')
@@ -233,7 +205,6 @@ class VersionsTest(unittest.TestCase):
self.assert_in('1.4.1', '1.2.7:1.4')
self.assert_not_in('1.4.1', '1.2.7:1.4.0')
-
def test_in_list(self):
self.assert_in('1.2', ['1.5', '1.2', '1.3'])
self.assert_in('1.2.5', ['1.5', '1.2:1.3'])
@@ -245,7 +216,6 @@ class VersionsTest(unittest.TestCase):
self.assert_not_in('1.2.5:1.5', ['1.5', '1.2:1.3'])
self.assert_not_in('1.1:1.2.5', ['1.5', '1.2:1.3'])
-
def test_ranges_overlap(self):
self.assert_overlaps('1.2', '1.2')
self.assert_overlaps('1.2.1', '1.2.1')
@@ -262,7 +232,6 @@ class VersionsTest(unittest.TestCase):
self.assert_overlaps(':', '1.6:1.9')
self.assert_overlaps('1.6:1.9', ':')
-
def test_overlap_with_containment(self):
self.assert_in('1.6.5', '1.6')
self.assert_in('1.6.5', ':1.6')
@@ -273,7 +242,6 @@ class VersionsTest(unittest.TestCase):
self.assert_not_in(':1.6', '1.6.5')
self.assert_in('1.6.5', ':1.6')
-
def test_lists_overlap(self):
self.assert_overlaps('1.2b:1.7,5', '1.6:1.9,1')
self.assert_overlaps('1,2,3,4,5', '3,4,5,6,7')
@@ -287,7 +255,6 @@ class VersionsTest(unittest.TestCase):
self.assert_no_overlap('1,2,3,4,5', '6,7')
self.assert_no_overlap('1,2,3,4,5', '6:7')
-
def test_canonicalize_list(self):
self.assert_canonical(['1.2', '1.3', '1.4'],
['1.2', '1.3', '1.3', '1.4'])
@@ -316,7 +283,6 @@ class VersionsTest(unittest.TestCase):
self.assert_canonical([':'],
[':,1.3, 1.3.1,1.3.9,1.4 : 1.5 , 1.3 : 1.4'])
-
def test_intersection(self):
self.check_intersection('2.5',
'1.0:2.5', '2.5:3.0')
@@ -325,12 +291,11 @@ class VersionsTest(unittest.TestCase):
self.check_intersection('0:1', ':', '0:1')
self.check_intersection(['1.0', '2.5:2.7'],
- ['1.0:2.7'], ['2.5:3.0','1.0'])
+ ['1.0:2.7'], ['2.5:3.0', '1.0'])
self.check_intersection(['2.5:2.7'],
- ['1.1:2.7'], ['2.5:3.0','1.0'])
+ ['1.1:2.7'], ['2.5:3.0', '1.0'])
self.check_intersection(['0:1'], [':'], ['0:1'])
-
def test_intersect_with_containment(self):
self.check_intersection('1.6.5', '1.6.5', ':1.6')
self.check_intersection('1.6.5', ':1.6', '1.6.5')
@@ -338,7 +303,6 @@ class VersionsTest(unittest.TestCase):
self.check_intersection('1.6:1.6.5', ':1.6.5', '1.6')
self.check_intersection('1.6:1.6.5', '1.6', ':1.6.5')
-
def test_union_with_containment(self):
self.check_union(':1.6', '1.6.5', ':1.6')
self.check_union(':1.6', ':1.6', '1.6.5')
@@ -346,8 +310,6 @@ class VersionsTest(unittest.TestCase):
self.check_union(':1.6', ':1.6.5', '1.6')
self.check_union(':1.6', '1.6', ':1.6.5')
-
- def test_union_with_containment(self):
self.check_union(':', '1.0:', ':2.0')
self.check_union('1:4', '1:3', '2:4')
@@ -356,7 +318,6 @@ class VersionsTest(unittest.TestCase):
# Tests successor/predecessor case.
self.check_union('1:4', '1:2', '3:4')
-
def test_basic_version_satisfaction(self):
self.assert_satisfies('4.7.3', '4.7.3')
@@ -372,7 +333,6 @@ class VersionsTest(unittest.TestCase):
self.assert_does_not_satisfy('4.8', '4.9')
self.assert_does_not_satisfy('4', '4.9')
-
def test_basic_version_satisfaction_in_lists(self):
self.assert_satisfies(['4.7.3'], ['4.7.3'])
@@ -388,7 +348,6 @@ class VersionsTest(unittest.TestCase):
self.assert_does_not_satisfy(['4.8'], ['4.9'])
self.assert_does_not_satisfy(['4'], ['4.9'])
-
def test_version_range_satisfaction(self):
self.assert_satisfies('4.7b6', '4.3:4.7')
self.assert_satisfies('4.3.0', '4.3:4.7')
@@ -400,7 +359,6 @@ class VersionsTest(unittest.TestCase):
self.assert_satisfies('4.7b6', '4.3:4.7')
self.assert_does_not_satisfy('4.8.0', '4.3:4.7')
-
def test_version_range_satisfaction_in_lists(self):
self.assert_satisfies(['4.7b6'], ['4.3:4.7'])
self.assert_satisfies(['4.3.0'], ['4.3:4.7'])
@@ -423,3 +381,11 @@ class VersionsTest(unittest.TestCase):
self.assert_satisfies('4.8.0', '4.2, 4.3:4.8')
self.assert_satisfies('4.8.2', '4.2, 4.3:4.8')
+
+ def test_formatted_strings(self):
+ versions = '1.2.3', '1_2_3', '1-2-3'
+ for item in versions:
+ v = Version(item)
+ self.assertEqual(v.dotted, '1.2.3')
+ self.assertEqual(v.dashed, '1-2-3')
+ self.assertEqual(v.underscored, '1_2_3')
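
The new formatting properties exercised above normalize a version's separators; a quick example of the behavior they provide (a sketch based only on the properties and test in this diff):

    from spack.version import Version

    v = Version('1_2_3')
    # Any of the '.', '-' or '_' separated forms can be recovered.
    assert v.dotted == '1.2.3'
    assert v.dashed == '1-2-3'
    assert v.underscored == '1_2_3'
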
diff --git a/lib/spack/spack/util/executable.py b/lib/spack/spack/util/executable.py
index 38b778fa00..14b56e8d6c 100644
--- a/lib/spack/spack/util/executable.py
+++ b/lib/spack/spack/util/executable.py
@@ -165,6 +165,7 @@ class Executable(object):
raise ProcessError("Command exited with status %d:" %
proc.returncode, cmd_line)
+
if output is str or error is str:
result = ''
if output is str:
diff --git a/lib/spack/spack/version.py b/lib/spack/spack/version.py
index 247f6d2362..858d581472 100644
--- a/lib/spack/spack/version.py
+++ b/lib/spack/spack/version.py
@@ -43,16 +43,16 @@ be called on any of the types::
intersection
concrete
"""
-import os
-import sys
import re
from bisect import bisect_left
from functools import wraps
+
from functools_backport import total_ordering
# Valid version characters
VALID_VERSION = r'[A-Za-z0-9_.-]'
+
def int_if_int(string):
"""Convert a string to int if possible. Otherwise, return a string."""
try:
@@ -62,10 +62,11 @@ def int_if_int(string):
def coerce_versions(a, b):
- """Convert both a and b to the 'greatest' type between them, in this order:
+ """
+ Convert both a and b to the 'greatest' type between them, in this order:
Version < VersionRange < VersionList
- This is used to simplify comparison operations below so that we're always
- comparing things that are of the same type.
+ This is used to simplify comparison operations below so that we're always
+ comparing things that are of the same type.
"""
order = (Version, VersionRange, VersionList)
ta, tb = type(a), type(b)
@@ -105,6 +106,7 @@ def coerced(method):
@total_ordering
class Version(object):
"""Class to represent versions"""
+
def __init__(self, string):
string = str(string)
@@ -124,6 +126,17 @@ class Version(object):
# last element of separators is ''
self.separators = tuple(re.split(segment_regex, string)[1:-1])
+ @property
+ def dotted(self):
+ return '.'.join(str(x) for x in self.version)
+
+ @property
+ def underscored(self):
+ return '_'.join(str(x) for x in self.version)
+
+ @property
+ def dashed(self):
+ return '-'.join(str(x) for x in self.version)
def up_to(self, index):
"""Return a version string up to the specified component, exclusive.
@@ -131,15 +144,12 @@ class Version(object):
"""
return '.'.join(str(x) for x in self[:index])
-
def lowest(self):
return self
-
def highest(self):
return self
-
@coerced
def satisfies(self, other):
"""A Version 'satisfies' another if it is at least as specific and has a
@@ -147,11 +157,10 @@ class Version(object):
gcc@4.7 so that when a user asks to build with gcc@4.7, we can find
a suitable compiler.
"""
- nself = len(self.version)
+ nself = len(self.version)
nother = len(other.version)
return nother <= nself and self.version[:nother] == other.version
-
def wildcard(self):
"""Create a regex that will match variants of this version string."""
def a_or_n(seg):
@@ -181,28 +190,22 @@ class Version(object):
wc += '(?:[a-z]|alpha|beta)?)?' * (len(segments) - 1)
return wc
-
def __iter__(self):
return iter(self.version)
-
def __getitem__(self, idx):
return tuple(self.version[idx])
-
def __repr__(self):
return self.string
-
def __str__(self):
return self.string
-
@property
def concrete(self):
return self
-
@coerced
def __lt__(self, other):
"""Version comparison is designed for consistency with the way RPM
@@ -235,28 +238,23 @@ class Version(object):
# If the common prefix is equal, the one with more segments is bigger.
return len(self.version) < len(other.version)
-
@coerced
def __eq__(self, other):
return (other is not None and
type(other) == Version and self.version == other.version)
-
def __ne__(self, other):
return not (self == other)
-
def __hash__(self):
return hash(self.version)
-
@coerced
def __contains__(self, other):
if other is None:
return False
return other.version[:len(self.version)] == self.version
-
def is_predecessor(self, other):
"""True if the other version is the immediate predecessor of this one.
That is, NO versions v exist such that:
@@ -269,16 +267,13 @@ class Version(object):
ol = other.version[-1]
return type(sl) == int and type(ol) == int and (ol - sl == 1)
-
def is_successor(self, other):
return other.is_predecessor(self)
-
@coerced
def overlaps(self, other):
return self in other or other in self
-
@coerced
def union(self, other):
if self == other or other in self:
@@ -288,7 +283,6 @@ class Version(object):
else:
return VersionList([self, other])
-
@coerced
def intersection(self, other):
if self == other:
@@ -299,6 +293,7 @@ class Version(object):
@total_ordering
class VersionRange(object):
+
def __init__(self, start, end):
if isinstance(start, basestring):
start = Version(start)
@@ -310,15 +305,12 @@ class VersionRange(object):
if start and end and end < start:
raise ValueError("Invalid Version range: %s" % self)
-
def lowest(self):
return self.start
-
def highest(self):
return self.end
-
@coerced
def __lt__(self, other):
"""Sort VersionRanges lexicographically so that they are ordered first
@@ -331,28 +323,24 @@ class VersionRange(object):
s, o = self, other
if s.start != o.start:
- return s.start is None or (o.start is not None and s.start < o.start)
+ return s.start is None or (o.start is not None and s.start < o.start) # NOQA: ignore=E501
return (s.end != o.end and
o.end is None or (s.end is not None and s.end < o.end))
-
@coerced
def __eq__(self, other):
return (other is not None and
type(other) == VersionRange and
self.start == other.start and self.end == other.end)
-
def __ne__(self, other):
return not (self == other)
-
@property
def concrete(self):
return self.start if self.start == self.end else None
-
@coerced
def __contains__(self, other):
if other is None:
@@ -373,57 +361,55 @@ class VersionRange(object):
other.end in self.end)))
return in_upper
-
@coerced
def satisfies(self, other):
- """A VersionRange satisfies another if some version in this range
- would satisfy some version in the other range. To do this it must
- either:
- a) Overlap with the other range
- b) The start of this range satisfies the end of the other range.
-
- This is essentially the same as overlaps(), but overlaps assumes
- that its arguments are specific. That is, 4.7 is interpreted as
- 4.7.0.0.0.0... . This funciton assumes that 4.7 woudl be satisfied
- by 4.7.3.5, etc.
-
- Rationale:
- If a user asks for gcc@4.5:4.7, and a package is only compatible with
- gcc@4.7.3:4.8, then that package should be able to build under the
- constraints. Just using overlaps() would not work here.
-
- Note that we don't need to check whether the end of this range
- would satisfy the start of the other range, because overlaps()
- already covers that case.
-
- Note further that overlaps() is a symmetric operation, while
- satisfies() is not.
+ """
+ A VersionRange satisfies another if some version in this range
+ would satisfy some version in the other range. To do this it must
+ either:
+ a) Overlap with the other range
+ b) The start of this range satisfies the end of the other range.
+
+ This is essentially the same as overlaps(), but overlaps assumes
+ that its arguments are specific. That is, 4.7 is interpreted as
+ 4.7.0.0.0.0... . This function assumes that 4.7 would be satisfied
+ by 4.7.3.5, etc.
+
+ Rationale:
+ If a user asks for gcc@4.5:4.7, and a package is only compatible with
+ gcc@4.7.3:4.8, then that package should be able to build under the
+ constraints. Just using overlaps() would not work here.
+
+ Note that we don't need to check whether the end of this range
+ would satisfy the start of the other range, because overlaps()
+ already covers that case.
+
+ Note further that overlaps() is a symmetric operation, while
+ satisfies() is not.
"""
return (self.overlaps(other) or
# if either self.start or other.end are None, then this can't
# satisfy, or overlaps() would've taken care of it.
self.start and other.end and self.start.satisfies(other.end))
-
@coerced
def overlaps(self, other):
- return ((self.start == None or other.end is None or
+ return ((self.start is None or other.end is None or
self.start <= other.end or
other.end in self.start or self.start in other.end) and
- (other.start is None or self.end == None or
+ (other.start is None or self.end is None or
other.start <= self.end or
other.start in self.end or self.end in other.start))
-
@coerced
def union(self, other):
if not self.overlaps(other):
if (self.end is not None and other.start is not None and
- self.end.is_predecessor(other.start)):
+ self.end.is_predecessor(other.start)):
return VersionRange(self.start, other.end)
if (other.end is not None and self.start is not None and
- other.end.is_predecessor(self.start)):
+ other.end.is_predecessor(self.start)):
return VersionRange(other.start, self.end)
return VersionList([self, other])
@@ -442,13 +428,12 @@ class VersionRange(object):
else:
end = self.end
# TODO: See note in intersection() about < and in discrepancy.
- if not other.end in self.end:
+ if other.end not in self.end:
if end in other.end or other.end > self.end:
end = other.end
return VersionRange(start, end)
-
@coerced
def intersection(self, other):
if self.overlaps(other):
@@ -470,7 +455,7 @@ class VersionRange(object):
# 1.6 < 1.6.5 = True (lexicographic)
# Should 1.6 NOT be less than 1.6.5? Hm.
# Here we test (not end in other.end) first to avoid paradox.
- if other.end is not None and not end in other.end:
+ if other.end is not None and end not in other.end:
if other.end < end or other.end in end:
end = other.end
@@ -479,15 +464,12 @@ class VersionRange(object):
else:
return VersionList()
-
def __hash__(self):
return hash((self.start, self.end))
-
def __repr__(self):
return self.__str__()
-
def __str__(self):
out = ""
if self.start:
@@ -501,6 +483,7 @@ class VersionRange(object):
@total_ordering
class VersionList(object):
"""Sorted, non-redundant list of Versions and VersionRanges."""
+
def __init__(self, vlist=None):
self.versions = []
if vlist is not None:
@@ -515,7 +498,6 @@ class VersionList(object):
for v in vlist:
self.add(ver(v))
-
def add(self, version):
if type(version) in (Version, VersionRange):
# This normalizes single-value version ranges.
@@ -524,9 +506,9 @@ class VersionList(object):
i = bisect_left(self, version)
- while i-1 >= 0 and version.overlaps(self[i-1]):
- version = version.union(self[i-1])
- del self.versions[i-1]
+ while i - 1 >= 0 and version.overlaps(self[i - 1]):
+ version = version.union(self[i - 1])
+ del self.versions[i - 1]
i -= 1
while i < len(self) and version.overlaps(self[i]):
@@ -542,7 +524,6 @@ class VersionList(object):
else:
raise TypeError("Can't add %s to VersionList" % type(version))
-
@property
def concrete(self):
if len(self) == 1:
@@ -550,11 +531,9 @@ class VersionList(object):
else:
return None
-
def copy(self):
return VersionList(self)
-
def lowest(self):
"""Get the lowest version in the list."""
if not self:
@@ -562,7 +541,6 @@ class VersionList(object):
else:
return self[0].lowest()
-
def highest(self):
"""Get the highest version in the list."""
if not self:
@@ -570,7 +548,6 @@ class VersionList(object):
else:
return self[-1].highest()
-
@coerced
def overlaps(self, other):
if not other or not self:
@@ -586,14 +563,12 @@ class VersionList(object):
o += 1
return False
-
def to_dict(self):
"""Generate human-readable dict for YAML."""
if self.concrete:
- return { 'version' : str(self[0]) }
+ return {'version': str(self[0])}
else:
- return { 'versions' : [str(v) for v in self] }
-
+ return {'versions': [str(v) for v in self]}
@staticmethod
def from_dict(dictionary):
@@ -605,7 +580,6 @@ class VersionList(object):
else:
raise ValueError("Dict must have 'version' or 'versions' in it.")
-
@coerced
def satisfies(self, other, strict=False):
"""A VersionList satisfies another if some version in the list
@@ -633,20 +607,17 @@ class VersionList(object):
o += 1
return False
-
@coerced
def update(self, other):
for v in other.versions:
self.add(v)
-
@coerced
def union(self, other):
result = self.copy()
result.update(other)
return result
-
@coerced
def intersection(self, other):
# TODO: make this faster. This is O(n^2).
@@ -656,7 +627,6 @@ class VersionList(object):
result.add(s.intersection(o))
return result
-
@coerced
def intersect(self, other):
"""Intersect this spec's list with other.
@@ -678,50 +648,40 @@ class VersionList(object):
if i == 0:
if version not in self[0]:
return False
- elif all(version not in v for v in self[i-1:]):
+ elif all(version not in v for v in self[i - 1:]):
return False
return True
-
def __getitem__(self, index):
return self.versions[index]
-
def __iter__(self):
return iter(self.versions)
-
def __reversed__(self):
return reversed(self.versions)
-
def __len__(self):
return len(self.versions)
-
@coerced
def __eq__(self, other):
return other is not None and self.versions == other.versions
-
def __ne__(self, other):
return not (self == other)
-
@coerced
def __lt__(self, other):
return other is not None and self.versions < other.versions
-
def __hash__(self):
return hash(tuple(self.versions))
-
def __str__(self):
return ",".join(str(v) for v in self.versions)
-
def __repr__(self):
return str(self.versions)
@@ -730,7 +690,7 @@ def _string_to_version(string):
"""Converts a string to a Version, VersionList, or VersionRange.
This is private. Client code should use ver().
"""
- string = string.replace(' ','')
+ string = string.replace(' ', '')
if ',' in string:
return VersionList(string.split(','))
@@ -738,7 +698,7 @@ def _string_to_version(string):
elif ':' in string:
s, e = string.split(':')
start = Version(s) if s else None
- end = Version(e) if e else None
+ end = Version(e) if e else None
return VersionRange(start, end)
else:
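
A small illustration of the satisfies() semantics described in the docstrings above, consistent with the rationale given there (not an exhaustive specification):

    from spack.version import Version, ver

    # A more specific version satisfies a less specific one, not vice versa.
    assert Version('4.7.3').satisfies(Version('4.7'))
    assert not Version('4.7').satisfies(Version('4.7.3'))

    # Per the rationale above: a request for gcc@4.5:4.7 can be met by a
    # package that needs gcc@4.7.3:4.8, because 4.7 acts as a prefix here.
    assert ver('4.5:4.7').satisfies(ver('4.7.3:4.8'))
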
diff --git a/lib/spack/spack/yaml_version_check.py b/lib/spack/spack/yaml_version_check.py
new file mode 100644
index 0000000000..c2d084d6c3
--- /dev/null
+++ b/lib/spack/spack/yaml_version_check.py
@@ -0,0 +1,55 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""Yaml Version Check is a module for ensuring that config file
+formats are compatible with the current version of Spack."""
+import os.path
+import os
+import llnl.util.tty as tty
+import spack.util.spack_yaml as syaml
+import spack.config
+
+
+def check_yaml_versions():
+ check_compiler_yaml_version()
+
+def check_compiler_yaml_version():
+ config_scopes = spack.config.config_scopes
+ for scope in config_scopes.values():
+ file_name = os.path.join(scope.path, 'compilers.yaml')
+ data = None
+ if os.path.isfile(file_name):
+ with open(file_name) as f:
+ data = syaml.load(f)
+
+ if data:
+ compilers = data['compilers']
+ if len(compilers) > 0:
+ if (not isinstance(compilers, list)) or 'operating_system' not in compilers[0]['compiler']:
+ new_file = os.path.join(scope.path, '_old_compilers.yaml')
+ tty.warn('%s is in an out-of-date compilers format. '
+ 'Moved to %s. Spack will automatically generate '
+ 'a new compilers config file '
+ % (file_name, new_file))
+ os.rename(file_name, new_file)
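
A hedged sketch of how this check is meant to be used; only the renaming behavior comes from the code above, and the idea of calling it before compiler configuration is read is an assumption:

    from spack.yaml_version_check import check_yaml_versions

    # Any compilers.yaml still in the old (pre operating_system) format is
    # moved aside to _old_compilers.yaml so Spack can regenerate a
    # current-format file the next time it searches for compilers.
    check_yaml_versions()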