author    Erik Schnetter <schnetter@gmail.com>    2016-08-23 09:45:46 -0400
committer Erik Schnetter <schnetter@gmail.com>    2016-08-23 09:45:46 -0400
commit    64d3f87e605f596c149152bbab98ad3cfc89d549 (patch)
tree      8b767ef2943afafdd6b9932fba2b509d4fe344b7 /lib
parent    e43eaad5570009622bf0b19b2a9df2f7e70f8f3e (diff)
parent    e81f3daa2836c4eb8a7d4e6d0f2cfc6b65875612 (diff)
Merge branch 'develop' into eschnett/sympol
Diffstat (limited to 'lib')
-rw-r--r-- lib/spack/docs/basic_usage.rst 386
-rw-r--r-- lib/spack/docs/conf.py 3
-rw-r--r-- lib/spack/docs/configuration.rst 29
-rw-r--r-- lib/spack/docs/developer_guide.rst 9
-rw-r--r-- lib/spack/docs/mirrors.rst 18
-rw-r--r-- lib/spack/docs/packaging_guide.rst 381
-rwxr-xr-x lib/spack/env/cc 52
l--------- lib/spack/env/cray/CC 1
l--------- lib/spack/env/cray/cc 1
l--------- lib/spack/env/cray/ftn 1
l--------- lib/spack/env/craype/CC 1
l--------- lib/spack/env/craype/cc 1
l--------- lib/spack/env/craype/ftn 1
-rw-r--r-- lib/spack/llnl/util/filesystem.py 128
-rw-r--r-- lib/spack/llnl/util/lang.py 79
-rw-r--r-- lib/spack/llnl/util/link_tree.py 10
-rw-r--r-- lib/spack/llnl/util/lock.py 92
-rw-r--r-- lib/spack/llnl/util/tty/__init__.py 16
-rw-r--r-- lib/spack/llnl/util/tty/colify.py 41
-rw-r--r-- lib/spack/llnl/util/tty/color.py 29
-rw-r--r-- lib/spack/llnl/util/tty/log.py 16
-rw-r--r-- lib/spack/spack/__init__.py 36
-rw-r--r-- lib/spack/spack/abi.py 24
-rw-r--r-- lib/spack/spack/architecture.py 539
-rw-r--r-- lib/spack/spack/build_environment.py 244
-rw-r--r-- lib/spack/spack/cmd/__init__.py 120
-rw-r--r-- lib/spack/spack/cmd/activate.py 4
-rw-r--r-- lib/spack/spack/cmd/arch.py 8
-rw-r--r-- lib/spack/spack/cmd/bootstrap.py 49
-rw-r--r-- lib/spack/spack/cmd/cd.py 3
-rw-r--r-- lib/spack/spack/cmd/checksum.py 21
-rw-r--r-- lib/spack/spack/cmd/clean.py 1
-rw-r--r-- lib/spack/spack/cmd/common/__init__.py 24
-rw-r--r-- lib/spack/spack/cmd/common/arguments.py 96
-rw-r--r-- lib/spack/spack/cmd/compiler.py 73
-rw-r--r-- lib/spack/spack/cmd/compilers.py 6
-rw-r--r-- lib/spack/spack/cmd/config.py 10
-rw-r--r-- lib/spack/spack/cmd/create.py 246
-rw-r--r-- lib/spack/spack/cmd/deactivate.py 11
-rw-r--r-- lib/spack/spack/cmd/dependents.py 7
-rw-r--r-- lib/spack/spack/cmd/diy.py 10
-rw-r--r-- lib/spack/spack/cmd/doc.py 1
-rw-r--r-- lib/spack/spack/cmd/edit.py 14
-rw-r--r-- lib/spack/spack/cmd/env.py 8
-rw-r--r-- lib/spack/spack/cmd/extensions.py 11
-rw-r--r-- lib/spack/spack/cmd/fetch.py 14
-rw-r--r-- lib/spack/spack/cmd/find.py 92
-rw-r--r-- lib/spack/spack/cmd/graph.py 13
-rw-r--r-- lib/spack/spack/cmd/help.py 4
-rw-r--r-- lib/spack/spack/cmd/info.py 19
-rw-r--r-- lib/spack/spack/cmd/install.py 14
-rw-r--r-- lib/spack/spack/cmd/list.py 50
-rw-r--r-- lib/spack/spack/cmd/load.py 7
-rw-r--r-- lib/spack/spack/cmd/location.py 29
-rw-r--r-- lib/spack/spack/cmd/md5.py 5
-rw-r--r-- lib/spack/spack/cmd/mirror.py 27
-rw-r--r-- lib/spack/spack/cmd/module.py 256
-rw-r--r-- lib/spack/spack/cmd/package-list.py 30
-rw-r--r-- lib/spack/spack/cmd/patch.py 6
-rw-r--r-- lib/spack/spack/cmd/pkg.py 53
-rw-r--r-- lib/spack/spack/cmd/providers.py 9
-rw-r--r-- lib/spack/spack/cmd/purge.py 32
-rw-r--r-- lib/spack/spack/cmd/python.py 10
-rw-r--r-- lib/spack/spack/cmd/reindex.py 2
-rw-r--r-- lib/spack/spack/cmd/repo.py 29
-rw-r--r-- lib/spack/spack/cmd/restage.py 1
-rw-r--r-- lib/spack/spack/cmd/setup.py 94
-rw-r--r-- lib/spack/spack/cmd/spec.py 13
-rw-r--r-- lib/spack/spack/cmd/stage.py 4
-rw-r--r-- lib/spack/spack/cmd/test-install.py 68
-rw-r--r-- lib/spack/spack/cmd/test.py 37
-rw-r--r-- lib/spack/spack/cmd/uninstall.py 74
-rw-r--r-- lib/spack/spack/cmd/unload.py 6
-rw-r--r-- lib/spack/spack/cmd/unuse.py 6
-rw-r--r-- lib/spack/spack/cmd/url-parse.py 14
-rw-r--r-- lib/spack/spack/cmd/urls.py 5
-rw-r--r-- lib/spack/spack/cmd/use.py 6
-rw-r--r-- lib/spack/spack/cmd/versions.py 7
-rw-r--r-- lib/spack/spack/compiler.py 127
-rw-r--r-- lib/spack/spack/compilers/__init__.py 274
-rw-r--r-- lib/spack/spack/compilers/cce.py 55
-rw-r--r-- lib/spack/spack/compilers/clang.py 14
-rw-r--r-- lib/spack/spack/compilers/gcc.py 13
-rw-r--r-- lib/spack/spack/compilers/intel.py 13
-rw-r--r-- lib/spack/spack/compilers/nag.py 18
-rw-r--r-- lib/spack/spack/compilers/pgi.py 14
-rw-r--r-- lib/spack/spack/compilers/xl.py 50
-rw-r--r-- lib/spack/spack/concretize.py 260
-rw-r--r-- lib/spack/spack/config.py 297
-rw-r--r-- lib/spack/spack/database.py 105
-rw-r--r-- lib/spack/spack/directives.py 42
-rw-r--r-- lib/spack/spack/directory_layout.py 105
-rw-r--r-- lib/spack/spack/environment.py 148
-rw-r--r-- lib/spack/spack/error.py 8
-rw-r--r-- lib/spack/spack/fetch_strategy.py 61
-rw-r--r-- lib/spack/spack/file_cache.py 185
-rw-r--r-- lib/spack/spack/graph.py 101
-rw-r--r-- lib/spack/spack/hooks/__init__.py 2
-rw-r--r-- lib/spack/spack/hooks/extensions.py 2
-rw-r--r-- lib/spack/spack/hooks/licensing.py 5
-rw-r--r-- lib/spack/spack/hooks/sbang.py 23
-rw-r--r-- lib/spack/spack/mirror.py 27
-rw-r--r-- lib/spack/spack/modules.py 93
-rw-r--r-- lib/spack/spack/multimethod.py 17
-rw-r--r-- lib/spack/spack/operating_systems/__init__.py 0
-rw-r--r-- lib/spack/spack/operating_systems/cnl.py 69
-rw-r--r-- lib/spack/spack/operating_systems/linux_distro.py 24
-rw-r--r-- lib/spack/spack/operating_systems/mac_os.py 30
-rw-r--r-- lib/spack/spack/package.py 359
-rw-r--r-- lib/spack/spack/parse.py 20
-rw-r--r-- lib/spack/spack/patch.py 4
-rw-r--r-- lib/spack/spack/platforms/__init__.py 0
-rw-r--r-- lib/spack/spack/platforms/bgq.py 18
-rw-r--r-- lib/spack/spack/platforms/cray.py 105
-rw-r--r-- lib/spack/spack/platforms/darwin.py 27
-rw-r--r-- lib/spack/spack/platforms/linux.py 32
-rw-r--r-- lib/spack/spack/platforms/test.py 51
-rw-r--r-- lib/spack/spack/preferred_packages.py 112
-rw-r--r-- lib/spack/spack/provider_index.py (renamed from lib/spack/spack/virtual.py) 165
-rw-r--r-- lib/spack/spack/repository.py 319
-rw-r--r-- lib/spack/spack/resource.py 6
-rw-r--r-- lib/spack/spack/schema/__init__.py 33
-rw-r--r-- lib/spack/spack/schema/compilers.py 80
-rw-r--r-- lib/spack/spack/schema/mirrors.py 44
-rw-r--r-- lib/spack/spack/schema/modules.py 158
-rw-r--r-- lib/spack/spack/schema/packages.py 86
-rw-r--r-- lib/spack/spack/schema/repos.py 41
-rw-r--r-- lib/spack/spack/schema/targets.py 45
-rw-r--r-- lib/spack/spack/spec.py 786
-rw-r--r-- lib/spack/spack/stage.py 122
-rw-r--r-- lib/spack/spack/test/__init__.py 63
-rw-r--r-- lib/spack/spack/test/architecture.py 163
-rw-r--r-- lib/spack/spack/test/build_system_guess.py (renamed from lib/spack/spack/test/configure_guess.py) 26
-rw-r--r-- lib/spack/spack/test/cc.py 64
-rw-r--r-- lib/spack/spack/test/cmd/find.py 6
-rw-r--r-- lib/spack/spack/test/cmd/module.py 91
-rw-r--r-- lib/spack/spack/test/cmd/test_compiler_cmd.py 82
-rw-r--r-- lib/spack/spack/test/cmd/test_install.py 66
-rw-r--r-- lib/spack/spack/test/cmd/uninstall.py 2
-rw-r--r-- lib/spack/spack/test/concretize.py 123
-rw-r--r-- lib/spack/spack/test/concretize_preferences.py 106
-rw-r--r-- lib/spack/spack/test/config.py 145
-rw-r--r-- lib/spack/spack/test/data/sourceme_first.sh 3
-rw-r--r-- lib/spack/spack/test/data/sourceme_second.sh 3
-rw-r--r-- lib/spack/spack/test/database.py 105
-rw-r--r-- lib/spack/spack/test/directory_layout.py 13
-rw-r--r-- lib/spack/spack/test/environment.py 71
-rw-r--r-- lib/spack/spack/test/file_cache.py 83
-rw-r--r-- lib/spack/spack/test/git_fetch.py 18
-rw-r--r-- lib/spack/spack/test/hg_fetch.py 8
-rw-r--r-- lib/spack/spack/test/install.py 15
-rw-r--r-- lib/spack/spack/test/link_tree.py 6
-rw-r--r-- lib/spack/spack/test/lock.py 316
-rw-r--r-- lib/spack/spack/test/make_executable.py 26
-rw-r--r-- lib/spack/spack/test/mirror.py 17
-rw-r--r-- lib/spack/spack/test/mock_database.py 7
-rw-r--r-- lib/spack/spack/test/mock_packages_test.py 122
-rw-r--r-- lib/spack/spack/test/mock_repo.py 10
-rw-r--r-- lib/spack/spack/test/modules.py 193
-rw-r--r-- lib/spack/spack/test/multimethod.py 38
-rw-r--r-- lib/spack/spack/test/namespace_trie.py 6
-rw-r--r-- lib/spack/spack/test/operating_system.py 75
-rw-r--r-- lib/spack/spack/test/optional_deps.py 36
-rw-r--r-- lib/spack/spack/test/package_sanity.py 3
-rw-r--r-- lib/spack/spack/test/packages.py 49
-rw-r--r-- lib/spack/spack/test/pattern.py 2
-rw-r--r-- lib/spack/spack/test/provider_index.py 93
-rw-r--r-- lib/spack/spack/test/python_version.py 11
-rw-r--r-- lib/spack/spack/test/sbang.py 41
-rw-r--r-- lib/spack/spack/test/spec_dag.py 190
-rw-r--r-- lib/spack/spack/test/spec_semantics.py 215
-rw-r--r-- lib/spack/spack/test/spec_syntax.py 135
-rw-r--r-- lib/spack/spack/test/spec_yaml.py 9
-rw-r--r-- lib/spack/spack/test/stage.py 32
-rw-r--r-- lib/spack/spack/test/svn_fetch.py 8
-rw-r--r-- lib/spack/spack/test/tally_plugin.py 1
-rw-r--r-- lib/spack/spack/test/url_extrapolate.py 20
-rw-r--r-- lib/spack/spack/test/url_parse.py 7
-rw-r--r-- lib/spack/spack/test/url_substitution.py 30
-rw-r--r-- lib/spack/spack/test/versions.py 93
-rw-r--r-- lib/spack/spack/test/yaml.py 12
-rw-r--r-- lib/spack/spack/url.py 79
-rw-r--r-- lib/spack/spack/util/compression.py 4
-rw-r--r-- lib/spack/spack/util/crypto.py 11
-rw-r--r-- lib/spack/spack/util/debug.py 5
-rw-r--r-- lib/spack/spack/util/executable.py 1
-rw-r--r-- lib/spack/spack/util/multiproc.py 18
-rw-r--r-- lib/spack/spack/util/naming.py 35
-rw-r--r-- lib/spack/spack/util/pattern.py 82
-rw-r--r-- lib/spack/spack/util/prefix.py 1
-rw-r--r-- lib/spack/spack/util/spack_yaml.py 57
-rw-r--r-- lib/spack/spack/util/string.py 4
-rw-r--r-- lib/spack/spack/util/web.py 31
-rw-r--r-- lib/spack/spack/variant.py 2
-rw-r--r-- lib/spack/spack/version.py 189
-rw-r--r-- lib/spack/spack/yaml_version_check.py 57
196 files changed, 8995 insertions, 3332 deletions
diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst
index 50c48b802b..a42d941791 100644
--- a/lib/spack/docs/basic_usage.rst
+++ b/lib/spack/docs/basic_usage.rst
@@ -24,12 +24,29 @@ Spack can install:
.. command-output:: spack list
-The packages are listed by name in alphabetical order. You can also
-do wildcats searches using ``*``:
+The packages are listed by name in alphabetical order. If you specify a
+pattern to match, it will follow this set of rules. A pattern with no
+wildcards, ``*`` or ``?``, will be treated as though it started and ended with
+``*``, so ``util`` is equivalent to ``*util*``. A pattern with no capital
+letters will be treated as case-insensitive. You can also add the ``-i`` flag
+to specify a case-insensitive search, or ``-d`` to search the description of
+the package in addition to the name. Some examples:
-.. command-output:: spack list m*
+All packages whose names contain "sql" (case-insensitive):
-.. command-output:: spack list *util*
+.. command-output:: spack list sql
+
+All packages whose names start with a capital M:
+
+.. command-output:: spack list 'M*'
+
+All packages whose names or descriptions contain Documentation:
+
+.. command-output:: spack list -d Documentation
+
+All packages whose names or descriptions contain "documentation" (case-insensitive):
+
+.. command-output:: spack list -d documentation
.. _spack-info:
@@ -97,13 +114,13 @@ that the packages is installed:
$ spack install mpileaks
==> Installing mpileaks
- ==> mpich is already installed in /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/mpich@3.0.4.
- ==> callpath is already installed in /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/callpath@1.0.2-5dce4318.
- ==> adept-utils is already installed in /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/adept-utils@1.0-5adef8da.
+ ==> mpich is already installed in /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/mpich@3.0.4.
+ ==> callpath is already installed in /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/callpath@1.0.2-5dce4318.
+ ==> adept-utils is already installed in /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/adept-utils@1.0-5adef8da.
==> Trying to fetch from https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz
######################################################################## 100.0%
- ==> Staging archive: /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7 arch=chaos_5_x86_64_ib-59f6ad23/mpileaks-1.0.tar.gz
- ==> Created stage in /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7 arch=chaos_5_x86_64_ib-59f6ad23.
+ ==> Staging archive: /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7 arch=linux-debian7-x86_64-59f6ad23/mpileaks-1.0.tar.gz
+ ==> Created stage in /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7 arch=linux-debian7-x86_64-59f6ad23.
==> No patches needed for mpileaks.
==> Building mpileaks.
@@ -111,7 +128,7 @@ that the packages is installed:
==> Successfully installed mpileaks.
Fetch: 2.16s. Build: 9.82s. Total: 11.98s.
- [+] /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/mpileaks@1.0-59f6ad23
+ [+] /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/mpileaks@1.0-59f6ad23
The last line, with the ``[+]``, indicates where the package is
installed.
@@ -166,7 +183,7 @@ To uninstall a package and every package that depends on it, you may give the
spack uninstall --dependents mpich
-will display a list of all the packages that depends on `mpich` and, upon confirmation,
+will display a list of all the packages that depend on `mpich` and, upon confirmation,
will uninstall them in the right order.
A line like
@@ -213,7 +230,7 @@ Running ``spack find`` with no arguments lists installed packages:
$ spack find
==> 74 installed packages.
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
ImageMagick@6.8.9-10 libdwarf@20130729 py-dateutil@2.4.0
adept-utils@1.0 libdwarf@20130729 py-ipython@2.3.1
atk@2.14.0 libelf@0.8.12 py-matplotlib@1.4.2
@@ -239,7 +256,7 @@ Running ``spack find`` with no arguments lists installed packages:
lcms@2.6 pixman@0.32.6 xz@5.2.0
libdrm@2.4.33 py-dateutil@2.4.0 zlib@1.2.8
- -- chaos_5_x86_64_ib / gcc@4.9.2 --------------------------------
+ -- linux-debian7-x86_64 / gcc@4.9.2 --------------------------------
libelf@0.8.10 mpich@3.0.4
Packages are divided into groups according to their architecture and
@@ -262,7 +279,7 @@ in more detail using ``spack find -d``, and by asking only to show
$ spack find --deps libdwarf
==> 2 installed packages.
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
libdwarf@20130729-d9b90962
^libelf@0.8.12
libdwarf@20130729-b52fac98
@@ -278,7 +295,7 @@ want to know whether two packages' dependencies differ, you can use
$ spack find -l libdwarf
==> 2 installed packages.
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
libdwarf@20130729-d9b90962 libdwarf@20130729-b52fac98
Now the ``libdwarf`` installs have hashes after their names. These are
@@ -292,14 +309,14 @@ use ``spack find -p``:
$ spack find -p
==> 74 installed packages.
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
- ImageMagick@6.8.9-10 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/ImageMagick@6.8.9-10-4df950dd
- adept-utils@1.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/adept-utils@1.0-5adef8da
- atk@2.14.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/atk@2.14.0-3d09ac09
- boost@1.55.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/boost@1.55.0
- bzip2@1.0.6 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/bzip2@1.0.6
- cairo@1.14.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/cairo@1.14.0-fcc2ab44
- callpath@1.0.2 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/callpath@1.0.2-5dce4318
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
+ ImageMagick@6.8.9-10 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/ImageMagick@6.8.9-10-4df950dd
+ adept-utils@1.0 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/adept-utils@1.0-5adef8da
+ atk@2.14.0 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/atk@2.14.0-3d09ac09
+ boost@1.55.0 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/boost@1.55.0
+ bzip2@1.0.6 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/bzip2@1.0.6
+ cairo@1.14.0 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/cairo@1.14.0-fcc2ab44
+ callpath@1.0.2 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/callpath@1.0.2-5dce4318
...
And, finally, you can restrict your search to a particular package
@@ -308,10 +325,10 @@ by supplying its name:
.. code-block:: sh
$ spack find -p libelf
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
- libelf@0.8.11 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.11
- libelf@0.8.12 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.12
- libelf@0.8.13 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.13
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
+ libelf@0.8.11 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/libelf@0.8.11
+ libelf@0.8.12 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/libelf@0.8.12
+ libelf@0.8.13 /home/gamblin2/spack/opt/linux-debian7-x86_64/gcc@4.4.7/libelf@0.8.13
``spack find`` actually does a lot more than this. You can use
*specs* to query for specific configurations and builds of each
@@ -321,7 +338,7 @@ package. If you want to find only libelf versions greater than version
.. code-block:: sh
$ spack find libelf@0.8.12:
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
libelf@0.8.12 libelf@0.8.13
Finding just the versions of libdwarf built with a particular version
@@ -331,7 +348,7 @@ of libelf would look like this:
$ spack find -l libdwarf ^libelf@0.8.12
==> 1 installed packages.
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
libdwarf@20130729-d9b90962
We can also search for packages that have a certain attribute. For example,
@@ -342,6 +359,7 @@ will find every installed package with a 'debug' compile-time option enabled.
The full spec syntax is discussed in detail in :ref:`sec-specs`.
+.. _compiler-config:
Compiler configuration
-----------------------------------
@@ -428,7 +446,7 @@ If you want to see specifics on a particular compiler, you can run
fc = /usr/local/bin/ifort-15.0.090
This shows which C, C++, and Fortran compilers were detected by Spack.
Notice also that we didn't have to be too specific about the
version. We just said ``intel@15``, and information about the only
matching Intel compiler was displayed.
@@ -443,19 +461,17 @@ editing your ``~/.spack/compilers.yaml`` file. You can do this by running
Each compiler configuration in the file looks like this::
...
- chaos_5_x86_64_ib:
- ...
- intel@15.0.0:
+ compilers:
+ - compiler:
+ modules: []
+ operating_system: OS
+ paths:
cc: /usr/local/bin/icc-15.0.024-beta
cxx: /usr/local/bin/icpc-15.0.024-beta
f77: /usr/local/bin/ifort-15.0.024-beta
fc: /usr/local/bin/ifort-15.0.024-beta
- ...
-The chaos_5_x86_64_ib string is an architecture string, and multiple
-compilers can be listed underneath an architecture. The architecture
-string may be replaced with the string 'all' to signify compilers that
-work on all architectures.
+ spec: intel@15.0.0
For compilers, like ``clang``, that do not support Fortran, put
``None`` for ``f77`` and ``fc``::
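
   compilers:
   - compiler:
       ...
       paths:
         cc: /usr/bin/clang
         cxx: /usr/bin/clang++
         f77: None
         fc: None
       spec: clang@3.3svn

The entry above is a minimal sketch following the format shown earlier; the
clang paths and the ``clang@3.3svn`` version string are illustrative.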
@@ -471,10 +487,11 @@ list displayed by ``spack compilers``.
You can also add compiler flags to manually configured compilers. The
valid flags are ``cflags``, ``cxxflags``, ``fflags``, ``cppflags``,
-``ldflags``, and ``ldlibs``. For example,::
+``ldflags``, and ``ldlibs``. For example::
...
- chaos_5_x86_64_ib:
+ compilers:
+ - compiler:
...
intel@15.0.0:
cc: /usr/local/bin/icc-15.0.024-beta
@@ -501,10 +518,10 @@ Spack, that descriptor is called a *spec*. Spack uses specs to refer
to a particular build configuration (or configurations) of a package.
Specs are more than a package name and a version; you can use them to
specify the compiler, compiler version, architecture, compile options,
and dependency options for a build. In this section, we'll go over
the full syntax of specs.
Here is an example of a much longer spec than we've seen thus far::
mpileaks @1.2:1.4 %gcc@4.7.5 +debug -qt arch=bgq_os ^callpath @1.1 %gcc@4.7.2
@@ -526,10 +543,11 @@ More formally, a spec consists of the following pieces:
* ``+`` or ``-`` or ``~`` Optional variant specifiers (``+debug``,
``-qt``, or ``~qt``) for boolean variants
* ``name=<value>`` Optional variant specifiers that are not restricted to
-boolean variants
+ boolean variants
* ``name=<value>`` Optional compiler flag specifiers. Valid flag names are
-``cflags``, ``cxxflags``, ``fflags``, ``cppflags``, ``ldflags``, and ``ldlibs``.
-* ``arch=<value>`` Optional architecture specifier (``arch=bgq_os``)
+ ``cflags``, ``cxxflags``, ``fflags``, ``cppflags``, ``ldflags``, and ``ldlibs``.
+* ``target=<value> os=<value>`` Optional architecture specifier
+ (``target=haswell os=CNL10``)
* ``^`` Dependency specs (``^callpath@1.1``)
There are two things to notice here. The first is that specs are
@@ -609,7 +627,7 @@ compilers, variants, and architectures just like any other spec.
Specifiers are associated with the nearest package name to their left.
For example, above, ``@1.1`` and ``%gcc@4.7.2`` associates with the
``callpath`` package, while ``@1.2:1.4``, ``%gcc@4.7.5``, ``+debug``,
-``-qt``, and ``arch=bgq_os`` all associate with the ``mpileaks`` package.
+``-qt``, and ``target=haswell os=CNL10`` all associate with the ``mpileaks`` package.
In the diagram above, ``mpileaks`` depends on ``mpich`` with an
unspecified version, but packages can depend on other packages with
@@ -741,14 +759,18 @@ in gnu autotools. If all flags are set, the order is
Architecture specifiers
~~~~~~~~~~~~~~~~~~~~~~~
-.. Note::
+The architecture can be specified by using the reserved
+words ``target`` and/or ``os`` (``target=x86-64 os=debian7``). You can also
+use the triplet form of platform, operating system and processor.
- Architecture specifiers are part of specs but are not yet
- functional. They will be in Spack version 1.0, due in Q3 2015.
+.. code-block:: sh
-The architecture specifier looks identical to a variant specifier for a
-non-boolean variant. The architecture can be specified only using the
-reserved name ``arch`` (``arch=bgq_os``).
+ spack install libelf arch=cray_xc-CNL10-haswell
+
+Users on non-Cray systems won't have to worry about specifying the architecture.
+Spack will autodetect what kind of operating system is on your machine as well
+as the processor. For more information on how the architecture can be
+used on Cray machines, see :ref:`spack-cray`.
.. _sec-virtual-dependencies:
@@ -968,7 +990,7 @@ of installed packages.
$ module avail
- ------- /home/gamblin2/spack/share/spack/modules/chaos_5_x86_64_ib --------
+ ------- /home/gamblin2/spack/share/spack/modules/linux-debian7-x86_64 --------
adept-utils@1.0%gcc@4.4.7-5adef8da libelf@0.8.13%gcc@4.4.7
automaded@1.0%gcc@4.4.7-d9691bb0 libelf@0.8.13%intel@15.0.0
boost@1.55.0%gcc@4.4.7 mpc@1.0.2%gcc@4.4.7-559607f5
@@ -1039,7 +1061,7 @@ Spack. For example, this will add the ``mpich`` package built with
$ spack use mpich %gcc@4.4.7
Prepending: mpich@3.0.4%gcc@4.4.7 (ok)
$ which mpicc
- ~/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/mpich@3.0.4/bin/mpicc
+ ~/src/spack/opt/linux-debian7-x86_64/gcc@4.4.7/mpich@3.0.4/bin/mpicc
Or, similarly with modules, you could type:
@@ -1072,8 +1094,8 @@ than one installed package matches it), then Spack will warn you:
$ spack load libelf
==> Error: Multiple matches for spec libelf. Choose one:
- libelf@0.8.13%gcc@4.4.7 arch=chaos_5_x86_64_ib
- libelf@0.8.13%intel@15.0.0 arch=chaos_5_x86_64_ib
+ libelf@0.8.13%gcc@4.4.7 arch=linux-debian7-x86_64
+ libelf@0.8.13%intel@15.0.0 arch=linux-debian7-x86_64
You can either type the ``spack load`` command again with a fully
qualified argument, or you can add just enough extra constraints to
@@ -1114,6 +1136,80 @@ of module files:
"""Set up the compile and runtime environments for a package."""
pass
+
+Recursive Modules
+``````````````````
+
+In some cases, it is desirable to load not just a module, but also all
+the modules it depends on. This is not required for most modules
+because Spack builds binaries with RPATH support. However, not all
+packages use RPATH to find their dependencies: this can be true in
+particular for Python extensions, which are currently *not* built with
+RPATH.
+
+Modules may be loaded recursively with the ``load`` command's
+``--dependencies`` or ``-r`` argument:
+
+.. code-block:: sh
+
+ $ spack load --dependencies <spec> ...
+
+More than one spec may be placed on the command line here.
+
+Module Commands for Shell Scripts
+``````````````````````````````````
+
+Although Spack is flexible, the ``module`` command is much faster than
+``spack load``, which matters when a shell script runs a series of
+``spack load`` commands. By adding the ``--shell`` flag,
+``spack module find`` can instead generate the equivalent ``module load``
+code for cutting-and-pasting into a shell script. For example:
+
+.. code-block:: sh
+
+ $ spack module find tcl --dependencies --shell py-numpy git
+ # bzip2@1.0.6%gcc@4.9.3=linux-x86_64
+ module load bzip2-1.0.6-gcc-4.9.3-ktnrhkrmbbtlvnagfatrarzjojmkvzsx
+ # ncurses@6.0%gcc@4.9.3=linux-x86_64
+ module load ncurses-6.0-gcc-4.9.3-kaazyneh3bjkfnalunchyqtygoe2mncv
+ # zlib@1.2.8%gcc@4.9.3=linux-x86_64
+ module load zlib-1.2.8-gcc-4.9.3-v3ufwaahjnviyvgjcelo36nywx2ufj7z
+ # sqlite@3.8.5%gcc@4.9.3=linux-x86_64
+ module load sqlite-3.8.5-gcc-4.9.3-a3eediswgd5f3rmto7g3szoew5nhehbr
+ # readline@6.3%gcc@4.9.3=linux-x86_64
+ module load readline-6.3-gcc-4.9.3-se6r3lsycrwxyhreg4lqirp6xixxejh3
+ # python@3.5.1%gcc@4.9.3=linux-x86_64
+ module load python-3.5.1-gcc-4.9.3-5q5rsrtjld4u6jiicuvtnx52m7tfhegi
+ # py-setuptools@20.5%gcc@4.9.3=linux-x86_64
+ module load py-setuptools-20.5-gcc-4.9.3-4qr2suj6p6glepnedmwhl4f62x64wxw2
+ # py-nose@1.3.7%gcc@4.9.3=linux-x86_64
+ module load py-nose-1.3.7-gcc-4.9.3-pwhtjw2dvdvfzjwuuztkzr7b4l6zepli
+ # openblas@0.2.17%gcc@4.9.3+shared=linux-x86_64
+ module load openblas-0.2.17-gcc-4.9.3-pw6rmlom7apfsnjtzfttyayzc7nx5e7y
+ # py-numpy@1.11.0%gcc@4.9.3+blas+lapack=linux-x86_64
+ module load py-numpy-1.11.0-gcc-4.9.3-mulodttw5pcyjufva4htsktwty4qd52r
+ # curl@7.47.1%gcc@4.9.3=linux-x86_64
+ module load curl-7.47.1-gcc-4.9.3-ohz3fwsepm3b462p5lnaquv7op7naqbi
+ # autoconf@2.69%gcc@4.9.3=linux-x86_64
+ module load autoconf-2.69-gcc-4.9.3-bkibjqhgqm5e3o423ogfv2y3o6h2uoq4
+ # cmake@3.5.0%gcc@4.9.3~doc+ncurses+openssl~qt=linux-x86_64
+ module load cmake-3.5.0-gcc-4.9.3-x7xnsklmgwla3ubfgzppamtbqk5rwn7t
+ # expat@2.1.0%gcc@4.9.3=linux-x86_64
+ module load expat-2.1.0-gcc-4.9.3-6pkz2ucnk2e62imwakejjvbv6egncppd
+ # git@2.8.0-rc2%gcc@4.9.3+curl+expat=linux-x86_64
+ module load git-2.8.0-rc2-gcc-4.9.3-3bib4hqtnv5xjjoq5ugt3inblt4xrgkd
+
+The script may be further edited by removing unnecessary modules.
+It may be executed directly in bash via
+
+.. code-block:: sh
+
+ source <( spack module find tcl --dependencies --shell py-numpy git )
+
+
+Regenerating Module files
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
.. code-block:: python
def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
@@ -1378,23 +1474,23 @@ files in the ``cmake`` package while retaining its dependencies.
.. code-block:: sh
-
+
$ spack view -v symlink myview cmake@3.5.2
==> Linking package: "ncurses"
==> Linking package: "zlib"
==> Linking package: "openssl"
==> Linking package: "cmake"
-
+
$ ls myview/
bin doc etc include lib share
$ ls myview/bin/
captoinfo clear cpack ctest infotocap openssl tabs toe tset
ccmake cmake c_rehash infocmp ncurses6-config reset tic tput
-
+
$ spack view -v -d false rm myview cmake@3.5.2
==> Removing package: "cmake"
-
+
$ ls myview/bin/
captoinfo c_rehash infotocap openssl tabs toe tset
clear infocmp ncurses6-config reset tic tput
@@ -1404,7 +1500,7 @@ Limitations of Filesystem Views
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This section describes some limitations that should be considered in
using filesystem views.
Filesystem views are merely organizational. The binary executable
programs, shared libraries and other build products found in a view
@@ -1453,7 +1549,7 @@ an *extension*. Suppose you have Python installed like so:
$ spack find python
==> 1 installed packages.
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
python@2.7.8
.. _spack-extensions:
@@ -1466,7 +1562,7 @@ You can find extensions for your Python installation like this:
.. code-block:: sh
$ spack extensions python
- ==> python@2.7.8%gcc@4.4.7 arch=chaos_5_x86_64_ib-703c7a96
+ ==> python@2.7.8%gcc@4.4.7 arch=linux-debian7-x86_64-703c7a96
==> 36 extensions:
geos py-ipython py-pexpect py-pyside py-sip
py-basemap py-libxml2 py-pil py-pytz py-six
@@ -1478,7 +1574,7 @@ You can find extensions for your Python installation like this:
py-h5py py-numpy py-pyqt py-shiboken
==> 12 installed:
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
py-dateutil@2.4.0 py-nose@1.3.4 py-pyside@1.2.2
py-dateutil@2.4.0 py-numpy@1.9.1 py-pytz@2014.10
py-ipython@2.3.1 py-pygments@2.0.1 py-setuptools@11.3.1
@@ -1494,8 +1590,8 @@ prefixes, and you can see this with ``spack find -p``:
$ spack find -p py-numpy
==> 1 installed packages.
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
- py-numpy@1.9.1 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/py-numpy@1.9.1-66733244
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
+ py-numpy@1.9.1 /g/g21/gamblin2/src/spack/opt/linux-debian7-x86_64/gcc@4.4.7/py-numpy@1.9.1-66733244
However, even though this package is installed, you cannot use it
directly when you run ``python``:
@@ -1556,9 +1652,9 @@ installation:
.. code-block:: sh
$ spack activate py-numpy
- ==> Activated extension py-setuptools@11.3.1%gcc@4.4.7 arch=chaos_5_x86_64_ib-3c74eb69 for python@2.7.8%gcc@4.4.7.
- ==> Activated extension py-nose@1.3.4%gcc@4.4.7 arch=chaos_5_x86_64_ib-5f70f816 for python@2.7.8%gcc@4.4.7.
- ==> Activated extension py-numpy@1.9.1%gcc@4.4.7 arch=chaos_5_x86_64_ib-66733244 for python@2.7.8%gcc@4.4.7.
+ ==> Activated extension py-setuptools@11.3.1%gcc@4.4.7 arch=linux-debian7-x86_64-3c74eb69 for python@2.7.8%gcc@4.4.7.
+ ==> Activated extension py-nose@1.3.4%gcc@4.4.7 arch=linux-debian7-x86_64-5f70f816 for python@2.7.8%gcc@4.4.7.
+ ==> Activated extension py-numpy@1.9.1%gcc@4.4.7 arch=linux-debian7-x86_64-66733244 for python@2.7.8%gcc@4.4.7.
Several things have happened here. The user requested that
``py-numpy`` be activated in the ``python`` installation it was built
@@ -1573,7 +1669,7 @@ packages listed as activated:
.. code-block:: sh
$ spack extensions python
- ==> python@2.7.8%gcc@4.4.7 arch=chaos_5_x86_64_ib-703c7a96
+ ==> python@2.7.8%gcc@4.4.7 arch=linux-debian7-x86_64-703c7a96
==> 36 extensions:
geos py-ipython py-pexpect py-pyside py-sip
py-basemap py-libxml2 py-pil py-pytz py-six
@@ -1585,14 +1681,14 @@ packages listed as activated:
py-h5py py-numpy py-pyqt py-shiboken
==> 12 installed:
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
py-dateutil@2.4.0 py-nose@1.3.4 py-pyside@1.2.2
py-dateutil@2.4.0 py-numpy@1.9.1 py-pytz@2014.10
py-ipython@2.3.1 py-pygments@2.0.1 py-setuptools@11.3.1
py-matplotlib@1.4.2 py-pyparsing@2.0.3 py-six@1.9.0
==> 3 currently activated:
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
py-nose@1.3.4 py-numpy@1.9.1 py-setuptools@11.3.1
@@ -1621,7 +1717,7 @@ dependencies, you can use ``spack activate -f``:
.. code-block:: sh
$ spack activate -f py-numpy
- ==> Activated extension py-numpy@1.9.1%gcc@4.4.7 arch=chaos_5_x86_64_ib-66733244 for python@2.7.8%gcc@4.4.7.
+ ==> Activated extension py-numpy@1.9.1%gcc@4.4.7 arch=linux-debian7-x86_64-66733244 for python@2.7.8%gcc@4.4.7.
.. _spack-deactivate:
@@ -1653,7 +1749,7 @@ Spack currently needs to be run from a filesystem that supports
``flock`` locking semantics. Nearly all local filesystems and recent
versions of NFS support this, but parallel filesystems may be mounted
without ``flock`` support enabled. You can determine how your
filesystems are mounted with ``mount -p``. The output for a Lustre
filesystem might look like this:
.. code-block:: sh
@@ -1674,7 +1770,7 @@ This issue typically manifests with the error below:
Traceback (most recent call last):
File "./spack", line 176, in <module>
main()
File "./spack", line 154, in main
return_val = command(parser, args)
File "./spack/lib/spack/spack/cmd/find.py", line 170, in find
specs = set(spack.installed_db.query(**q_args))
@@ -1692,6 +1788,146 @@ This issue typically manifests with the error below:
A nicer error message is TBD in future versions of Spack.
+
+.. _spack-cray:
+
+Spack on Cray
+-----------------------------
+
+Spack differs slightly when used on a Cray system. The architecture spec
+can differentiate between the front-end and back-end processor and operating system.
+For example, on Edison at NERSC, the back-end target processor
+is \"Ivy Bridge\", so you can specify to use the back-end this way:
+
+.. code-block:: sh
+
+ spack install zlib target=ivybridge
+
+You can also use the operating system to build against the back-end:
+
+.. code-block:: sh
+
+ spack install zlib os=CNL10
+
+Notice that the name includes both the operating system name and the major
+version number concatenated together.
+
+Alternatively, if you want to build something for the front-end,
+you can specify the front-end target processor. The processor for a login node
+on Edison is \"Sandy bridge\" so we specify on the command line like so:
+
+.. code-block:: sh
+
+ spack install zlib target=sandybridge
+
+And the front-end operating system is:
+
+.. code-block:: sh
+
+ spack install zlib os=SuSE11
+
+
+
+Cray compiler detection
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Spack can detect compilers using two methods. For the front-end, we treat
+everything the same as on other systems. The difference lies in back-end
+compiler detection, which is done via the Tcl ``module avail`` command.
+Once Spack detects a compiler, it writes the appropriate PrgEnv and compiler
+module name to compilers.yaml and sets the paths to each compiler with Cray's
+compiler wrapper names (i.e. cc, CC, ftn). At build time, Spack will load
+the correct PrgEnv and compiler module and will call the appropriate wrapper.
+
+The compilers.yaml config file will also differ. There is a
+modules section that is filled with the compiler's Programming Environment
+and module name. On other systems, this field is an empty list (``[]``)::
+
+ ...
+ - compiler:
+ modules:
+ - PrgEnv-intel
+ - intel/15.0.109
+ ...
+
+As mentioned earlier, the compiler paths will look different on a Cray system.
+Since most compilers are invoked using cc, CC and ftn, the paths for each
+compiler are replaced with their respective Cray compiler wrapper names::
+
+ ...
+ paths:
+ cc: cc
+ cxx: CC
+ f77: ftn
+ fc: ftn
+ ...
+
+These wrapper names are used in place of explicit paths to the compiler
+executables. This allows Spack to call the Cray compiler wrappers at build time.
+
+For more on compiler configuration, check out :ref:`compiler-config`.
+
+Spack sets the default Cray link type to dynamic, to better match
+other platforms. Individual packages can enable static linking (which is the
+default outside of Spack on Cray systems) using the ``-static`` flag.
+
+Setting defaults and using Cray modules
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+If you want to use default compilers for each PrgEnv and also be able
+to load Cray external modules, you will need to set up a packages.yaml.
+
+Here's an example of an external configuration for Cray modules:
+
+.. code-block:: yaml
+
+ packages:
+ mpi:
+ modules:
+ mpich@7.3.1%gcc@5.2.0 arch=cray_xc-haswell-CNL10: cray-mpich
+ mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-haswell-CNL10: cray-mpich
+
+This tells Spack that for any package that depends on mpi, it should load the
+cray-mpich module into the environment. You can then use whatever
+environment variables, libraries, etc., the module brings into the
+environment via module load.
+
+You can set the default compiler that Spack can use for each compiler type.
+If you want to use the Cray defaults, then set them under *all:* in packages.yaml.
+In the compiler field, set the compiler specs in your order of preference.
+Whenever you build with that compiler type, Spack will concretize to that version.
+
+Here is an example of a full packages.yaml used at NERSC:
+
+.. code-block:: yaml
+
+ packages:
+ mpi:
+ modules:
+ mpich@7.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge: cray-mpich
+ mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-SuSE11-ivybridge: cray-mpich
+ buildable: False
+ netcdf:
+ modules:
+ netcdf@4.3.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge: cray-netcdf
+ netcdf@4.3.3.1%intel@16.0.0.109 arch=cray_xc-CNL10-ivybridge: cray-netcdf
+ buildable: False
+ hdf5:
+ modules:
+ hdf5@1.8.14%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge: cray-hdf5
+ hdf5@1.8.14%intel@16.0.0.109 arch=cray_xc-CNL10-ivybridge: cray-hdf5
+ buildable: False
+ all:
+ compiler: [gcc@5.2.0, intel@16.0.0.109]
+
+Here we tell Spack to use gcc 5.2.0 whenever we build with gcc, and
+intel 16.0.0.109 whenever we build with the Intel compilers. We add a spec
+for each compiler type to each of the Cray modules. This ensures that for each
+compiler on our system we can use that external module.
+
+
+For more on external packages, check out the section :ref:`sec-external_packages`.
+
Getting Help
-----------------------
diff --git a/lib/spack/docs/conf.py b/lib/spack/docs/conf.py
index 3d2a8251aa..f3cb268177 100644
--- a/lib/spack/docs/conf.py
+++ b/lib/spack/docs/conf.py
@@ -51,7 +51,8 @@ os.environ['SPACK_ROOT'] = spack_root
os.environ['PATH'] += os.pathsep + '$SPACK_ROOT/bin'
spack_version = subprocess.Popen(
- ['spack', '-V'], stderr=subprocess.PIPE).communicate()[1].strip().split('.')
+ [spack_root + '/bin/spack', '-V'],
+ stderr=subprocess.PIPE).communicate()[1].strip().split('.')
# Set an environment variable so that colify will print output like it would to
# a terminal.
diff --git a/lib/spack/docs/configuration.rst b/lib/spack/docs/configuration.rst
index c613071c65..d39c932021 100644
--- a/lib/spack/docs/configuration.rst
+++ b/lib/spack/docs/configuration.rst
@@ -53,6 +53,7 @@ in the first directory it finds to which it has write access. Add
more elements to the list to indicate where your own site's temporary
directory is.
+.. _sec-external_packages:
External Packages
----------------------------
@@ -70,20 +71,20 @@ directory. Here's an example of an external configuration:
packages:
openmpi:
paths:
- openmpi@1.4.3%gcc@4.4.7 arch=chaos_5_x86_64_ib: /opt/openmpi-1.4.3
- openmpi@1.4.3%gcc@4.4.7 arch=chaos_5_x86_64_ib+debug: /opt/openmpi-1.4.3-debug
- openmpi@1.6.5%intel@10.1 arch=chaos_5_x86_64_ib: /opt/openmpi-1.6.5-intel
+ openmpi@1.4.3%gcc@4.4.7 arch=linux-x86_64-debian7: /opt/openmpi-1.4.3
+ openmpi@1.4.3%gcc@4.4.7 arch=linux-x86_64-debian7+debug: /opt/openmpi-1.4.3-debug
+ openmpi@1.6.5%intel@10.1 arch=linux-x86_64-debian7: /opt/openmpi-1.6.5-intel
This example lists three installations of OpenMPI, one built with gcc,
one built with gcc and debug information, and another built with Intel.
If Spack is asked to build a package that uses one of these MPIs as a
dependency, it will use the pre-installed OpenMPI in
-the given directory.
+the given directory. Packages.yaml can also be used to specify modules.
Each ``packages.yaml`` begins with a ``packages:`` token, followed
-by a list of package names. To specify externals, add a ``paths``
+by a list of package names. To specify externals, add a ``paths`` or ``modules``
token under the package name, which lists externals in a
-``spec : /path`` format. Each spec should be as
+``spec: /path`` or ``spec: module-name`` format. Each spec should be as
well-defined as reasonably possible. If a
package lacks a spec component, such as missing a compiler or
package version, then Spack will guess the missing component based
@@ -108,9 +109,9 @@ be:
packages:
openmpi:
paths:
- openmpi@1.4.3%gcc@4.4.7 arch=chaos_5_x86_64_ib: /opt/openmpi-1.4.3
- openmpi@1.4.3%gcc@4.4.7 arch=chaos_5_x86_64_ib+debug: /opt/openmpi-1.4.3-debug
- openmpi@1.6.5%intel@10.1 arch=chaos_5_x86_64_ib: /opt/openmpi-1.6.5-intel
+ openmpi@1.4.3%gcc@4.4.7 arch=linux-x86_64-debian7: /opt/openmpi-1.4.3
+ openmpi@1.4.3%gcc@4.4.7 arch=linux-x86_64-debian7+debug: /opt/openmpi-1.4.3-debug
+ openmpi@1.6.5%intel@10.1 arch=linux-x86_64-debian7: /opt/openmpi-1.6.5-intel
buildable: False
The addition of the ``buildable`` flag tells Spack that it should never build
@@ -118,6 +119,9 @@ its own version of OpenMPI, and it will instead always rely on a pre-built
OpenMPI. Similar to ``paths``, ``buildable`` is specified as a property under
a package name.
+If an external module is specified as not buildable, then Spack will load the
+external module into the build environment so that it can be used for linking.
+
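+As a sketch, an external package registered via ``modules`` rather than
+``paths`` might look like this (the package and module names here are
+illustrative):
+
+.. code-block:: yaml
+
+   packages:
+     mpich:
+       modules:
+         mpich@7.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge: cray-mpich
+       buildable: False
+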
The ``buildable`` does not need to be paired with external packages.
It could also be used alone to forbid packages that may be
buggy or otherwise undesirable.
@@ -138,7 +142,7 @@ Here's an example packages.yaml file that sets preferred packages:
.. code-block:: sh
packages:
- dyninst:
+ opencv:
compiler: [gcc@4.9]
variants: +debug
gperftools:
@@ -150,10 +154,10 @@ Here's an example packages.yaml file that sets preferred packages:
At a high level, this example is specifying how packages should be
-concretized. The dyninst package should prefer using gcc 4.9 and
+concretized. The opencv package should prefer using gcc 4.9 and
be built with debug options. The gperftools package should prefer version
2.2 over 2.4. Every package on the system should prefer mvapich for
-its MPI and gcc 4.4.7 (except for Dyninst, which overrides this by preferring gcc 4.9).
+its MPI and gcc 4.4.7 (except for opencv, which overrides this by preferring gcc 4.9).
These options are used to fill in implicit defaults. Any of them can be overwritten
on the command line if explicitly requested.
@@ -182,7 +186,6 @@ concretization rules. A provider lists a value that packages may
dependency.
-
Profiling
------------------
diff --git a/lib/spack/docs/developer_guide.rst b/lib/spack/docs/developer_guide.rst
index 0b618aa683..d28fe4b2a5 100644
--- a/lib/spack/docs/developer_guide.rst
+++ b/lib/spack/docs/developer_guide.rst
@@ -80,10 +80,11 @@ with a high level view of Spack's directory structure::
var/
spack/ <- build & stage directories
- repos/ <- contains package repositories
- builtin/ <- pkg repository that comes with Spack
- repo.yaml <- descriptor for the builtin repository
- packages/ <- directories under here contain packages
+ repos/ <- contains package repositories
+ builtin/ <- pkg repository that comes with Spack
+ repo.yaml <- descriptor for the builtin repository
+ packages/ <- directories under here contain packages
+ cache/ <- saves resources downloaded during installs
opt/
spack/ <- packages are installed here
diff --git a/lib/spack/docs/mirrors.rst b/lib/spack/docs/mirrors.rst
index dad04d053b..583575a565 100644
--- a/lib/spack/docs/mirrors.rst
+++ b/lib/spack/docs/mirrors.rst
@@ -214,3 +214,21 @@ Adding a mirror really adds a line in ``~/.spack/mirrors.yaml``::
If you want to change the order in which mirrors are searched for
packages, you can edit this file and reorder the sections. Spack will
search the topmost mirror first and the bottom-most mirror last.
+
+.. _caching:
+
+Local Default Cache
+----------------------------
+
+Spack caches resources that are downloaded as part of installs. The cache is
+a valid spack mirror: it uses the same directory structure and naming scheme
+as other Spack mirrors (so it can be copied anywhere and referenced with a URL
+like other mirrors). The mirror is maintained locally (within the Spack
+installation directory) at :file:`var/spack/cache/`. It is always enabled (and
+is always searched first when attempting to retrieve files for an installation)
+but can be cleared with :ref:`purge <spack-purge>`; the cache directory can also
+be deleted manually without issue.
+
+Caching includes retrieved tarball archives and source control repositories, but
+only resources with an associated digest or commit ID (e.g. a revision number
+for SVN) will be cached.
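+
+Because the cache is a valid mirror, a copy of it can be registered with
+Spack like any other mirror. A sketch (the copy destination and mirror
+name are illustrative):
+
+.. code-block:: sh
+
+   $ cp -r var/spack/cache /tmp/spack-cache-copy
+   $ spack mirror add local_cache file:///tmp/spack-cache-copy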
diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst
index 1f83f611b0..a082b85efa 100644
--- a/lib/spack/docs/packaging_guide.rst
+++ b/lib/spack/docs/packaging_guide.rst
@@ -36,10 +36,11 @@ Creating & editing packages
``spack create``
~~~~~~~~~~~~~~~~~~~~~
-The ``spack create`` command generates a boilerplate package template
-from a URL. The URL should point to a tarball or other software
-archive. In most cases, ``spack create`` plus a few modifications is
-all you need to get a package working.
+The ``spack create`` command creates a directory with the package name and
+generates a ``package.py`` file with a boilerplate package template from a URL.
+The URL should point to a tarball or other software archive. In most cases,
+``spack create`` plus a few modifications is all you need to get a package
+working.
Here's an example:
@@ -47,12 +48,16 @@ Here's an example:
$ spack create http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz
-Spack examines the tarball URL and tries to figure out the name of the
-package to be created. It also tries to determine what version strings
-look like for this package. Using this information, it will try to
-find *additional* versions by spidering the package's webpage. If it
-finds multiple versions, Spack prompts you to tell it how many
-versions you want to download and checksum:
+Spack examines the tarball URL and tries to figure out the name of the package
+to be created. Once the name is determined a directory in the appropriate
+repository is created with that name. Spack prefers, but does not require, that
+names be lower case, so the directory name will be lower case when ``spack
+create`` generates it. If mixed or upper case is desired, simply rename the
+directory. Spack also tries to determine what version
+strings look like for this package. Using this information, it will try to find
+*additional* versions by spidering the package's webpage. If it finds multiple
+versions, Spack prompts you to tell it how many versions you want to download
+and checksum:
.. code-block:: sh
@@ -297,9 +302,10 @@ directories or files (like patches) that it needs to build.
Package Names
~~~~~~~~~~~~~~~~~~
-Packages are named after the directory containing ``package.py``. So,
-``libelf``'s ``package.py`` lives in a directory called ``libelf``.
-The ``package.py`` file defines a class called ``Libelf``, which
+Packages are named after the directory containing ``package.py``. It is
+preferred, but not required, that the directory, and thus the package name, be
+lower case. So, ``libelf``'s ``package.py`` lives in a directory called
+``libelf``. The ``package.py`` file defines a class called ``Libelf``, which
extends Spack's ``Package`` class. For example, here is
``$SPACK_ROOT/var/spack/repos/builtin/packages/libelf/package.py``:
@@ -377,6 +383,8 @@ add a line like this in the package class:
version('8.2.1', '4136d7b4c04df68b686570afa26988ac')
...
+Versions should be listed with the newest version first.
+
Version URLs
~~~~~~~~~~~~~~~~~
@@ -385,8 +393,21 @@ in the package. For example, Spack is smart enough to download
version ``8.2.1`` of the ``Foo`` package above from
``http://example.com/foo-8.2.1.tar.gz``.
-If spack *cannot* extrapolate the URL from the ``url`` field, or if
-the package doesn't have a ``url`` field, you can add a URL explicitly
+If spack *cannot* extrapolate the URL from the ``url`` field by
+default, you can write your own URL generation algorithm in place of
+the ``url`` declaration. For example:
+
+.. code-block:: python
+ :linenos:
+
+ class Foo(Package):
+ def url_for_version(self, version):
+ return 'http://example.com/version_%s/foo-%s.tar.gz' \
+ % (version, version)
+ version('8.2.1', '4136d7b4c04df68b686570afa26988ac')
+ ...
+
+If a URL cannot be derived systematically, you can add an explicit URL
for a particular version:
.. code-block:: python
@@ -446,14 +467,25 @@ to use based on the hash length.
``spack md5``
^^^^^^^^^^^^^^^^^^^^^^
-If you have a single file to checksum, you can use the ``spack md5``
-command to do it. Here's how you might download an archive and get a
-checksum for it:
+If you have one or more files to checksum, you can use the ``spack md5``
+command to do it:
.. code-block:: sh
- $ curl -O http://exmaple.com/foo-8.2.1.tar.gz'
- $ spack md5 foo-8.2.1.tar.gz
+ $ spack md5 foo-8.2.1.tar.gz foo-8.2.2.tar.gz
+ ==> 2 MD5 checksums:
+ 4136d7b4c04df68b686570afa26988ac foo-8.2.1.tar.gz
+ 1586b70a49dfe05da5fcc29ef239dce0 foo-8.2.2.tar.gz
+
+``spack md5`` also accepts one or more URLs and automatically downloads
+the files for you:
+
+.. code-block:: sh
+
+ $ spack md5 http://example.com/foo-8.2.1.tar.gz
+ ==> Trying to fetch from http://example.com/foo-8.2.1.tar.gz
+ ######################################################################## 100.0%
+ ==> 1 MD5 checksum:
4136d7b4c04df68b686570afa26988ac foo-8.2.1.tar.gz
Doing this for lots of files, or whenever a new package version is
@@ -547,7 +579,7 @@ The package author is responsible for coming up with a sensible name
for each version to be fetched from a repository. For example, if
you're fetching from a tag like ``v1.0``, you might call that ``1.0``.
If you're fetching a nameless git commit or an older subversion
-revision, you might give the commit an intuitive name, like ``dev``
+revision, you might give the commit an intuitive name, like ``develop``
for a development version, or ``some-fancy-new-feature`` if you want
to be more specific.
@@ -557,6 +589,17 @@ branches move forward over time and you aren't guaranteed to get the
same thing every time you fetch a particular version. Life isn't
always simple, though, so this is not strictly enforced.
+When fetching from the branch corresponding to the development version
+(often called ``master``, ``trunk`` or ``dev``), it is recommended to
+call this version ``develop``. Spack has special treatment for this version so
+that ``@develop`` will satisfy dependencies like
+``depends_on(abc, when="@x.y.z:")``. In other words, ``@develop`` is
+greater than any other version. The rationale is that certain features or
+options first appear in the development branch. Therefore if a package author
+wants to keep the package on the bleeding edge and provide support for new
+features, it is advised to use ``develop`` for such a version which will
+greatly simplify writing dependencies and version-related conditionals.
+
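+A sketch of what this looks like in a package file (the URL, versions, and
+dependency name are illustrative):
+
+.. code-block:: python
+
+   class Example(Package):
+       version('develop', git='https://github.com/example-project/example.git',
+               branch='master')
+       version('1.2', '0123456789abcdef0123456789abcdef')
+
+       # Satisfied by @1.3, @2.0, and also by @develop, since
+       # develop is greater than any other version.
+       depends_on('abc', when='@1.3:')
+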
In some future release, Spack may support extrapolating repository
versions as it does for tarball URLs, but currently this is not
supported.
@@ -572,6 +615,7 @@ Git fetching is enabled with the following parameters to ``version``:
* ``tag``: name of a tag to fetch.
* ``branch``: name of a branch to fetch.
* ``commit``: SHA hash (or prefix) of a commit to fetch.
+ * ``submodules``: Also fetch submodules when checking out this repository.
Only one of ``tag``, ``branch``, or ``commit`` can be used at a time.
@@ -582,7 +626,7 @@ Default branch
class Example(Package):
...
- version('dev', git='https://github.com/example-project/example.git')
+ version('develop', git='https://github.com/example-project/example.git')
This is not recommended, as the contents of the default branch
change over time.
@@ -628,6 +672,17 @@ Commits
could just use the abbreviated commit hash. It's up to the package
author to decide what makes the most sense.
+Submodules
+
+ You can supply ``submodules=True`` to cause Spack to fetch submodules
+ along with the repository at fetch time.
+
+ .. code-block:: python
+
+ version('1.0.1', git='https://github.com/example-project/example.git',
+ tag='v1.0.1', submodules=True)
+
+
Installing
^^^^^^^^^^^^^^
@@ -655,7 +710,7 @@ Default
.. code-block:: python
- version('hg-head', hg='https://jay.grs.rwth-aachen.de/hg/example')
+ version('develop', hg='https://jay.grs.rwth-aachen.de/hg/example')
Note that this is not recommended; try to fetch a particular
revision instead.
@@ -687,7 +742,7 @@ Fetching the head
.. code-block:: python
- version('svn-head', svn='https://outreach.scidac.gov/svn/libmonitor/trunk')
+ version('develop', svn='https://outreach.scidac.gov/svn/libmonitor/trunk')
This is not recommended, as the head will move forward over time.
@@ -697,12 +752,19 @@ Fetching a revision
.. code-block:: python
- version('svn-head', svn='https://outreach.scidac.gov/svn/libmonitor/trunk',
+ version('develop', svn='https://outreach.scidac.gov/svn/libmonitor/trunk',
revision=128)
Subversion branches are handled as part of the directory structure, so
you can check out a branch or tag by changing the ``url``.
+Automatic caching of files fetched during installation
+------------------------------------------------------
+
+Spack maintains a cache (described :ref:`here <caching>`) which saves files
+retrieved during package installations to avoid re-downloading in the case that
+a package is installed with a different specification (but the same version) or
+reinstalled on account of a change in the hashing scheme.
.. _license:
@@ -776,7 +838,7 @@ Spack will create a global license file located at
file using the editor set in ``$EDITOR``, or vi if unset. It will look like
this:
-.. code-block::
+.. code-block:: sh
# A license is required to use pgi.
#
@@ -807,7 +869,7 @@ You can add your license directly to this file, or tell FlexNet to use a
license stored on a separate license server. Here is an example that
points to a license server called licman1:
-.. code-block::
+.. code-block:: sh
SERVER licman1.mcs.anl.gov 00163eb7fba5 27200
USE_SERVER
@@ -1235,6 +1297,31 @@ command line to find installed packages or to install packages with
particular constraints, and package authors can use specs to describe
relationships between packages.
+Additionally, dependencies may be specified for specific use cases:
+
+.. code-block:: python
+
+ depends_on("cmake", type="build")
+ depends_on("libelf", type=("build", "link"))
+ depends_on("python", type="run")
+
+The dependency types are:
+
+ * **"build"**: made available during the project's build. The package will
+ be added to ``PATH``, the compiler include paths, and ``PYTHONPATH``.
+ Other projects which depend on this one will not have these modified
+ (building project X doesn't need project Y's build dependencies).
+ * **"link"**: the project is linked to by the project. The package will be
+ added to the current package's ``rpath``.
+ * **"run"**: the project is used by the project at runtime. The package will
+ be added to ``PATH`` and ``PYTHONPATH``.
+
+If not specified, ``type`` is assumed to be ``("build", "link")``. This is the
+common case for compiled language usage. Also available are the aliases
+``"alldeps"`` for all dependency types and ``"nolink"`` (``("build", "run")``)
+for use by dependencies which are not expressed via a linker (e.g., Python or
+Lua module loading).
+
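+For instance, a Python package might declare its dependencies like this
+(a sketch; the package names are illustrative):
+
+.. code-block:: python
+
+   # Needed at build time and at run time, but never linked against.
+   depends_on('py-six', type='nolink')
+   # Needed only while building.
+   depends_on('py-setuptools', type='build')
+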
.. _setup-dependent-environment:
``setup_dependent_environment()``
@@ -1339,6 +1426,19 @@ Now, the ``py-numpy`` package can be used as an argument to ``spack
activate``. When it is activated, all the files in its prefix will be
symbolically linked into the prefix of the python package.
+Many packages produce Python extensions for *some* variants, but not
+others: they should extend ``python`` only if the appropriate
+variant(s) are selected. This may be accomplished with conditional
+``extends()`` declarations:
+
+.. code-block:: python
+
+ class FooLib(Package):
+ variant('python', default=True,
+ description='Build the Python extension Module')
+ extends('python', when='+python')
+ ...
+
Sometimes, certain files in one package will conflict with those in
another, which means they cannot both be activated (symlinked) at the
same time. In this case, you can tell Spack to ignore those files
@@ -1625,21 +1725,21 @@ the user runs ``spack install`` and the time the ``install()`` method
is called. The concretized version of the spec above might look like
this::
- mpileaks@2.3%gcc@4.7.3 arch=linux-ppc64
- ^callpath@1.0%gcc@4.7.3+debug arch=linux-ppc64
- ^dyninst@8.1.2%gcc@4.7.3 arch=linux-ppc64
- ^libdwarf@20130729%gcc@4.7.3 arch=linux-ppc64
- ^libelf@0.8.11%gcc@4.7.3 arch=linux-ppc64
- ^mpich@3.0.4%gcc@4.7.3 arch=linux-ppc64
+ mpileaks@2.3%gcc@4.7.3 arch=linux-debian7-x86_64
+ ^callpath@1.0%gcc@4.7.3+debug arch=linux-debian7-x86_64
+ ^dyninst@8.1.2%gcc@4.7.3 arch=linux-debian7-x86_64
+ ^libdwarf@20130729%gcc@4.7.3 arch=linux-debian7-x86_64
+ ^libelf@0.8.11%gcc@4.7.3 arch=linux-debian7-x86_64
+ ^mpich@3.0.4%gcc@4.7.3 arch=linux-debian7-x86_64
.. graphviz::
digraph {
- "mpileaks@2.3\n%gcc@4.7.3\n arch=linux-ppc64" -> "mpich@3.0.4\n%gcc@4.7.3\n arch=linux-ppc64"
- "mpileaks@2.3\n%gcc@4.7.3\n arch=linux-ppc64" -> "callpath@1.0\n%gcc@4.7.3+debug\n arch=linux-ppc64" -> "mpich@3.0.4\n%gcc@4.7.3\n arch=linux-ppc64"
- "callpath@1.0\n%gcc@4.7.3+debug\n arch=linux-ppc64" -> "dyninst@8.1.2\n%gcc@4.7.3\n arch=linux-ppc64"
- "dyninst@8.1.2\n%gcc@4.7.3\n arch=linux-ppc64" -> "libdwarf@20130729\n%gcc@4.7.3\n arch=linux-ppc64" -> "libelf@0.8.11\n%gcc@4.7.3\n arch=linux-ppc64"
- "dyninst@8.1.2\n%gcc@4.7.3\n arch=linux-ppc64" -> "libelf@0.8.11\n%gcc@4.7.3\n arch=linux-ppc64"
+ "mpileaks@2.3\n%gcc@4.7.3\n arch=linux-debian7-x86_64" -> "mpich@3.0.4\n%gcc@4.7.3\n arch=linux-debian7-x86_64"
+ "mpileaks@2.3\n%gcc@4.7.3\n arch=linux-debian7-x86_64" -> "callpath@1.0\n%gcc@4.7.3+debug\n arch=linux-debian7-x86_64" -> "mpich@3.0.4\n%gcc@4.7.3\n arch=linux-debian7-x86_64"
+ "callpath@1.0\n%gcc@4.7.3+debug\n arch=linux-debian7-x86_64" -> "dyninst@8.1.2\n%gcc@4.7.3\n arch=linux-debian7-x86_64"
+ "dyninst@8.1.2\n%gcc@4.7.3\n arch=linux-debian7-x86_64" -> "libdwarf@20130729\n%gcc@4.7.3\n arch=linux-debian7-x86_64" -> "libelf@0.8.11\n%gcc@4.7.3\n arch=linux-debian7-x86_64"
+ "dyninst@8.1.2\n%gcc@4.7.3\n arch=linux-debian7-x86_64" -> "libelf@0.8.11\n%gcc@4.7.3\n arch=linux-debian7-x86_64"
}
Here, all versions, compilers, and platforms are filled in, and there
@@ -1668,9 +1768,9 @@ running ``spack spec``. For example:
^libdwarf
^libelf
- dyninst@8.0.1%gcc@4.7.3 arch=linux-ppc64
- ^libdwarf@20130729%gcc@4.7.3 arch=linux-ppc64
- ^libelf@0.8.13%gcc@4.7.3 arch=linux-ppc64
+ dyninst@8.0.1%gcc@4.7.3 arch=linux-debian7-x86_64
+ ^libdwarf@20130729%gcc@4.7.3 arch=linux-debian7-x86_64
+ ^libelf@0.8.13%gcc@4.7.3 arch=linux-debian7-x86_64
This is useful when you want to know exactly what Spack will do when
you ask for a particular spec.
@@ -1844,7 +1944,7 @@ discover its dependencies.
If you want to see the environment that a package will build with, or
if you want to run commands in that environment to test them out, you
-can use the :ref:```spack env`` <spack-env>` command, documented
+can use the :ref:`spack env <spack-env>` command, documented
below.
.. _compiler-wrappers:
@@ -1950,6 +2050,19 @@ instead of hard-coding ``join_path(self.spec['mpi'].prefix.bin, 'mpicc')`` for
the reasons outlined above.
+Blas and Lapack libraries
+~~~~~~~~~~~~~~~~~~~~~~~~~
+Different packages provide implementations of the ``Blas`` and ``Lapack``
+routines. The names of the resulting static and/or shared libraries differ
+from package to package. In order to make the ``install()`` method indifferent
+to the choice of ``Blas`` implementation, each package that provides it
+sets up ``self.spec.blas_shared_lib`` and ``self.spec.blas_static_lib`` to
+point to the shared and static ``Blas`` libraries, respectively. The same
+applies to packages that provide ``Lapack``. Package developers are advised
+to use these variables, for example ``spec['blas'].blas_shared_lib``, instead
+of hard-coding ``join_path(spec['blas'].prefix.lib, 'libopenblas.so')``.
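+
+For example, a package might pass the concretized ``Blas`` library to its
+build like this (a minimal sketch; the ``--with-blas`` flag is illustrative):
+
+.. code-block:: python
+
+   def install(self, spec, prefix):
+       # Works for any Blas provider, since each provider sets
+       # blas_shared_lib on its spec.
+       configure('--prefix=%s' % prefix,
+                 '--with-blas=%s' % spec['blas'].blas_shared_lib)
+       make()
+       make('install')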
+
+
Forking ``install()``
~~~~~~~~~~~~~~~~~~~~~
@@ -2162,12 +2275,12 @@ example:
def install(self, prefix):
# Do default install
- @when('arch=chaos_5_x86_64_ib')
+ @when('arch=linux-debian7-x86_64')
def install(self, prefix):
# This will be executed instead of the default install if
# the package's sys_type() is chaos_5_x86_64_ib.
- @when('arch=bgqos_0")
+    @when('arch=linux-debian7-x86_64')
def install(self, prefix):
# This will be executed if the package's sys_type is bgqos_0
@@ -2295,7 +2408,7 @@ build system.
.. _sanity-checks:
-Sanity checking an intallation
+Sanity checking an installation
--------------------------------
By default, Spack assumes that a build has failed if nothing is
@@ -2511,6 +2624,59 @@ File functions
.. _package-lifecycle:
+Coding Style Guidelines
+---------------------------
+
+The following guidelines are provided in the interest of making
+Spack packages work in a consistent manner:
+
+
+Variant Names
+~~~~~~~~~~~~~~
+
+Spack packages with variants similar to already-existing Spack
+packages should use the same name for their variants. Standard
+variant names are:
+
+ ======= ======== ========================
+ Name Default Description
+ ======= ======== ========================
+ shared True Build shared libraries
+ static Build static libraries
+ mpi Use MPI
+ python Build Python extension
+ ======= ======== ========================
+
+If specified in this table, the corresponding default should be used
+when declaring a variant.
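+
+For example (an illustrative declaration following the table above):
+
+.. code-block:: python
+
+   # Standard variant name and default from the table:
+   variant('shared', default=True, description='Build shared libraries')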
+
+
+Version Lists
+~~~~~~~~~~~~~~
+
+Spack packages should list supported versions with the newest first.
+
+Special Versions
+~~~~~~~~~~~~~~~~~
+
+The following *special* version names may be used when building a package:
+
+* *@system*: Indicates a hook to the OS-installed version of the
+ package. This is useful, for example, to tell Spack to use the
+ OS-installed version in ``packages.yaml``::
+
+   openssl:
+     paths:
+       openssl@system: /usr
+     buildable: False
+
+  Certain Spack internals look for the *@system* version and
+  handle such packages specially in that case.
+
+* *@local*: Indicates the version was built manually from some source
+ tree of unknown provenance (see ``spack setup``).
+
+
Packaging workflow commands
---------------------------------
@@ -2605,11 +2771,16 @@ build process will start from scratch.
``spack purge``
~~~~~~~~~~~~~~~~~
-Cleans up all of Spack's temporary files. Use this to recover disk
-space if temporary files from interrupted or failed installs
-accumulate in the staging area. This is equivalent to running ``spack
-clean`` for every package you have fetched or staged.
+Cleans up all of Spack's temporary and cached files. This can be used to
+recover disk space if temporary files from interrupted or failed installs
+accumulate in the staging area.
+
+When called with ``--stage`` or ``--all`` (or without arguments, in which case
+the default is ``--all``), this removes all staged files; this is equivalent
+to running ``spack clean`` for every package you have fetched or staged.
+When called with ``--cache`` or ``--all``, this clears all resources
+:ref:`cached <caching>` during installs.
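+
+For example:
+
+.. code-block:: sh
+
+   $ spack purge           # defaults to --all
+   $ spack purge --cache   # remove only cached downloads
+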
Keeping the stage directory on success
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -2757,11 +2928,11 @@ build it:
$ spack stage libelf
==> Trying to fetch from http://www.mr511.de/software/libelf-0.8.13.tar.gz
######################################################################## 100.0%
- ==> Staging archive: /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3 arch=linux-ppc64/libelf-0.8.13.tar.gz
- ==> Created stage in /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3 arch=linux-ppc64.
+ ==> Staging archive: /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3 arch=linux-debian7-x86_64/libelf-0.8.13.tar.gz
+ ==> Created stage in /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3 arch=linux-debian7-x86_64.
$ spack cd libelf
$ pwd
- /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3 arch=linux-ppc64/libelf-0.8.13
+ /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3 arch=linux-debian7-x86_64/libelf-0.8.13
``spack cd`` here changed the current working directory to the
directory containing the expanded ``libelf`` source code. There are a
@@ -2826,3 +2997,109 @@ might write:
DWARF_PREFIX = $(spack location -i libdwarf)
CXXFLAGS += -I$DWARF_PREFIX/include
CXXFLAGS += -L$DWARF_PREFIX/lib
+
+Build System Configuration Support
+----------------------------------
+
+Imagine a developer creating a CMake-based (or Autotools) project in a local
+directory, which depends on libraries A-Z. Once Spack has installed
+those dependencies, one would like to run ``cmake`` with the appropriate
+command line and environment so that CMake can find them. The ``spack
+setup`` command does this conveniently, producing a CMake
+configuration that is essentially the same as how Spack *would have*
+configured the project. This can be demonstrated with a usage
+example:
+
+.. code-block:: bash
+
+ cd myproject
+ spack setup myproject@local
+ mkdir build; cd build
+ ../spconfig.py ..
+ make
+ make install
+
+Notes:
+ * Spack must have ``myproject/package.py`` in its repository for
+ this to work.
+ * ``spack setup`` produces the executable script ``spconfig.py`` in
+ the local directory, and also creates the module file for the
+ package. ``spconfig.py`` is normally run from the user's
+ out-of-source build directory.
+ * The version number given to ``spack setup`` is arbitrary, just
+ like ``spack diy``. ``myproject/package.py`` does not need to
+ have any valid downloadable versions listed (typical when a
+ project is new).
+  * ``spconfig.py`` produces a CMake configuration that *does not* use the
+ Spack wrappers. Any resulting binaries *will not* use RPATH,
+ unless the user has enabled it. This is recommended for
+ development purposes, not production.
+ * ``spconfig.py`` is human readable, and can serve as a developer
+ reference of what dependencies are being used.
+ * ``make install`` installs the package into the Spack repository,
+ where it may be used by other Spack packages.
+ * CMake-generated makefiles re-run CMake in some circumstances. Use
+ of ``spconfig.py`` breaks this behavior, requiring the developer
+ to manually re-run ``spconfig.py`` when a ``CMakeLists.txt`` file
+ has changed.
+
+CMakePackage
+~~~~~~~~~~~~
+
+In order to enable ``spack setup`` functionality, the author of
+``myproject/package.py`` must subclass from ``CMakePackage`` instead
+of the standard ``Package`` superclass. Because CMake is
+standardized, the packager does not need to tell Spack how to run
+``cmake; make; make install``. Instead the packager only needs to
+create (optional) methods ``configure_args()`` and ``configure_env()``, which
+provide the arguments (as a list) and extra environment variables (as
+a dict) to pass to the ``cmake`` command. Usually, these will
+translate variant flags into CMake definitions. For example:
+
+.. code-block:: python
+
+ def configure_args(self):
+ spec = self.spec
+ return [
+ '-DUSE_EVERYTRACE=%s' % ('YES' if '+everytrace' in spec else 'NO'),
+ '-DBUILD_PYTHON=%s' % ('YES' if '+python' in spec else 'NO'),
+ '-DBUILD_GRIDGEN=%s' % ('YES' if '+gridgen' in spec else 'NO'),
+ '-DBUILD_COUPLER=%s' % ('YES' if '+coupler' in spec else 'NO'),
+ '-DUSE_PISM=%s' % ('YES' if '+pism' in spec else 'NO')]
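+
+A corresponding ``configure_env()`` returns the extra environment variables
+as a dict. A minimal sketch (the variable name and value are purely
+illustrative):
+
+.. code-block:: python
+
+   def configure_env(self):
+       # Hypothetical: expose a dependency's prefix to the cmake run.
+       env = {}
+       if '+pism' in self.spec:
+           env['PISM_ROOT'] = self.spec['pism'].prefix
+       return env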
+
+If needed, a packager may also override methods defined in
+``StagedPackage`` (see below).
+
+
+StagedPackage
+~~~~~~~~~~~~~
+
+``CMakePackage`` is implemented by subclassing the ``StagedPackage``
+superclass, which breaks down the standard ``Package.install()``
+method into several sub-stages: ``setup``, ``configure``, ``build``
+and ``install``. Details:
+
+* Instead of implementing the standard ``install()`` method, package
+ authors implement the methods for the sub-stages
+ ``install_setup()``, ``install_configure()``,
+ ``install_build()``, and ``install_install()``.
+
+* The ``spack install`` command runs the sub-stages ``configure``,
+ ``build`` and ``install`` in order. (The ``setup`` stage is
+ not run by default; see below).
+* The ``spack setup`` command runs the sub-stages ``setup``
+ and a dummy install (to create the module file).
+* The sub-stage install methods take no arguments (other than
+ ``self``). The arguments ``spec`` and ``prefix`` to the standard
+ ``install()`` method may be accessed via ``self.spec`` and
+ ``self.prefix``.
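+
+Putting this together, a package built on these sub-stages might look like
+the following (a minimal sketch; the configure arguments are illustrative):
+
+.. code-block:: python
+
+   class Myproject(StagedPackage):
+       ...
+
+       def install_configure(self):
+           # spec and prefix are available via self.spec and self.prefix.
+           configure('--prefix=%s' % self.prefix)
+
+       def install_build(self):
+           make()
+
+       def install_install(self):
+           make('install')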
+
+GNU Autotools
+~~~~~~~~~~~~~
+
+The ``setup`` functionality is currently only available for
+CMake-based packages. Extending this functionality to GNU
+Autotools-based packages would be easy (and should be done by a
+developer who actively uses Autotools). Packages that use
+non-standard build systems can gain ``setup`` functionality by
+subclassing ``StagedPackage`` directly.
diff --git a/lib/spack/env/cc b/lib/spack/env/cc
index 9758b74f37..c6bb50d261 100755
--- a/lib/spack/env/cc
+++ b/lib/spack/env/cc
@@ -110,13 +110,13 @@ case "$command" in
comp="CXX"
lang_flags=CXX
;;
- f90|fc|f95|gfortran|ifort|pgfortran|xlf90|nagfor)
+ ftn|f90|fc|f95|gfortran|ifort|pgfortran|xlf90|nagfor)
command="$SPACK_FC"
language="Fortran 90"
comp="FC"
lang_flags=F
;;
- f77|gfortran|ifort|pgfortran|xlf|nagfor)
+ f77|gfortran|ifort|pgfortran|xlf|nagfor|ftn)
command="$SPACK_F77"
language="Fortran 77"
comp="F77"
@@ -174,6 +174,28 @@ if [[ -z $command ]]; then
die "ERROR: Compiler '$SPACK_COMPILER_SPEC' does not support compiling $language programs."
fi
+#
+# Filter '.' and Spack environment directories out of PATH so that
+# this script doesn't just call itself
+#
+IFS=':' read -ra env_path <<< "$PATH"
+IFS=':' read -ra spack_env_dirs <<< "$SPACK_ENV_PATH"
+spack_env_dirs+=("" ".")
+PATH=""
+for dir in "${env_path[@]}"; do
+ addpath=true
+ for env_dir in "${spack_env_dirs[@]}"; do
+ if [[ $dir == $env_dir ]]; then
+ addpath=false
+ break
+ fi
+ done
+ if $addpath; then
+ PATH="${PATH:+$PATH:}$dir"
+ fi
+done
+export PATH
+
if [[ $mode == vcheck ]]; then
exec ${command} "$@"
fi
@@ -286,28 +308,6 @@ unset LD_LIBRARY_PATH
unset LD_RUN_PATH
unset DYLD_LIBRARY_PATH
-#
-# Filter '.' and Spack environment directories out of PATH so that
-# this script doesn't just call itself
-#
-IFS=':' read -ra env_path <<< "$PATH"
-IFS=':' read -ra spack_env_dirs <<< "$SPACK_ENV_PATH"
-spack_env_dirs+=("" ".")
-PATH=""
-for dir in "${env_path[@]}"; do
- addpath=true
- for env_dir in "${spack_env_dirs[@]}"; do
- if [[ $dir == $env_dir ]]; then
- addpath=false
- break
- fi
- done
- if $addpath; then
- PATH="${PATH:+$PATH:}$dir"
- fi
-done
-export PATH
-
full_command=("$command" "${args[@]}")
# In test command mode, write out full command for Spack tests.
@@ -324,8 +324,8 @@ fi
if [[ $SPACK_DEBUG == TRUE ]]; then
input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_SHORT_SPEC.in.log"
output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_SHORT_SPEC.out.log"
- echo "[$mode] $command $input_command" >> $input_log
- echo "[$mode] ${full_command[@]}" >> $output_log
+ echo "[$mode] $command $input_command" >> "$input_log"
+ echo "[$mode] ${full_command[@]}" >> "$output_log"
fi
exec "${full_command[@]}"
diff --git a/lib/spack/env/cray/CC b/lib/spack/env/cray/CC
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/cray/CC
@@ -0,0 +1 @@
+../cc
\ No newline at end of file
diff --git a/lib/spack/env/cray/cc b/lib/spack/env/cray/cc
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/cray/cc
@@ -0,0 +1 @@
+../cc
\ No newline at end of file
diff --git a/lib/spack/env/cray/ftn b/lib/spack/env/cray/ftn
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/cray/ftn
@@ -0,0 +1 @@
+../cc
\ No newline at end of file
diff --git a/lib/spack/env/craype/CC b/lib/spack/env/craype/CC
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/craype/CC
@@ -0,0 +1 @@
+../cc
\ No newline at end of file
diff --git a/lib/spack/env/craype/cc b/lib/spack/env/craype/cc
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/craype/cc
@@ -0,0 +1 @@
+../cc
\ No newline at end of file
diff --git a/lib/spack/env/craype/ftn b/lib/spack/env/craype/ftn
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/craype/ftn
@@ -0,0 +1 @@
+../cc
\ No newline at end of file
diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py
index 6661a80f27..22ca85abf9 100644
--- a/lib/spack/llnl/util/filesystem.py
+++ b/lib/spack/llnl/util/filesystem.py
@@ -22,33 +22,34 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-__all__ = ['set_install_permissions', 'install', 'install_tree', 'traverse_tree',
- 'expand_user', 'working_dir', 'touch', 'touchp', 'mkdirp',
- 'force_remove', 'join_path', 'ancestor', 'can_access', 'filter_file',
- 'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink',
- 'set_executable', 'copy_mode', 'unset_executable_mode',
- 'remove_dead_links', 'remove_linked_tree', 'find_library_path',
- 'fix_darwin_install_name']
-
import os
import glob
-import sys
import re
import shutil
import stat
import errno
import getpass
-from contextlib import contextmanager, closing
-from tempfile import NamedTemporaryFile
+from contextlib import contextmanager
import subprocess
+import fileinput
import llnl.util.tty as tty
-from spack.util.compression import ALLOWED_ARCHIVE_TYPES
+
+__all__ = ['set_install_permissions', 'install', 'install_tree',
+ 'traverse_tree',
+ 'expand_user', 'working_dir', 'touch', 'touchp', 'mkdirp',
+ 'force_remove', 'join_path', 'ancestor', 'can_access',
+ 'filter_file',
+ 'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink',
+ 'set_executable', 'copy_mode', 'unset_executable_mode',
+ 'remove_dead_links', 'remove_linked_tree', 'find_library_path',
+ 'fix_darwin_install_name', 'to_link_flags', 'to_lib_name']
+
def filter_file(regex, repl, *filenames, **kwargs):
"""Like sed, but uses python regular expressions.
- Filters every line of file through regex and replaces the file
+ Filters every line of each file through regex and replaces the file
with a filtered version. Preserves mode of filtered files.
As with re.sub, ``repl`` can be either a string or a callable.
@@ -59,7 +60,7 @@ def filter_file(regex, repl, *filenames, **kwargs):
Keyword Options:
string[=False] If True, treat regex as a plain string.
- backup[=True] Make a backup files suffixed with ~
+ backup[=True] Make backup file(s) suffixed with ~
ignore_absent[=False] Ignore any files that don't exist.
"""
string = kwargs.get('string', False)
@@ -69,6 +70,7 @@ def filter_file(regex, repl, *filenames, **kwargs):
# Allow strings to use \1, \2, etc. for replacement, like sed
if not callable(repl):
unescaped = repl.replace(r'\\', '\\')
+
def replace_groups_with_groupid(m):
def groupid_to_group(x):
return m.group(int(x.group(1)))
@@ -79,30 +81,32 @@ def filter_file(regex, repl, *filenames, **kwargs):
regex = re.escape(regex)
for filename in filenames:
- backup = filename + "~"
+ backup_filename = filename + "~"
if ignore_absent and not os.path.exists(filename):
continue
- shutil.copy(filename, backup)
+ # Create backup file. Don't overwrite an existing backup
+ # file in case this file is being filtered multiple times.
+ if not os.path.exists(backup_filename):
+ shutil.copy(filename, backup_filename)
+
try:
- with closing(open(backup)) as infile:
- with closing(open(filename, 'w')) as outfile:
- for line in infile:
- foo = re.sub(regex, repl, line)
- outfile.write(foo)
+ for line in fileinput.input(filename, inplace=True):
+ print(re.sub(regex, repl, line.rstrip('\n')))
except:
# clean up the original file on failure.
- shutil.move(backup, filename)
+ shutil.move(backup_filename, filename)
raise
finally:
if not backup:
- shutil.rmtree(backup, ignore_errors=True)
+ os.remove(backup_filename)
class FileFilter(object):
"""Convenience class for calling filter_file a lot."""
+
def __init__(self, *filenames):
self.filenames = filenames
@@ -113,7 +117,7 @@ class FileFilter(object):
def change_sed_delimiter(old_delim, new_delim, *filenames):
"""Find all sed search/replace commands and change the delimiter.
e.g., if the file contains seds that look like 's///', you can
- call change_sed_delimeter('/', '@', file) to change the
+ call change_sed_delimiter('/', '@', file) to change the
delimiter to '@'.
NOTE that this routine will fail if the delimiter is ' or ".
@@ -157,9 +161,12 @@ def set_install_permissions(path):
def copy_mode(src, dest):
src_mode = os.stat(src).st_mode
dest_mode = os.stat(dest).st_mode
- if src_mode & stat.S_IXUSR: dest_mode |= stat.S_IXUSR
- if src_mode & stat.S_IXGRP: dest_mode |= stat.S_IXGRP
- if src_mode & stat.S_IXOTH: dest_mode |= stat.S_IXOTH
+ if src_mode & stat.S_IXUSR:
+ dest_mode |= stat.S_IXUSR
+ if src_mode & stat.S_IXGRP:
+ dest_mode |= stat.S_IXGRP
+ if src_mode & stat.S_IXOTH:
+ dest_mode |= stat.S_IXOTH
os.chmod(dest, dest_mode)
@@ -175,7 +182,7 @@ def install(src, dest):
"""Manually install a file to a particular location."""
tty.debug("Installing %s to %s" % (src, dest))
- # Expand dsst to its eventual full path if it is a directory.
+ # Expand dest to its eventual full path if it is a directory.
if os.path.isdir(dest):
dest = join_path(dest, os.path.basename(src))
@@ -185,7 +192,7 @@ def install(src, dest):
def install_tree(src, dest, **kwargs):
- """Manually install a file to a particular location."""
+ """Manually install a directory tree to a particular location."""
tty.debug("Installing %s to %s" % (src, dest))
shutil.copytree(src, dest, **kwargs)
@@ -215,7 +222,7 @@ def mkdirp(*paths):
if not os.path.exists(path):
os.makedirs(path)
elif not os.path.isdir(path):
- raise OSError(errno.EEXIST, "File alredy exists", path)
+ raise OSError(errno.EEXIST, "File already exists", path)
def force_remove(*paths):
@@ -224,9 +231,10 @@ def force_remove(*paths):
for path in paths:
try:
os.remove(path)
- except OSError, e:
+ except OSError:
pass
+
@contextmanager
def working_dir(dirname, **kwargs):
if kwargs.get('create', False):
@@ -240,7 +248,7 @@ def working_dir(dirname, **kwargs):
def touch(path):
"""Creates an empty file at the specified path."""
- with open(path, 'a') as file:
+ with open(path, 'a'):
os.utime(path, None)
@@ -253,7 +261,7 @@ def touchp(path):
def force_symlink(src, dest):
try:
os.symlink(src, dest)
- except OSError as e:
+ except OSError:
os.remove(dest)
os.symlink(src, dest)
@@ -275,7 +283,7 @@ def ancestor(dir, n=1):
def can_access(file_name):
"""True if we have read/write access to the file."""
- return os.access(file_name, os.R_OK|os.W_OK)
+ return os.access(file_name, os.R_OK | os.W_OK)
def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
@@ -304,7 +312,7 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
Optional args:
- order=[pre|post] -- Whether to do pre- or post-order traveral.
+ order=[pre|post] -- Whether to do pre- or post-order traversal.
ignore=<predicate> -- Predicate indicating which files to ignore.
@@ -343,13 +351,15 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
# Treat as a directory
if os.path.isdir(source_child) and (
- follow_links or not os.path.islink(source_child)):
+ follow_links or not os.path.islink(source_child)):
# When follow_nonexisting isn't set, don't descend into dirs
# in source that do not exist in dest
if follow_nonexisting or os.path.exists(dest_child):
- tuples = traverse_tree(source_root, dest_root, rel_child, **kwargs)
- for t in tuples: yield t
+ tuples = traverse_tree(
+ source_root, dest_root, rel_child, **kwargs)
+ for t in tuples:
+ yield t
# Treat as a file.
elif not ignore(os.path.join(rel_path, f)):
@@ -379,6 +389,7 @@ def remove_dead_links(root):
if not os.path.exists(real_path):
os.unlink(path)
+
def remove_linked_tree(path):
"""
Removes a directory and its contents. If the directory is a
@@ -402,28 +413,53 @@ def fix_darwin_install_name(path):
Fix install name of dynamic libraries on Darwin to have full path.
There are two parts of this task:
(i) use install_name('-id',...) to change install name of a single lib;
- (ii) use install_name('-change',...) to change the cross linking between libs.
- The function assumes that all libraries are in one folder and currently won't
- follow subfolders.
+ (ii) use install_name('-change',...) to change the cross linking between
+ libs. The function assumes that all libraries are in one folder and
+ currently won't follow subfolders.
Args:
- path: directory in which .dylib files are alocated
+ path: directory in which .dylib files are located
"""
- libs = glob.glob(join_path(path,"*.dylib"))
+ libs = glob.glob(join_path(path, "*.dylib"))
for lib in libs:
# fix install name first:
- subprocess.Popen(["install_name_tool", "-id",lib,lib], stdout=subprocess.PIPE).communicate()[0]
- long_deps = subprocess.Popen(["otool", "-L",lib], stdout=subprocess.PIPE).communicate()[0].split('\n')
+ subprocess.Popen(
+ ["install_name_tool", "-id", lib, lib],
+ stdout=subprocess.PIPE).communicate()[0]
+ long_deps = subprocess.Popen(
+ ["otool", "-L", lib],
+ stdout=subprocess.PIPE).communicate()[0].split('\n')
deps = [dep.partition(' ')[0][1::] for dep in long_deps[2:-1]]
# fix all dependencies:
for dep in deps:
for loc in libs:
if dep == os.path.basename(loc):
- subprocess.Popen(["install_name_tool", "-change",dep,loc,lib], stdout=subprocess.PIPE).communicate()[0]
+ subprocess.Popen(
+ ["install_name_tool", "-change", dep, loc, lib],
+ stdout=subprocess.PIPE).communicate()[0]
break
+def to_lib_name(library):
+ """Transforms a path to the library /path/to/lib<name>.xyz into <name>
+ """
+ # Assume libXYZ.suffix
+ return os.path.basename(library)[3:].split(".")[0]
+
+
+def to_link_flags(library):
+ """Transforms a path to a <library> into linking flags -L<dir> -l<name>.
+
+ Return:
+ A string of linking flags.
+ """
+ dir = os.path.dirname(library)
+ name = to_lib_name(library)
+ res = '-L%s -l%s' % (dir, name)
+ return res
+
+
def find_library_path(libname, *paths):
"""Searches for a file called <libname> in each path.
diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py
index 63eb08d803..df32012e2d 100644
--- a/lib/spack/llnl/util/lang.py
+++ b/lib/spack/llnl/util/lang.py
@@ -24,7 +24,6 @@
##############################################################################
import os
import re
-import sys
import functools
import collections
import inspect
@@ -39,14 +38,15 @@ def index_by(objects, *funcs):
Values are used as keys. For example, suppose you have four
objects with attributes that look like this:
- a = Spec(name="boost", compiler="gcc", arch="bgqos_0")
- b = Spec(name="mrnet", compiler="intel", arch="chaos_5_x86_64_ib")
- c = Spec(name="libelf", compiler="xlc", arch="bgqos_0")
- d = Spec(name="libdwarf", compiler="intel", arch="chaos_5_x86_64_ib")
+ a = Spec(name="boost", compiler="gcc", arch="bgqos_0")
+ b = Spec(name="mrnet", compiler="intel", arch="chaos_5_x86_64_ib")
+ c = Spec(name="libelf", compiler="xlc", arch="bgqos_0")
+ d = Spec(name="libdwarf", compiler="intel", arch="chaos_5_x86_64_ib")
- list_of_specs = [a,b,c,d]
- index1 = index_by(list_of_specs, lambda s: s.arch, lambda s: s.compiler)
- index2 = index_by(list_of_specs, lambda s: s.compiler)
+ list_of_specs = [a,b,c,d]
+ index1 = index_by(list_of_specs, lambda s: s.arch,
+ lambda s: s.compiler)
+ index2 = index_by(list_of_specs, lambda s: s.compiler)
``index1'' now has two levels of dicts, with lists at the
leaves, like this:
@@ -137,7 +137,7 @@ def get_calling_module_name():
finally:
del stack
- if not '__module__' in caller_locals:
+ if '__module__' not in caller_locals:
raise RuntimeError("Must invoke get_calling_module_name() "
"from inside a class definition!")
@@ -173,11 +173,11 @@ def has_method(cls, name):
class memoized(object):
"""Decorator that caches the results of a function, storing them
in an attribute of that function."""
+
def __init__(self, func):
self.func = func
self.cache = {}
-
def __call__(self, *args):
if not isinstance(args, collections.Hashable):
# Not hashable, so just call the function.
@@ -187,12 +187,10 @@ class memoized(object):
self.cache[args] = self.func(*args)
return self.cache[args]
-
def __get__(self, obj, objtype):
"""Support instance methods."""
return functools.partial(self.__call__, obj)
-
def clear(self):
"""Expunge cache so that self.func will be called again."""
self.cache.clear()
@@ -237,13 +235,21 @@ def key_ordering(cls):
if not has_method(cls, '_cmp_key'):
raise TypeError("'%s' doesn't define _cmp_key()." % cls.__name__)
- setter('__eq__', lambda s,o: (s is o) or (o is not None and s._cmp_key() == o._cmp_key()))
- setter('__lt__', lambda s,o: o is not None and s._cmp_key() < o._cmp_key())
- setter('__le__', lambda s,o: o is not None and s._cmp_key() <= o._cmp_key())
-
- setter('__ne__', lambda s,o: (s is not o) and (o is None or s._cmp_key() != o._cmp_key()))
- setter('__gt__', lambda s,o: o is None or s._cmp_key() > o._cmp_key())
- setter('__ge__', lambda s,o: o is None or s._cmp_key() >= o._cmp_key())
+ setter('__eq__',
+ lambda s, o:
+ (s is o) or (o is not None and s._cmp_key() == o._cmp_key()))
+ setter('__lt__',
+ lambda s, o: o is not None and s._cmp_key() < o._cmp_key())
+ setter('__le__',
+ lambda s, o: o is not None and s._cmp_key() <= o._cmp_key())
+
+ setter('__ne__',
+ lambda s, o:
+ (s is not o) and (o is None or s._cmp_key() != o._cmp_key()))
+ setter('__gt__',
+ lambda s, o: o is None or s._cmp_key() > o._cmp_key())
+ setter('__ge__',
+ lambda s, o: o is None or s._cmp_key() >= o._cmp_key())
setter('__hash__', lambda self: hash(self._cmp_key()))
@@ -254,10 +260,10 @@ def key_ordering(cls):
class HashableMap(dict):
"""This is a hashable, comparable dictionary. Hash is performed on
a tuple of the values in the dictionary."""
+
def _cmp_key(self):
return tuple(sorted(self.values()))
-
def copy(self):
"""Type-agnostic clone method. Preserves subclass type."""
# Construct a new dict of my type
@@ -336,24 +342,39 @@ def match_predicate(*args):
return match
-
def DictWrapper(dictionary):
"""Returns a class that wraps a dictionary and enables it to be used
like an object."""
class wrapper(object):
- def __getattr__(self, name): return dictionary[name]
- def __setattr__(self, name, value): dictionary[name] = value
- def setdefault(self, *args): return dictionary.setdefault(*args)
- def get(self, *args): return dictionary.get(*args)
- def keys(self): return dictionary.keys()
- def values(self): return dictionary.values()
- def items(self): return dictionary.items()
- def __iter__(self): return iter(dictionary)
+ def __getattr__(self, name):
+ return dictionary[name]
+
+ def __setattr__(self, name, value):
+ dictionary[name] = value
+
+ def setdefault(self, *args):
+ return dictionary.setdefault(*args)
+
+ def get(self, *args):
+ return dictionary.get(*args)
+
+ def keys(self):
+ return dictionary.keys()
+
+ def values(self):
+ return dictionary.values()
+
+ def items(self):
+ return dictionary.items()
+
+ def __iter__(self):
+ return iter(dictionary)
return wrapper()
class RequiredAttributeError(ValueError):
+
def __init__(self, message):
super(RequiredAttributeError, self).__init__(message)
diff --git a/lib/spack/llnl/util/link_tree.py b/lib/spack/llnl/util/link_tree.py
index b6d8796084..d6547e933a 100644
--- a/lib/spack/llnl/util/link_tree.py
+++ b/lib/spack/llnl/util/link_tree.py
@@ -23,12 +23,13 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
"""LinkTree class for setting up trees of symbolic links."""
-__all__ = ['LinkTree']
import os
import shutil
from llnl.util.filesystem import *
+__all__ = ['LinkTree']
+
empty_file_name = '.spack-empty'
@@ -43,13 +44,13 @@ class LinkTree(object):
modified.
"""
+
def __init__(self, source_root):
if not os.path.exists(source_root):
raise IOError("No such file or directory: '%s'", source_root)
self._root = source_root
-
def find_conflict(self, dest_root, **kwargs):
"""Returns the first file in dest that conflicts with src"""
kwargs['follow_nonexisting'] = False
@@ -61,9 +62,9 @@ class LinkTree(object):
return dest
return None
-
def merge(self, dest_root, **kwargs):
- """Link all files in src into dest, creating directories if necessary."""
+ """Link all files in src into dest, creating directories
+ if necessary."""
kwargs['order'] = 'pre'
for src, dest in traverse_tree(self._root, dest_root, **kwargs):
if os.path.isdir(src):
@@ -83,7 +84,6 @@ class LinkTree(object):
assert(not os.path.exists(dest))
os.symlink(src, dest)
-
def unmerge(self, dest_root, **kwargs):
"""Unlink all files in dest that exist in src.
diff --git a/lib/spack/llnl/util/lock.py b/lib/spack/llnl/util/lock.py
index 479a1b0167..aa8272d5fe 100644
--- a/lib/spack/llnl/util/lock.py
+++ b/lib/spack/llnl/util/lock.py
@@ -28,6 +28,9 @@ import errno
import time
import socket
+__all__ = ['Lock', 'LockTransaction', 'WriteTransaction', 'ReadTransaction',
+ 'LockError']
+
# Default timeout in seconds, after which locks will raise exceptions.
_default_timeout = 60
@@ -36,13 +39,21 @@ _sleep_time = 1e-5
class Lock(object):
- def __init__(self,file_path):
+ """This is an implementation of a filesystem lock using Python's lockf.
+
+ In Python, `lockf` actually calls `fcntl`, so this should work with any
+ filesystem implementation that supports locking through the fcntl calls.
+ This includes distributed filesystems like Lustre (when flock is enabled)
+ and recent NFS versions.
+
+ """
+
+ def __init__(self, file_path):
self._file_path = file_path
self._fd = None
self._reads = 0
self._writes = 0
-
def _lock(self, op, timeout):
"""This takes a lock using POSIX locks (``fnctl.lockf``).
@@ -63,7 +74,9 @@ class Lock(object):
fcntl.lockf(self._fd, op | fcntl.LOCK_NB)
if op == fcntl.LOCK_EX:
- os.write(self._fd, "pid=%s,host=%s" % (os.getpid(), socket.getfqdn()))
+ os.write(
+ self._fd,
+ "pid=%s,host=%s" % (os.getpid(), socket.getfqdn()))
return
except IOError as error:
@@ -75,7 +88,6 @@ class Lock(object):
raise LockError("Timed out waiting for lock.")
-
def _unlock(self):
"""Releases a lock using POSIX locks (``fcntl.lockf``)
@@ -83,11 +95,10 @@ class Lock(object):
be masquerading as write locks, but this removes either.
"""
- fcntl.lockf(self._fd,fcntl.LOCK_UN)
+ fcntl.lockf(self._fd, fcntl.LOCK_UN)
os.close(self._fd)
self._fd = None
-
def acquire_read(self, timeout=_default_timeout):
"""Acquires a recursive, shared lock for reading.
@@ -107,7 +118,6 @@ class Lock(object):
self._reads += 1
return False
-
def acquire_write(self, timeout=_default_timeout):
"""Acquires a recursive, exclusive lock for writing.
@@ -127,7 +137,6 @@ class Lock(object):
self._writes += 1
return False
-
def release_read(self):
"""Releases a read lock.
@@ -148,7 +157,6 @@ class Lock(object):
self._reads -= 1
return False
-
def release_write(self):
"""Releases a write lock.
@@ -170,6 +178,70 @@ class Lock(object):
return False
+class LockTransaction(object):
+ """Simple nested transaction context manager that uses a file lock.
+
+ This class can trigger actions when the lock is acquired for the
+ first time and released for the last.
+
+ If the acquire_fn returns a value, it is used as the return value for
+ __enter__, allowing it to be passed as the `as` argument of a `with`
+ statement.
+
+ If acquire_fn returns a context manager, *its* `__enter__` function will be
+    called in `__enter__` after acquire_fn, and its `__exit__` function will be
+ called before `release_fn` in `__exit__`, allowing you to nest a context
+ manager to be used along with the lock.
+
+ Timeout for lock is customizable.
+
+ """
+
+ def __init__(self, lock, acquire_fn=None, release_fn=None,
+ timeout=_default_timeout):
+ self._lock = lock
+ self._timeout = timeout
+ self._acquire_fn = acquire_fn
+ self._release_fn = release_fn
+ self._as = None
+
+ def __enter__(self):
+ if self._enter() and self._acquire_fn:
+ self._as = self._acquire_fn()
+ if hasattr(self._as, '__enter__'):
+ return self._as.__enter__()
+ else:
+ return self._as
+
+ def __exit__(self, type, value, traceback):
+ suppress = False
+ if self._exit():
+ if self._as and hasattr(self._as, '__exit__'):
+ if self._as.__exit__(type, value, traceback):
+ suppress = True
+ if self._release_fn:
+ if self._release_fn(type, value, traceback):
+ suppress = True
+ return suppress
+
+
+class ReadTransaction(LockTransaction):
+
+ def _enter(self):
+ return self._lock.acquire_read(self._timeout)
+
+ def _exit(self):
+ return self._lock.release_read()
+
+
+class WriteTransaction(LockTransaction):
+
+ def _enter(self):
+ return self._lock.acquire_write(self._timeout)
+
+ def _exit(self):
+ return self._lock.release_write()
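+
+# Example usage of the transaction classes (an illustrative sketch;
+# write_back is a hypothetical callback):
+#
+#     lock = Lock('/path/to/lockfile')
+#     with WriteTransaction(lock, release_fn=write_back):
+#         ...  # exclusive access; write_back runs on the final release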
+
+
class LockError(Exception):
"""Raised when an attempt to acquire a lock times out."""
- pass
diff --git a/lib/spack/llnl/util/tty/__init__.py b/lib/spack/llnl/util/tty/__init__.py
index c638b113fd..db74aaba6b 100644
--- a/lib/spack/llnl/util/tty/__init__.py
+++ b/lib/spack/llnl/util/tty/__init__.py
@@ -36,6 +36,7 @@ _debug = False
_verbose = False
indent = " "
+
def is_verbose():
return _verbose
@@ -64,12 +65,14 @@ def info(message, *args, **kwargs):
format = kwargs.get('format', '*b')
stream = kwargs.get('stream', sys.stdout)
wrap = kwargs.get('wrap', False)
+ break_long_words = kwargs.get('break_long_words', False)
cprint("@%s{==>} %s" % (format, cescape(str(message))), stream=stream)
for arg in args:
if wrap:
lines = textwrap.wrap(
- str(arg), initial_indent=indent, subsequent_indent=indent)
+ str(arg), initial_indent=indent, subsequent_indent=indent,
+ break_long_words=break_long_words)
for line in lines:
stream.write(line + '\n')
else:
@@ -146,7 +149,8 @@ def get_yes_or_no(prompt, **kwargs):
elif default_value is False:
prompt += ' [y/N] '
else:
- raise ValueError("default for get_yes_no() must be True, False, or None.")
+ raise ValueError(
+ "default for get_yes_no() must be True, False, or None.")
result = None
while result is None:
@@ -172,8 +176,9 @@ def hline(label=None, **kwargs):
char = kwargs.pop('char', '-')
max_width = kwargs.pop('max_width', 64)
if kwargs:
- raise TypeError("'%s' is an invalid keyword argument for this function."
- % next(kwargs.iterkeys()))
+ raise TypeError(
+ "'%s' is an invalid keyword argument for this function."
+ % next(kwargs.iterkeys()))
rows, cols = terminal_size()
if not cols:
@@ -198,7 +203,8 @@ def terminal_size():
"""Gets the dimensions of the console: (rows, cols)."""
def ioctl_GWINSZ(fd):
try:
- rc = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'))
+ rc = struct.unpack('hh', fcntl.ioctl(
+ fd, termios.TIOCGWINSZ, '1234'))
except:
return
return rc
diff --git a/lib/spack/llnl/util/tty/colify.py b/lib/spack/llnl/util/tty/colify.py
index 429ba45882..67acdfa517 100644
--- a/lib/spack/llnl/util/tty/colify.py
+++ b/lib/spack/llnl/util/tty/colify.py
@@ -27,15 +27,14 @@ Routines for printing columnar output. See colify() for more information.
"""
import os
import sys
-import fcntl
-import termios
-import struct
from StringIO import StringIO
from llnl.util.tty import terminal_size
from llnl.util.tty.color import clen, cextra
+
class ColumnConfig:
+
def __init__(self, cols):
self.cols = cols
self.line_length = 0
@@ -43,7 +42,8 @@ class ColumnConfig:
self.widths = [0] * cols # does not include ansi colors
def __repr__(self):
- attrs = [(a,getattr(self, a)) for a in dir(self) if not a.startswith("__")]
+ attrs = [(a, getattr(self, a))
+ for a in dir(self) if not a.startswith("__")]
return "<Config: %s>" % ", ".join("%s: %r" % a for a in attrs)
@@ -68,7 +68,7 @@ def config_variable_cols(elts, console_width, padding, cols=0):
max_cols = min(len(elts), max_cols)
# Range of column counts to try. If forced, use the supplied value.
- col_range = [cols] if cols else xrange(1, max_cols+1)
+ col_range = [cols] if cols else xrange(1, max_cols + 1)
# Determine the most columns possible for the console width.
configs = [ColumnConfig(c) for c in col_range]
@@ -106,7 +106,6 @@ def config_uniform_cols(elts, console_width, padding, cols=0):
# 'clen' ignores length of ansi color sequences.
max_len = max(clen(e) for e in elts) + padding
- max_clen = max(len(e) for e in elts) + padding
if cols == 0:
cols = max(1, console_width / max_len)
cols = min(len(elts), cols)
@@ -130,17 +129,19 @@ def colify(elts, **options):
output=<stream> A file object to write to. Default is sys.stdout.
indent=<int> Optionally indent all columns by some number of spaces.
padding=<int> Spaces between columns. Default is 2.
- width=<int> Width of the output. Default is 80 if tty is not detected.
+ width=<int> Width of the output. Default is 80 if tty not detected.
cols=<int> Force number of columns. Default is to size to terminal,
or single-column if no tty
tty=<bool> Whether to attempt to write to a tty. Default is to
- autodetect a tty. Set to False to force single-column output.
+ autodetect a tty. Set to False to force
+ single-column output.
- method=<string> Method to use to fit columns. Options are variable or uniform.
- Variable-width columns are tighter, uniform columns are all the
- same width and fit less data on the screen.
+ method=<string> Method to use to fit columns. Options are variable or
+ uniform. Variable-width columns are tighter, uniform
+ columns are all the same width and fit less data on
+ the screen.
"""
# Get keyword arguments or set defaults
cols = options.pop("cols", 0)
@@ -152,8 +153,9 @@ def colify(elts, **options):
console_cols = options.pop("width", None)
if options:
- raise TypeError("'%s' is an invalid keyword argument for this function."
- % next(options.iterkeys()))
+ raise TypeError(
+ "'%s' is an invalid keyword argument for this function."
+ % next(options.iterkeys()))
# elts needs to be an array of strings so we can count the elements
elts = [str(elt) for elt in elts]
@@ -167,7 +169,8 @@ def colify(elts, **options):
r, c = env_size.split('x')
console_rows, console_cols = int(r), int(c)
tty = True
- except: pass
+ except:
+ pass
# Use only one column if not a tty.
if not tty:
@@ -198,8 +201,13 @@ def colify(elts, **options):
for col in xrange(cols):
elt = col * rows + row
width = config.widths[col] + cextra(elts[elt])
- fmt = '%%-%ds' % width
- output.write(fmt % elts[elt])
+ if col < cols - 1:
+ fmt = '%%-%ds' % width
+ output.write(fmt % elts[elt])
+ else:
+                # Don't pad the rightmost column (spaces can wrap on
+                # small terminals if one line is overlong)
+ output.write(elts[elt])
output.write("\n")
row += 1
@@ -223,6 +231,7 @@ def colify_table(table, **options):
raise ValueError("Table is empty in colify_table!")
columns = len(table[0])
+
def transpose():
for i in xrange(columns):
for row in table:
diff --git a/lib/spack/llnl/util/tty/color.py b/lib/spack/llnl/util/tty/color.py
index 0abcb09b97..b0c00f1502 100644
--- a/lib/spack/llnl/util/tty/color.py
+++ b/lib/spack/llnl/util/tty/color.py
@@ -75,25 +75,27 @@ To output an @, use '@@'. To output a } inside braces, use '}}'.
import re
import sys
+
class ColorParseError(Exception):
"""Raised when a color format fails to parse."""
+
def __init__(self, message):
super(ColorParseError, self).__init__(message)
# Text styles for ansi codes
-styles = {'*' : '1', # bold
- '_' : '4', # underline
- None : '0' } # plain
+styles = {'*': '1', # bold
+ '_': '4', # underline
+ None: '0'} # plain
# Dim and bright ansi colors
-colors = {'k' : 30, 'K' : 90, # black
- 'r' : 31, 'R' : 91, # red
- 'g' : 32, 'G' : 92, # green
- 'y' : 33, 'Y' : 93, # yellow
- 'b' : 34, 'B' : 94, # blue
- 'm' : 35, 'M' : 95, # magenta
- 'c' : 36, 'C' : 96, # cyan
- 'w' : 37, 'W' : 97 } # white
+colors = {'k': 30, 'K': 90, # black
+ 'r': 31, 'R': 91, # red
+ 'g': 32, 'G': 92, # green
+ 'y': 33, 'Y': 93, # yellow
+ 'b': 34, 'B': 94, # blue
+ 'm': 35, 'M': 95, # magenta
+ 'c': 36, 'C': 96, # cyan
+ 'w': 37, 'W': 97} # white
# Regex to be used for color formatting
color_re = r'@(?:@|\.|([*_])?([a-zA-Z])?(?:{((?:[^}]|}})*)})?)'
@@ -104,6 +106,7 @@ _force_color = False
class match_to_ansi(object):
+
def __init__(self, color=True):
self.color = color
@@ -179,12 +182,14 @@ def cprint(string, stream=sys.stdout, color=None):
"""Same as cwrite, but writes a trailing newline to the stream."""
cwrite(string + "\n", stream, color)
+
def cescape(string):
"""Replace all @ with @@ in the string provided."""
return str(string).replace('@', '@@')
class ColorStream(object):
+
def __init__(self, stream, color=None):
self._stream = stream
self._color = color
@@ -196,7 +201,7 @@ class ColorStream(object):
color = self._color
if self._color is None:
if raw:
- color=True
+ color = True
else:
color = self._stream.isatty() or _force_color
raw_write(colorize(string, color=color))
diff --git a/lib/spack/llnl/util/tty/log.py b/lib/spack/llnl/util/tty/log.py
index ca82da7b17..b67edcf9cc 100644
--- a/lib/spack/llnl/util/tty/log.py
+++ b/lib/spack/llnl/util/tty/log.py
@@ -36,6 +36,7 @@ import llnl.util.tty.color as color
# Use this to strip escape sequences
_escape = re.compile(r'\x1b[^m]*m|\x1b\[?1034h')
+
def _strip(line):
"""Strip color and control characters from a line."""
return _escape.sub('', line)
@@ -58,10 +59,10 @@ class keyboard_input(object):
When the with block completes, this will restore settings before
canonical and echo were disabled.
"""
+
def __init__(self, stream):
self.stream = stream
-
def __enter__(self):
self.old_cfg = None
@@ -86,10 +87,9 @@ class keyboard_input(object):
# Apply new settings for terminal
termios.tcsetattr(fd, termios.TCSADRAIN, self.new_cfg)
- except Exception, e:
+ except Exception:
pass # Some OS's do not support termios, so ignore.
-
def __exit__(self, exc_type, exception, traceback):
        # If termios was available, restore old settings after the
# with block
@@ -114,6 +114,7 @@ class log_output(object):
Closes the provided stream when done with the block.
If echo is True, also prints the output to stdout.
"""
+
def __init__(self, stream, echo=False, force_color=False, debug=False):
self.stream = stream
@@ -122,7 +123,7 @@ class log_output(object):
self.force_color = force_color
self.debug = debug
- # Default is to try file-descriptor reassignment unless the system
+ # Default is to try file-descriptor reassignment unless the system
# out/err streams do not have an associated file descriptor
self.directAssignment = False
@@ -130,7 +131,6 @@ class log_output(object):
"""Jumps to __exit__ on the child process."""
raise _SkipWithBlock()
-
def __enter__(self):
"""Redirect output from the with block to a file.
@@ -154,7 +154,8 @@ class log_output(object):
with self.stream as log_file:
with keyboard_input(sys.stdin):
while True:
- rlist, w, x = select.select([read_file, sys.stdin], [], [])
+ rlist, w, x = select.select(
+ [read_file, sys.stdin], [], [])
if not rlist:
break
@@ -211,7 +212,6 @@ class log_output(object):
if self.debug:
tty._debug = True
-
def __exit__(self, exc_type, exception, traceback):
"""Exits on child, handles skipping the with block on parent."""
# Child should just exit here.
@@ -235,7 +235,7 @@ class log_output(object):
sys.stderr = self._stderr
else:
os.dup2(self._stdout, sys.stdout.fileno())
- os.dup2(self._stderr, sys.stderr.fileno())
+ os.dup2(self._stderr, sys.stderr.fileno())
return False
diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py
index 8c6e0ba527..3d508d0fde 100644
--- a/lib/spack/spack/__init__.py
+++ b/lib/spack/spack/__init__.py
@@ -1,3 +1,4 @@
+# flake8: noqa
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
@@ -39,13 +40,26 @@ spack_file = join_path(spack_root, "bin", "spack")
lib_path = join_path(spack_root, "lib", "spack")
build_env_path = join_path(lib_path, "env")
module_path = join_path(lib_path, "spack")
+platform_path = join_path(module_path, 'platforms')
compilers_path = join_path(module_path, "compilers")
+operating_system_path = join_path(module_path, 'operating_systems')
test_path = join_path(module_path, "test")
hooks_path = join_path(module_path, "hooks")
var_path = join_path(spack_root, "var", "spack")
stage_path = join_path(var_path, "stage")
repos_path = join_path(var_path, "repos")
share_path = join_path(spack_root, "share", "spack")
+cache_path = join_path(var_path, "cache")
+
+# User configuration location
+user_config_path = os.path.expanduser('~/.spack')
+
+import spack.fetch_strategy
+fetch_cache = spack.fetch_strategy.FsCache(cache_path)
+
+from spack.file_cache import FileCache
+user_cache_path = join_path(user_config_path, 'cache')
+user_cache = FileCache(user_cache_path)
prefix = spack_root
opt_path = join_path(prefix, "opt")
@@ -134,7 +148,7 @@ _tmp_user = getpass.getuser()
_tmp_candidates = (_default_tmp, '/nfs/tmp2', '/tmp', '/var/tmp')
for path in _tmp_candidates:
# don't add a second username if it's already unique by user.
- if not _tmp_user in path:
+ if _tmp_user not in path:
tmp_dirs.append(join_path(path, '%u', 'spack-stage'))
else:
tmp_dirs.append(join_path(path, 'spack-stage'))
@@ -166,13 +180,17 @@ sys_type = None
# Spack internal code should call 'import spack' and accesses other
# variables (spack.repo, paths, etc.) directly.
#
-# TODO: maybe this should be separated out and should go in build_environment.py?
-# TODO: it's not clear where all the stuff that needs to be included in packages
-# should live. This file is overloaded for spack core vs. for packages.
+# TODO: maybe this should be separated out to build_environment.py?
+# TODO: it's not clear where all the stuff that needs to be included in
+# packages should live. This file is overloaded for spack core vs.
+# for packages.
#
-__all__ = ['Package', 'Version', 'when', 'ver']
+__all__ = ['Package', 'StagedPackage', 'CMakePackage',
+ 'Version', 'when', 'ver', 'alldeps', 'nolink']
from spack.package import Package, ExtensionConflictError
+from spack.package import StagedPackage, CMakePackage
from spack.version import Version, ver
+from spack.spec import DependencySpec, alldeps, nolink
from spack.multimethod import when
import llnl.util.filesystem
@@ -188,8 +206,8 @@ from spack.util.executable import *
__all__ += spack.util.executable.__all__
from spack.package import \
- install_dependency_symlinks, flatten_dependencies, DependencyConflictError, \
- InstallError, ExternalPackageError
+ install_dependency_symlinks, flatten_dependencies, \
+ DependencyConflictError, InstallError, ExternalPackageError
__all__ += [
- 'install_dependency_symlinks', 'flatten_dependencies', 'DependencyConflictError',
- 'InstallError', 'ExternalPackageError']
+ 'install_dependency_symlinks', 'flatten_dependencies',
+ 'DependencyConflictError', 'InstallError', 'ExternalPackageError']
diff --git a/lib/spack/spack/abi.py b/lib/spack/spack/abi.py
index 91d1d2003d..064abb9782 100644
--- a/lib/spack/spack/abi.py
+++ b/lib/spack/spack/abi.py
@@ -30,14 +30,15 @@ from spack.spec import CompilerSpec
from spack.util.executable import Executable, ProcessError
from llnl.util.lang import memoized
+
class ABI(object):
"""This class provides methods to test ABI compatibility between specs.
The current implementation is rather rough and could be improved."""
def architecture_compatible(self, parent, child):
- """Returns true iff the parent and child specs have ABI compatible architectures."""
- return not parent.architecture or not child.architecture or parent.architecture == child.architecture
-
+ """Return true if parent and child have ABI compatible targets."""
+ return not parent.architecture or not child.architecture or \
+ parent.architecture == child.architecture
@memoized
def _gcc_get_libstdcxx_version(self, version):
@@ -60,8 +61,9 @@ class ABI(object):
else:
return None
try:
- output = rungcc("--print-file-name=%s" % libname, return_output=True)
- except ProcessError, e:
+ output = rungcc("--print-file-name=%s" % libname,
+ return_output=True)
+ except ProcessError:
return None
if not output:
return None
@@ -70,7 +72,6 @@ class ABI(object):
return None
return os.path.basename(libpath)
-
@memoized
def _gcc_compiler_compare(self, pversion, cversion):
"""Returns true iff the gcc version pversion and cversion
@@ -81,7 +82,6 @@ class ABI(object):
return False
return plib == clib
-
def _intel_compiler_compare(self, pversion, cversion):
"""Returns true iff the intel version pversion and cversion
are ABI compatible"""
@@ -91,9 +91,8 @@ class ABI(object):
return False
return pversion.version[:2] == cversion.version[:2]
-
def compiler_compatible(self, parent, child, **kwargs):
- """Returns true iff the compilers for parent and child specs are ABI compatible"""
+ """Return true if compilers for parent and child are ABI compatible."""
if not parent.compiler or not child.compiler:
return True
@@ -108,8 +107,8 @@ class ABI(object):
# TODO: into compiler classes?
for pversion in parent.compiler.versions:
for cversion in child.compiler.versions:
- # For a few compilers use specialized comparisons. Otherwise
- # match on version match.
+ # For a few compilers use specialized comparisons.
+ # Otherwise match on version match.
if pversion.satisfies(cversion):
return True
elif (parent.compiler.name == "gcc" and
@@ -120,9 +119,8 @@ class ABI(object):
return True
return False
-
def compatible(self, parent, child, **kwargs):
"""Returns true iff a parent and child spec are ABI compatible"""
loosematch = kwargs.get('loose', False)
return self.architecture_compatible(parent, child) and \
- self.compiler_compatible(parent, child, loose=loosematch)
+ self.compiler_compatible(parent, child, loose=loosematch)
diff --git a/lib/spack/spack/architecture.py b/lib/spack/spack/architecture.py
index b14cb2bea2..0d210f9741 100644
--- a/lib/spack/spack/architecture.py
+++ b/lib/spack/spack/architecture.py
@@ -22,68 +22,523 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+"""
+This module contains all the elements that are required to create an
+architecture object: the target processor, the operating system, and the
+architecture platform (i.e. cray, darwin, linux, bgq, etc.) classes.
+
+On a multiple-architecture machine, the architecture spec field can be set to
+build a package against any target and operating system that is present on the
+platform. On Cray platforms, or any other architecture that has distinct
+front- and back-end environments, the operating system determines the method
+of compiler detection.
+
+There are two different types of compiler detection:
+ 1. Through the $PATH env variable (front-end detection)
+ 2. Through the tcl module system (back-end detection)
+
+Depending on which operating system is specified, the compiler will be detected
+using one of those methods.
+
+For platforms such as linux and darwin, the operating system is autodetected
+and the target is set to be x86_64.
+
+The command line syntax for specifying an architecture is as follows:
+
+ target=<Target name> os=<OperatingSystem name>
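+
+For example (an illustrative command; the frontend/backend aliases are
+described below):
+
+    spack install libelf os=frontend target=backend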
+
+If the user wishes to use the defaults, either target or os can be left out of
+the command line and Spack will concretize using the default. These defaults
+are set in the 'platforms/' directory, which contains the different platform
+subclasses. If the machine has multiple architectures, the user can
+also enter frontend (or fe) or backend (or be); these aliases concretize
+to the respective front-end and back-end targets and operating systems.
+Additional platforms can be added by creating a subclass of Platform
+and adding it inside the platform directory.
+
+Platform is an abstract class that is extended by subclasses. If the user
+wants to add a new type of platform (such as cray_xe), they can create a
+subclass and set all the class attributes, such as priority, front_target,
+back_target, front_os, and back_os. Platforms also contain a priority class
+attribute: a lower number signifies higher priority. These numbers are
+set arbitrarily and can be changed, though there is usually little need unless
+a new platform is added and the user wants it to be detected first.
+
+Targets are created inside the platform subclasses. Most architectures
+(like linux and darwin) have only one target (x86_64), but Cray machines
+have both a front-end and a back-end processor. The user can
+specify which targets are present on the front-end and back-end architecture.
+
+Depending on the platform, operating systems are either auto-detected or are
+set explicitly. The user can set the front-end and back-end operating systems
+via the class attributes front_os and back_os. The operating system, as
+described earlier, is responsible for compiler detection.
+"""
import os
-import re
-import platform
+import inspect
-from llnl.util.lang import memoized
+from llnl.util.lang import memoized, list_modules, key_ordering
+from llnl.util.filesystem import join_path
+import llnl.util.tty as tty
import spack
+import spack.compilers
+from spack.util.naming import mod_to_class
+from spack.util.environment import get_path
+from spack.util.multiproc import parmap
import spack.error as serr
-class InvalidSysTypeError(serr.SpackError):
- def __init__(self, sys_type):
- super(InvalidSysTypeError,
- self).__init__("Invalid sys_type value for Spack: " + sys_type)
-
+class NoPlatformError(serr.SpackError):
-class NoSysTypeError(serr.SpackError):
def __init__(self):
- super(NoSysTypeError,
- self).__init__("Could not determine sys_type for this machine.")
+ super(NoPlatformError, self).__init__(
+ "Could not determine a platform for this machine.")
+
+
+@key_ordering
+class Target(object):
+ """ Target is the processor of the host machine.
+ The host machine may have different front-end and back-end targets,
+ especially if it is a Cray machine. The target will have a name and
+ also the module_name (e.g. craype-ivybridge). Targets will also
+ recognize which platform they came from using the set_platform method.
+ Targets will have compiler-finding strategies.
+ """
+
+ def __init__(self, name, module_name=None):
+ self.name = name # e.g. 'ivybridge' on Cray, 'x86_64' elsewhere
+ self.module_name = module_name # e.g. craype-ivybridge
+
+ # Sets only the platform name to avoid recursiveness
+
+ def _cmp_key(self):
+ return (self.name, self.module_name)
+
+ def __repr__(self):
+ return self.__str__()
+
+ def __str__(self):
+ return self.name
+
+
+@key_ordering
+class Platform(object):
+ """ Abstract class that each type of Platform will subclass.
+ Will return a instance of it once it
+ is returned
+ """
+
+ priority = None # Subclass sets number. Controls detection order
+ front_end = None
+ back_end = None
+ default = None # The default back-end target (e.g. ivybridge on Cray)
+
+ front_os = None
+ back_os = None
+ default_os = None
+
+ def __init__(self, name):
+ self.targets = {}
+ self.operating_sys = {}
+ self.name = name
+
+ def add_target(self, name, target):
+ """Used by the platform specific subclass to list available targets.
+ Raises an error if the platform specifies a name
+ that is reserved by spack as an alias.
+ """
+ if name in ['frontend', 'fe', 'backend', 'be', 'default_target']:
+ raise ValueError(
+ "%s is a spack reserved alias "
+ "and cannot be the name of a target" % name)
+ self.targets[name] = target
+
+ def target(self, name):
+ """This is a getter method for the target dictionary
+ that handles defaulting based on the values provided by default,
+ front-end, and back-end. This can be overridden
+ by a subclass for which we want to provide further aliasing options.
+ """
+ if name == 'default_target':
+ name = self.default
+ elif name == 'frontend' or name == 'fe':
+ name = self.front_end
+ elif name == 'backend' or name == 'be':
+ name = self.back_end
+
+ return self.targets.get(name, None)
+
+ def add_operating_system(self, name, os_class):
+ """ Add the operating_system class object into the
+ platform.operating_sys dictionary
+ """
+ if name in ['frontend', 'fe', 'backend', 'be', 'default_os']:
+ raise ValueError(
+ "%s is a spack reserved alias "
+ "and cannot be the name of an OS" % name)
+ self.operating_sys[name] = os_class
+
+ def operating_system(self, name):
+ if name == 'default_os':
+ name = self.default_os
+ if name == 'frontend' or name == "fe":
+ name = self.front_os
+ if name == 'backend' or name == 'be':
+ name = self.back_os
+
+ return self.operating_sys.get(name, None)
+
+ @classmethod
+ def setup_platform_environment(cls, pkg, env):
+ """ Subclass can override this method if it requires any
+ platform-specific build environment modifications.
+ """
+ pass
+
+ @classmethod
+ def detect(cls):
+ """ Subclass is responsible for implementing this method.
+ Returns True if the Platform class detects that
+ it is the current platform
+ and False if it's not.
+ """
+ raise NotImplementedError()
+
+ def __repr__(self):
+ return self.__str__()
+
+ def __str__(self):
+ return self.name
+
+ def _cmp_key(self):
+ t_keys = ''.join(str(t._cmp_key()) for t in
+ sorted(self.targets.values()))
+ o_keys = ''.join(str(o._cmp_key()) for o in
+ sorted(self.operating_sys.values()))
+ return (self.name,
+ self.default,
+ self.front_end,
+ self.back_end,
+ self.default_os,
+ self.front_os,
+ self.back_os,
+ t_keys,
+ o_keys)
+
+
+@key_ordering
+class OperatingSystem(object):
+ """ Operating System will be like a class similar to platform extended
+ by subclasses for the specifics. Operating System will contain the
+ compiler finding logic. Instead of calling two separate methods to
+ find compilers we call find_compilers method for each operating system
+ """
+
+ def __init__(self, name, version):
+ self.name = name
+ self.version = version
+
+ def __str__(self):
+ return self.name + self.version
+
+ def __repr__(self):
+ return self.__str__()
+
+ def _cmp_key(self):
+ return (self.name, self.version)
+
+ def find_compilers(self, *paths):
+ """
+ Return a list of compilers found in the supplied paths.
+ This invokes the find_compiler() method for each Compiler class,
+ and appends the compilers detected to a list.
+ """
+ if not paths:
+ paths = get_path('PATH')
+ # Make sure path elements exist, and include /bin directories
+ # under prefixes.
+ filtered_path = []
+ for p in paths:
+ # Eliminate symlinks and just take the real directories.
+ p = os.path.realpath(p)
+ if not os.path.isdir(p):
+ continue
+ filtered_path.append(p)
+
+ # Check for a bin directory, add it if it exists
+ bin = join_path(p, 'bin')
+ if os.path.isdir(bin):
+ filtered_path.append(os.path.realpath(bin))
+
+ # Once the paths are cleaned up, do a search for each type of
+ # compiler. We can spawn a bunch of parallel searches to reduce
+ # the overhead of spelunking all these directories.
+ types = spack.compilers.all_compiler_types()
+ compiler_lists = parmap(lambda cmp_cls:
+ self.find_compiler(cmp_cls, *filtered_path),
+ types)
+
+ # ensure all the version calls we made are cached in the parent
+ # process, as well. This speeds up Spack a lot.
+ clist = reduce(lambda x, y: x + y, compiler_lists)
+ return clist
+
+ def find_compiler(self, cmp_cls, *path):
+ """Try to find the given type of compiler in the user's
+ environment. For each set of compilers found, this returns
+ compiler objects with the cc, cxx, f77, fc paths and the
+ version filled in.
+ This will search for compilers with the names in cc_names,
+ cxx_names, etc. and it will group them if they have common
+ prefixes, suffixes, and versions. e.g., gcc-mp-4.7 would
+ be grouped with g++-mp-4.7 and gfortran-mp-4.7.
+ """
+ dicts = parmap(
+ lambda t: cmp_cls._find_matches_in_path(*t),
+ [(cmp_cls.cc_names, cmp_cls.cc_version) + tuple(path),
+ (cmp_cls.cxx_names, cmp_cls.cxx_version) + tuple(path),
+ (cmp_cls.f77_names, cmp_cls.f77_version) + tuple(path),
+ (cmp_cls.fc_names, cmp_cls.fc_version) + tuple(path)])
-def get_sys_type_from_spack_globals():
- """Return the SYS_TYPE from spack globals, or None if it isn't set."""
- if not hasattr(spack, "sys_type"):
- return None
- elif hasattr(spack.sys_type, "__call__"):
- return spack.sys_type()
+ all_keys = set()
+ for d in dicts:
+ all_keys.update(d)
+
+ compilers = {}
+ for k in all_keys:
+ ver, pre, suf = k
+
+ # Skip compilers with unknown version.
+ if ver == 'unknown':
+ continue
+
+ paths = tuple(pn[k] if k in pn else None for pn in dicts)
+ spec = spack.spec.CompilerSpec(cmp_cls.name, ver)
+
+ if ver in compilers:
+ prev = compilers[ver]
+
+ # prefer the one with more compilers.
+ prev_paths = [prev.cc, prev.cxx, prev.f77, prev.fc]
+ newcount = len([p for p in paths if p is not None])
+ prevcount = len([p for p in prev_paths if p is not None])
+
+ # Don't add if it's not an improvement over prev compiler.
+ if newcount <= prevcount:
+ continue
+
+ compilers[ver] = cmp_cls(spec, self, paths)
+
+ return list(compilers.values())
+
+ def to_dict(self):
+ d = {}
+ d['name'] = self.name
+ d['version'] = self.version
+ return d
+
+
+@key_ordering
+class Arch(object):
+ """Architecture is now a class to help with setting attributes.
+
+ TODO: refactor so that we don't need this class.
+ """
+
+ def __init__(self, plat=None, os=None, target=None):
+ self.platform = plat
+ if plat and os:
+ os = self.platform.operating_system(os)
+ self.platform_os = os
+ if plat and target:
+ target = self.platform.target(target)
+ self.target = target
+
+ # Hooks for parser to use when platform is set after target or os
+ self.target_string = None
+ self.os_string = None
+
+ @property
+ def concrete(self):
+ return all((self.platform is not None,
+ isinstance(self.platform, Platform),
+ self.platform_os is not None,
+ isinstance(self.platform_os, OperatingSystem),
+ self.target is not None, isinstance(self.target, Target)))
+
+ def __str__(self):
+ if self.platform or self.platform_os or self.target:
+ if self.platform and self.platform.name == 'darwin':
+ os_name = self.platform_os.name if self.platform_os else "None"
+ else:
+ os_name = str(self.platform_os)
+
+ return (str(self.platform) + "-" +
+ os_name + "-" + str(self.target))
+ else:
+ return ''
+
+ def __contains__(self, string):
+ return string in str(self)
+
+ # TODO: make this unnecessary: don't include an empty arch on *every* spec.
+ def __nonzero__(self):
+ return (self.platform is not None or
+ self.platform_os is not None or
+ self.target is not None)
+ __bool__ = __nonzero__
+
+ def _cmp_key(self):
+ if isinstance(self.platform, Platform):
+ platform = self.platform.name
+ else:
+ platform = self.platform
+ if isinstance(self.platform_os, OperatingSystem):
+ platform_os = self.platform_os.name
+ else:
+ platform_os = self.platform_os
+ if isinstance(self.target, Target):
+ target = self.target.name
+ else:
+ target = self.target
+ return (platform, platform_os, target)
+
+ def to_dict(self):
+ d = {}
+ d['platform'] = str(self.platform) if self.platform else None
+ d['platform_os'] = str(self.platform_os) if self.platform_os else None
+ d['target'] = str(self.target) if self.target else None
+
+ return d
+
+
+def _target_from_dict(target_name, plat=None):
+ """ Creates new instance of target and assigns all the attributes of
+ that target from the dictionary
+ """
+ if not plat:
+ plat = platform()
+ return plat.target(target_name)
+
+
+def _operating_system_from_dict(os_name, plat=None):
+ """ uses platform's operating system method to grab the constructed
+ operating systems that are valid on the platform.
+ """
+ if not plat:
+ plat = platform()
+ if isinstance(os_name, dict):
+ name = os_name['name']
+ version = os_name['version']
+ return plat.operating_system(name + version)
else:
- return spack.sys_type
+ return plat.operating_system(os_name)
+
+def _platform_from_dict(platform_name):
+ """ Constructs a platform from a dictionary. """
+ platform_list = all_platforms()
+ for p in platform_list:
+ if platform_name.replace("_", "").lower() == p.__name__.lower():
+ return p()
-def get_sys_type_from_environment():
- """Return $SYS_TYPE or None if it's not defined."""
- return os.environ.get('SYS_TYPE')
+def arch_from_dict(d):
+ """ Uses _platform_from_dict, _operating_system_from_dict, _target_from_dict
+ helper methods to recreate the arch tuple from the dictionary read from
+ a yaml file
+ """
+ arch = Arch()
-def get_sys_type_from_platform():
- """Return the architecture from Python's platform module."""
- sys_type = platform.system() + '-' + platform.machine()
- sys_type = re.sub(r'[^\w-]', '_', sys_type)
- return sys_type.lower()
+ if isinstance(d, basestring):
+ # We have an old spec using a string for the architecture
+ arch.platform = Platform('spack_compatibility')
+ arch.platform_os = OperatingSystem('unknown', '')
+ arch.target = Target(d)
+
+ arch.os_string = None
+ arch.target_string = None
+ else:
+ if d is None:
+ return None
+ platform_name = d['platform']
+ os_name = d['platform_os']
+ target_name = d['target']
+
+ if platform_name:
+ arch.platform = _platform_from_dict(platform_name)
+ else:
+ arch.platform = None
+ if target_name:
+ arch.target = _target_from_dict(target_name, arch.platform)
+ else:
+ arch.target = None
+ if os_name:
+ arch.platform_os = _operating_system_from_dict(os_name,
+ arch.platform)
+ else:
+ arch.platform_os = None
+
+ arch.os_string = None
+ arch.target_string = None
+
+ return arch
@memoized
-def sys_type():
- """Returns a SysType for the current machine."""
- methods = [get_sys_type_from_spack_globals, get_sys_type_from_environment,
- get_sys_type_from_platform]
+def all_platforms():
+ classes = []
+ mod_path = spack.platform_path
+ parent_module = "spack.platforms"
- # search for a method that doesn't return None
- sys_type = None
- for method in methods:
- sys_type = method()
- if sys_type:
- break
+ for name in list_modules(mod_path):
+ mod_name = '%s.%s' % (parent_module, name)
+ class_name = mod_to_class(name)
+ mod = __import__(mod_name, fromlist=[class_name])
+ if not hasattr(mod, class_name):
+ tty.die('No class %s defined in %s' % (class_name, mod_name))
+ cls = getattr(mod, class_name)
+ if not inspect.isclass(cls):
+ tty.die('%s.%s is not a class' % (mod_name, class_name))
+
+ classes.append(cls)
+
+ return classes
+
+
+@memoized
+def platform():
+ """Detects the platform for this machine.
+
+ Gathers a list of all available subclasses of Platform and sorts it
+ by priority (an arbitrarily set number; a lower number is tried
+ first). Each candidate detects itself using uname output or a file
+ path (e.g. /opt/cray).
+ """
+ # Instantiate the highest-priority platform that detects this machine.
+ platform_list = all_platforms()
+ platform_list.sort(key=lambda a: a.priority)
+
+ for platform_cls in platform_list:
+ if platform_cls.detect():
+ return platform_cls()
+
+
+@memoized
+def sys_type():
+ """Print out the "default" platform-os-target tuple for this machine.
- # Couldn't determine the sys_type for this machine.
- if sys_type is None:
- return "unknown_arch"
+ On machines with only one OS/target, returns the platform-os-target
+ for the frontend. For machines with a frontend and a backend,
+ returns the default backend.
- if not isinstance(sys_type, basestring):
- raise InvalidSysTypeError(sys_type)
+ TODO: replace with use of more explicit methods to get *all* the
+ backends, as client code should really be aware of cross-compiled
+ architectures.
- return sys_type
+ """
+ arch = Arch(platform(), 'default_os', 'default_target')
+ return str(arch)
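
The Platform API above is easiest to see in a minimal subclass. The sketch below is illustrative only: the class name, target names, OS names, and module name are hypothetical rather than taken from Spack's platforms/ directory, and detect() is stubbed out.

# A minimal, hypothetical Platform subclass following the API in the
# diff above; real subclasses live in lib/spack/spack/platforms/.
from spack.architecture import Platform, Target, OperatingSystem

class MyCluster(Platform):
    priority = 50          # lower number = checked earlier by platform()
    front_end = 'x86_64'
    back_end = 'ivybridge'
    default = 'ivybridge'  # default back-end target

    front_os = 'SuSE11'
    back_os = 'CNL10'
    default_os = 'CNL10'

    def __init__(self):
        super(MyCluster, self).__init__('mycluster')
        self.add_target('x86_64', Target('x86_64'))
        self.add_target('ivybridge',
                        Target('ivybridge', 'craype-ivybridge'))
        self.add_operating_system('SuSE11', OperatingSystem('SuSE', '11'))
        self.add_operating_system('CNL10', OperatingSystem('CNL', '10'))

    @classmethod
    def detect(cls):
        # A real platform would check uname output or a path
        # such as /opt/cray here.
        return False

Once a subclass like this is registered, 'target=fe', 'target=be', 'os=frontend', and 'os=backend' on the command line resolve through Platform.target() and Platform.operating_system() to the class attributes set here.
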
diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py
index d87aaa6285..5affd3c7c5 100644
--- a/lib/spack/spack/build_environment.py
+++ b/lib/spack/spack/build_environment.py
@@ -75,20 +75,19 @@ SPACK_NO_PARALLEL_MAKE = 'SPACK_NO_PARALLEL_MAKE'
# set_build_environment_variables and used to pass parameters to
# Spack's compiler wrappers.
#
-SPACK_ENV_PATH = 'SPACK_ENV_PATH'
-SPACK_DEPENDENCIES = 'SPACK_DEPENDENCIES'
-SPACK_PREFIX = 'SPACK_PREFIX'
-SPACK_INSTALL = 'SPACK_INSTALL'
-SPACK_DEBUG = 'SPACK_DEBUG'
-SPACK_SHORT_SPEC = 'SPACK_SHORT_SPEC'
-SPACK_DEBUG_LOG_DIR = 'SPACK_DEBUG_LOG_DIR'
+SPACK_ENV_PATH = 'SPACK_ENV_PATH'
+SPACK_DEPENDENCIES = 'SPACK_DEPENDENCIES'
+SPACK_PREFIX = 'SPACK_PREFIX'
+SPACK_INSTALL = 'SPACK_INSTALL'
+SPACK_DEBUG = 'SPACK_DEBUG'
+SPACK_SHORT_SPEC = 'SPACK_SHORT_SPEC'
+SPACK_DEBUG_LOG_DIR = 'SPACK_DEBUG_LOG_DIR'
# Platform-specific library suffix.
dso_suffix = 'dylib' if sys.platform == 'darwin' else 'so'
-
class MakeExecutable(Executable):
"""Special callable executable object for make so the user can
specify parallel or not on a per-invocation basis. Using
@@ -99,6 +98,7 @@ class MakeExecutable(Executable):
Note that if the SPACK_NO_PARALLEL_MAKE env var is set it overrides
everything.
"""
+
def __init__(self, name, jobs):
super(MakeExecutable, self).__init__(name)
self.jobs = jobs
@@ -114,32 +114,95 @@ class MakeExecutable(Executable):
return super(MakeExecutable, self).__call__(*args, **kwargs)
+def load_module(mod):
+ """Takes a module name and removes modules until it is possible to
+ load that module. It then loads the provided module. Depends on the
+ modulecmd implementation of modules used on Cray and by lmod.
+ """
+ # Create an executable of the module command that will output python code
+ modulecmd = which('modulecmd')
+ modulecmd.add_default_arg('python')
+
+ # Read the module and remove any conflicting modules
+ # We do this without checking that they are already installed
+ # for ease of programming because unloading a module that is not
+ # loaded does nothing.
+ text = modulecmd('show', mod, output=str, error=str).split()
+ for i, word in enumerate(text):
+ if word == 'conflict':
+ exec(compile(modulecmd('unload', text[i + 1], output=str,
+ error=str), '<string>', 'exec'))
+ # Load the module now that there are no conflicts
+ load = modulecmd('load', mod, output=str, error=str)
+ exec(compile(load, '<string>', 'exec'))
+
+
+def get_path_from_module(mod):
+ """Inspects a TCL module for entries that indicate the absolute path
+ at which the library supported by said module can be found.
+ """
+ # Create a modulecmd executable
+ modulecmd = which('modulecmd')
+ modulecmd.add_default_arg('python')
+
+ # Read the module
+ text = modulecmd('show', mod, output=str, error=str).split('\n')
+ # If it lists its package directory, return that
+ for line in text:
+ if line.find(mod.upper() + '_DIR') >= 0:
+ words = line.split()
+ return words[2]
+
+ # If it lists a -rpath instruction, use that
+ for line in text:
+ rpath = line.find('-rpath/')
+ if rpath >= 0:
+ return line[rpath + 6:line.find('/lib')]
+
+ # If it lists a -L instruction, use that
+ for line in text:
+ L = line.find('-L/')
+ if L >= 0:
+ return line[L + 2:line.find('/lib')]
+
+ # If it sets the LD_LIBRARY_PATH or CRAY_LD_LIBRARY_PATH, use that
+ for line in text:
+ if line.find('LD_LIBRARY_PATH') >= 0:
+ words = line.split()
+ path = words[2]
+ return path[:path.find('/lib')]
+ # Unable to find module path
+ return None
+
+
def set_compiler_environment_variables(pkg, env):
- assert pkg.spec.concrete
+ assert(pkg.spec.concrete)
compiler = pkg.compiler
flags = pkg.spec.compiler_flags
# Set compiler variables used by CMake and autotools
- assert all(key in compiler.link_paths for key in ('cc', 'cxx', 'f77', 'fc'))
+ assert all(key in compiler.link_paths for key in (
+ 'cc', 'cxx', 'f77', 'fc'))
# Populate an object with the list of environment modifications
# and return it
- # TODO : add additional kwargs for better diagnostics, like requestor, ttyout, ttyerr, etc.
+ # TODO : add additional kwargs for better diagnostics, like requestor,
+ # ttyout, ttyerr, etc.
link_dir = spack.build_env_path
- env.set('CC', join_path(link_dir, compiler.link_paths['cc']))
- env.set('CXX', join_path(link_dir, compiler.link_paths['cxx']))
- env.set('F77', join_path(link_dir, compiler.link_paths['f77']))
- env.set('FC', join_path(link_dir, compiler.link_paths['fc']))
# Set SPACK compiler variables so that our wrapper knows what to call
if compiler.cc:
env.set('SPACK_CC', compiler.cc)
+ env.set('CC', join_path(link_dir, compiler.link_paths['cc']))
if compiler.cxx:
env.set('SPACK_CXX', compiler.cxx)
+ env.set('CXX', join_path(link_dir, compiler.link_paths['cxx']))
if compiler.f77:
env.set('SPACK_F77', compiler.f77)
+ env.set('F77', join_path(link_dir, compiler.link_paths['f77']))
if compiler.fc:
env.set('SPACK_FC', compiler.fc)
+ env.set('FC', join_path(link_dir, compiler.link_paths['fc']))
# Set SPACK compiler rpath flags so that our wrapper knows what to use
env.set('SPACK_CC_RPATH_ARG', compiler.cc_rpath_arg)
@@ -154,12 +217,19 @@ def set_compiler_environment_variables(pkg, env):
env.set('SPACK_' + flag.upper(), ' '.join(f for f in flags[flag]))
env.set('SPACK_COMPILER_SPEC', str(pkg.spec.compiler))
+
+ for mod in compiler.modules:
+ load_module(mod)
+
return env
-def set_build_environment_variables(pkg, env):
+def set_build_environment_variables(pkg, env, dirty=False):
"""
- This ensures a clean install environment when we build packages
+ This ensures a clean install environment when we build packages.
+
+ Arguments:
+ dirty -- skip unsetting the user's environment settings.
"""
# Add spack build environment path with compiler wrappers first in
# the path. We add both spack.env_path, which includes default
@@ -172,7 +242,8 @@ def set_build_environment_variables(pkg, env):
# handled by putting one in the <build_env_path>/case-insensitive
# directory. Add that to the path too.
env_paths = []
- for item in [spack.build_env_path, join_path(spack.build_env_path, pkg.compiler.name)]:
+ compiler_specific = join_path(spack.build_env_path, pkg.compiler.name)
+ for item in [spack.build_env_path, compiler_specific]:
env_paths.append(item)
ci = join_path(item, 'case-insensitive')
if os.path.isdir(ci):
@@ -183,9 +254,11 @@ def set_build_environment_variables(pkg, env):
env.set_path(SPACK_ENV_PATH, env_paths)
# Prefixes of all of the package's dependencies go in SPACK_DEPENDENCIES
- dep_prefixes = [d.prefix for d in pkg.spec.traverse(root=False)]
+ dep_prefixes = [d.prefix
+ for d in pkg.spec.traverse(root=False, deptype='build')]
env.set_path(SPACK_DEPENDENCIES, dep_prefixes)
- env.set_path('CMAKE_PREFIX_PATH', dep_prefixes) # Add dependencies to CMAKE_PREFIX_PATH
+ # Add dependencies to CMAKE_PREFIX_PATH
+ env.set_path('CMAKE_PREFIX_PATH', dep_prefixes)
# Install prefix
env.set(SPACK_PREFIX, pkg.prefix)
@@ -193,15 +266,30 @@ def set_build_environment_variables(pkg, env):
# Install root prefix
env.set(SPACK_INSTALL, spack.install_path)
- # Remove these vars from the environment during build because they
- # can affect how some packages find libraries. We want to make
- # sure that builds never pull in unintended external dependencies.
- env.unset('LD_LIBRARY_PATH')
- env.unset('LD_RUN_PATH')
- env.unset('DYLD_LIBRARY_PATH')
+ # Stuff in here sanitizes the build environment to eliminate
+ # anything the user has set that may interfere.
+ if not dirty:
+ # Remove these vars from the environment during build because they
+ # can affect how some packages find libraries. We want to make
+ # sure that builds never pull in unintended external dependencies.
+ env.unset('LD_LIBRARY_PATH')
+ env.unset('LIBRARY_PATH')
+ env.unset('CPATH')
+ env.unset('LD_RUN_PATH')
+ env.unset('DYLD_LIBRARY_PATH')
+
+ # Remove any macports installs from the PATH. The macports ld can
+ # cause conflicts with the built-in linker on el capitan. Solves
+ # assembler issues, e.g.:
+ # suffix or operands invalid for `movq'"
+ path = get_path('PATH')
+ for p in path:
+ if '/macports/' in p:
+ env.remove_path('PATH', p)
# Add bin directories from dependencies to the PATH for the build.
- bin_dirs = reversed(filter(os.path.isdir, ['%s/bin' % prefix for prefix in dep_prefixes]))
+ bin_dirs = reversed(
+ filter(os.path.isdir, ['%s/bin' % prefix for prefix in dep_prefixes]))
for item in bin_dirs:
env.prepend_path('PATH', item)
@@ -212,13 +300,14 @@ def set_build_environment_variables(pkg, env):
env.set(SPACK_DEBUG_LOG_DIR, spack.spack_working_dir)
# Add any pkgconfig directories to PKG_CONFIG_PATH
- pkg_config_dirs = []
- for p in dep_prefixes:
- for maybe in ('lib', 'lib64', 'share'):
- pcdir = join_path(p, maybe, 'pkgconfig')
+ for pre in dep_prefixes:
+ for directory in ('lib', 'lib64', 'share'):
+ pcdir = join_path(pre, directory, 'pkgconfig')
if os.path.isdir(pcdir):
- pkg_config_dirs.append(pcdir)
- env.set_path('PKG_CONFIG_PATH', pkg_config_dirs)
+ env.prepend_path('PKG_CONFIG_PATH', pcdir)
+
+ if pkg.spec.architecture.target.module_name:
+ load_module(pkg.spec.architecture.target.module_name)
return env
@@ -227,7 +316,7 @@ def set_module_variables_for_package(pkg, module):
"""Populate the module scope of install() with some useful functions.
This makes things easier for package writers.
"""
- # number of jobs spack will to build with.
+ # number of jobs spack will build with.
jobs = multiprocessing.cpu_count()
if not pkg.parallel:
jobs = 1
@@ -238,8 +327,9 @@ def set_module_variables_for_package(pkg, module):
m.make_jobs = jobs
# TODO: make these build deps that can be installed if not found.
- m.make = MakeExecutable('make', jobs)
+ m.make = MakeExecutable('make', jobs)
m.gmake = MakeExecutable('gmake', jobs)
+ m.scons = MakeExecutable('scons', jobs)
# easy shortcut to os.environ
m.env = os.environ
@@ -248,11 +338,8 @@ def set_module_variables_for_package(pkg, module):
# Don't use which for this; we want to find it in the current dir.
m.configure = Executable('./configure')
- # TODO: shouldn't really use "which" here. Consider adding notion
- # TODO: of build dependencies, as opposed to link dependencies.
- # TODO: Currently, everything is a link dependency, but tools like
- # TODO: this shouldn't be.
m.cmake = Executable('cmake')
+ m.ctest = Executable('ctest')
# standard CMake arguments
m.std_cmake_args = ['-DCMAKE_INSTALL_PREFIX=%s' % pkg.prefix,
@@ -262,33 +349,34 @@ def set_module_variables_for_package(pkg, module):
# Set up CMake rpath
m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE')
- m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH=%s' % ":".join(get_rpaths(pkg)))
+ m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH=%s' %
+ ":".join(get_rpaths(pkg)))
# Put spack compiler paths in module scope.
link_dir = spack.build_env_path
- m.spack_cc = join_path(link_dir, pkg.compiler.link_paths['cc'])
+ m.spack_cc = join_path(link_dir, pkg.compiler.link_paths['cc'])
m.spack_cxx = join_path(link_dir, pkg.compiler.link_paths['cxx'])
m.spack_f77 = join_path(link_dir, pkg.compiler.link_paths['f77'])
- m.spack_fc = join_path(link_dir, pkg.compiler.link_paths['fc'])
+ m.spack_fc = join_path(link_dir, pkg.compiler.link_paths['fc'])
# Emulate some shell commands for convenience
- m.pwd = os.getcwd
- m.cd = os.chdir
- m.mkdir = os.mkdir
- m.makedirs = os.makedirs
- m.remove = os.remove
- m.removedirs = os.removedirs
- m.symlink = os.symlink
-
- m.mkdirp = mkdirp
- m.install = install
+ m.pwd = os.getcwd
+ m.cd = os.chdir
+ m.mkdir = os.mkdir
+ m.makedirs = os.makedirs
+ m.remove = os.remove
+ m.removedirs = os.removedirs
+ m.symlink = os.symlink
+
+ m.mkdirp = mkdirp
+ m.install = install
m.install_tree = install_tree
- m.rmtree = shutil.rmtree
- m.move = shutil.move
+ m.rmtree = shutil.rmtree
+ m.move = shutil.move
# Useful directories within the prefix are encapsulated in
# a Prefix object.
- m.prefix = pkg.prefix
+ m.prefix = pkg.prefix
# Platform-specific library suffix.
m.dso_suffix = dso_suffix
@@ -297,30 +385,45 @@ def set_module_variables_for_package(pkg, module):
def get_rpaths(pkg):
"""Get a list of all the rpaths for a package."""
rpaths = [pkg.prefix.lib, pkg.prefix.lib64]
- rpaths.extend(d.prefix.lib for d in pkg.spec.dependencies.values()
+ deps = pkg.spec.dependencies(deptype='link')
+ rpaths.extend(d.prefix.lib for d in deps
if os.path.isdir(d.prefix.lib))
- rpaths.extend(d.prefix.lib64 for d in pkg.spec.dependencies.values()
+ rpaths.extend(d.prefix.lib64 for d in deps
if os.path.isdir(d.prefix.lib64))
+ # Second module is our compiler mod name. We use that to get rpaths from
+ # module show output.
+ if pkg.compiler.modules and len(pkg.compiler.modules) > 1:
+ rpaths.append(get_path_from_module(pkg.compiler.modules[1]))
return rpaths
def parent_class_modules(cls):
- """Get list of super class modules that are all descend from spack.Package"""
+ """
+ Get list of super class modules that all descend from spack.Package
+ """
if not issubclass(cls, spack.Package) or issubclass(spack.Package, cls):
return []
result = []
module = sys.modules.get(cls.__module__)
if module:
- result = [ module ]
+ result = [module]
for c in cls.__bases__:
result.extend(parent_class_modules(c))
return result
-def setup_package(pkg):
+def load_external_modules(pkg):
+ """ traverse the spec list and find any specs that have external modules.
+ """
+ for dep in list(pkg.spec.traverse()):
+ if dep.external_module:
+ load_module(dep.external_module)
+
+
+def setup_package(pkg, dirty=False):
"""Execute all environment setup routines."""
spack_env = EnvironmentModifications()
- run_env = EnvironmentModifications()
+ run_env = EnvironmentModifications()
# Before proceeding, ensure that specs and packages are consistent
#
@@ -336,14 +439,16 @@ def setup_package(pkg):
# throwaway environment, but it is kind of dirty.
#
# TODO: Think about how to avoid this fix and do something cleaner.
- for s in pkg.spec.traverse(): s.package.spec = s
+ for s in pkg.spec.traverse():
+ s.package.spec = s
set_compiler_environment_variables(pkg, spack_env)
- set_build_environment_variables(pkg, spack_env)
-
+ set_build_environment_variables(pkg, spack_env, dirty)
+ pkg.spec.architecture.platform.setup_platform_environment(pkg, spack_env)
+ load_external_modules(pkg)
# traverse in postorder so package can use vars from its dependencies
spec = pkg.spec
- for dspec in pkg.spec.traverse(order='post', root=False):
+ for dspec in pkg.spec.traverse(order='post', root=False, deptype='build'):
# If a user makes their own package repo, e.g.
# spack.repos.mystuff.libelf.Libelf, and they inherit from
# an existing class like spack.repos.original.libelf.Libelf,
@@ -368,7 +473,7 @@ def setup_package(pkg):
spack_env.apply_modifications()
-def fork(pkg, function):
+def fork(pkg, function, dirty=False):
"""Fork a child process to do part of a spack build.
Arguments:
@@ -376,6 +481,7 @@ def fork(pkg, function):
pkg -- pkg whose environment we should set up the
forked process for.
function -- arg-less function to run in the child process.
+ dirty -- If True, do NOT clean the environment before building.
Usage:
def child_fun():
@@ -399,7 +505,7 @@ def fork(pkg, function):
if pid == 0:
# Give the child process the package's build environment.
- setup_package(pkg)
+ setup_package(pkg, dirty=dirty)
try:
# call the forked function.
@@ -424,7 +530,9 @@ def fork(pkg, function):
# message. Just make the parent exit with an error code.
pid, returncode = os.waitpid(pid, 0)
if returncode != 0:
- raise InstallError("Installation process had nonzero exit code.".format(str(returncode)))
+ message = "Installation process had nonzero exit code : {code}"
+ strcode = str(returncode)
+ raise InstallError(message.format(code=strcode))
class InstallError(spack.error.SpackError):
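
The new dirty flag threads from fork() through setup_package() into set_build_environment_variables(). A rough usage sketch (pkg is assumed to be a concrete package object; build_it is a made-up function):

from spack.build_environment import fork

def build_it():
    # Runs in the forked child, with the package's build environment
    # already applied by setup_package().
    pass

# dirty=True skips the sanitizing step, leaving LD_LIBRARY_PATH,
# LIBRARY_PATH, CPATH, etc. untouched in the child.
fork(pkg, build_it, dirty=True)
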
diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py
index 672999159c..f69f434afd 100644
--- a/lib/spack/spack/cmd/__init__.py
+++ b/lib/spack/spack/cmd/__init__.py
@@ -27,16 +27,18 @@ import re
import sys
import llnl.util.tty as tty
-from llnl.util.lang import attr_setdefault
-
import spack
-import spack.spec
import spack.config
+import spack.spec
+from llnl.util.lang import *
+from llnl.util.tty.colify import *
+from llnl.util.tty.color import *
#
# Settings for commands that modify configuration
#
-# Commands that modify confguration By default modify the *highest* priority scope.
+# Commands that modify configuration by default modify the *highest*
+# priority scope.
default_modify_scope = spack.config.highest_precedence_scope().name
# Commands that list configuration list *all* scopes by default.
default_list_scope = None
@@ -48,7 +50,7 @@ python_list = list
ignore_files = r'^\.|^__init__.py$|^#'
SETUP_PARSER = "setup_parser"
-DESCRIPTION = "description"
+DESCRIPTION = "description"
command_path = os.path.join(spack.lib_path, "spack", "cmd")
@@ -71,7 +73,7 @@ def get_module(name):
module_name, fromlist=[name, SETUP_PARSER, DESCRIPTION],
level=0)
- attr_setdefault(module, SETUP_PARSER, lambda *args: None) # null-op
+ attr_setdefault(module, SETUP_PARSER, lambda *args: None) # null-op
attr_setdefault(module, DESCRIPTION, "")
fn_name = get_cmd_function_name(name)
@@ -101,17 +103,17 @@ def parse_specs(args, **kwargs):
specs = spack.spec.parse(args)
for spec in specs:
if concretize:
- spec.concretize() # implies normalize
+ spec.concretize() # implies normalize
elif normalize:
spec.normalize()
return specs
- except spack.parse.ParseError, e:
+ except spack.parse.ParseError as e:
tty.error(e.message, e.string, e.pos * " " + "^")
sys.exit(1)
- except spack.spec.SpecError, e:
+ except spack.spec.SpecError as e:
tty.error(e.message)
sys.exit(1)
@@ -127,7 +129,7 @@ def elide_list(line_list, max_num=10):
[1, 2, 3, '...', 6]
"""
if len(line_list) > max_num:
- return line_list[:max_num-1] + ['...'] + line_list[-1:]
+ return line_list[:max_num - 1] + ['...'] + line_list[-1:]
else:
return line_list
@@ -138,10 +140,104 @@ def disambiguate_spec(spec):
tty.die("Spec '%s' matches no installed packages." % spec)
elif len(matching_specs) > 1:
- args = ["%s matches multiple packages." % spec,
- "Matching packages:"]
+ args = ["%s matches multiple packages." % spec,
+ "Matching packages:"]
args += [" " + str(s) for s in matching_specs]
args += ["Use a more specific spec."]
tty.die(*args)
return matching_specs[0]
+
+
+def ask_for_confirmation(message):
+ while True:
+ tty.msg(message + '[y/n]')
+ choice = raw_input().lower()
+ if choice == 'y':
+ break
+ elif choice == 'n':
+ raise SystemExit('Operation aborted')
+ tty.warn('Please reply either "y" or "n"')
+
+
+def gray_hash(spec, length):
+ return colorize('@K{%s}' % spec.dag_hash(length))
+
+
+def display_specs(specs, **kwargs):
+ mode = kwargs.get('mode', 'short')
+ hashes = kwargs.get('long', False)
+ namespace = kwargs.get('namespace', False)
+ flags = kwargs.get('show_flags', False)
+ variants = kwargs.get('variants', False)
+
+ hlen = 7
+ if kwargs.get('very_long', False):
+ hashes = True
+ hlen = None
+
+ nfmt = '.' if namespace else '_'
+ ffmt = '$%+' if flags else ''
+ vfmt = '$+' if variants else ''
+ format_string = '$%s$@%s%s' % (nfmt, ffmt, vfmt)
+
+ # Make a dict with specs keyed by architecture and compiler.
+ index = index_by(specs, ('architecture', 'compiler'))
+
+ # Traverse the index and print out each package
+ for i, (architecture, compiler) in enumerate(sorted(index)):
+ if i > 0:
+ print
+
+ header = "%s{%s} / %s{%s}" % (spack.spec.architecture_color,
+ architecture, spack.spec.compiler_color,
+ compiler)
+ tty.hline(colorize(header), char='-')
+
+ specs = index[(architecture, compiler)]
+ specs.sort()
+
+ abbreviated = [s.format(format_string, color=True) for s in specs]
+ if mode == 'paths':
+ # Print one spec per line along with prefix path
+ width = max(len(s) for s in abbreviated)
+ width += 2
+ format = " %%-%ds%%s" % width
+
+ for abbrv, spec in zip(abbreviated, specs):
+ if hashes:
+ print gray_hash(spec, hlen),
+ print(format % (abbrv, spec.prefix))
+
+ elif mode == 'deps':
+ for spec in specs:
+ print(spec.tree(
+ format=format_string,
+ color=True,
+ indent=4,
+ prefix=(lambda s: gray_hash(s, hlen)) if hashes else None))
+
+ elif mode == 'short':
+ # Print columns of output if not printing flags
+ if not flags:
+
+ def fmt(s):
+ string = ""
+ if hashes:
+ string += gray_hash(s, hlen) + ' '
+ string += s.format('$-%s$@%s' % (nfmt, vfmt), color=True)
+
+ return string
+
+ colify(fmt(s) for s in specs)
+ # Print one entry per line if including flags
+ else:
+ for spec in specs:
+ # Print the hash if necessary
+ hsh = gray_hash(spec, hlen) + ' ' if hashes else ''
+ print(hsh + spec.format(format_string, color=True) + '\n')
+
+ else:
+ raise ValueError(
+ "Invalid mode for display_specs: %s. Must be one of (paths,"
+ "deps, short)." % mode)
diff --git a/lib/spack/spack/cmd/activate.py b/lib/spack/spack/cmd/activate.py
index 9867fa8835..797cdcb136 100644
--- a/lib/spack/spack/cmd/activate.py
+++ b/lib/spack/spack/cmd/activate.py
@@ -29,12 +29,14 @@ import spack.cmd
description = "Activate a package extension."
+
def setup_parser(subparser):
subparser.add_argument(
'-f', '--force', action='store_true',
help="Activate without first activating dependencies.")
subparser.add_argument(
- 'spec', nargs=argparse.REMAINDER, help="spec of package extension to activate.")
+ 'spec', nargs=argparse.REMAINDER,
+ help="spec of package extension to activate.")
def activate(parser, args):
diff --git a/lib/spack/spack/cmd/arch.py b/lib/spack/spack/cmd/arch.py
index dc96dd0faa..1badd40f7f 100644
--- a/lib/spack/spack/cmd/arch.py
+++ b/lib/spack/spack/cmd/arch.py
@@ -22,14 +22,10 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import spack
import spack.architecture as architecture
description = "Print the architecture for this machine"
+
def arch(parser, args):
- configured_sys_type = architecture.get_sys_type_from_spack_globals()
- if not configured_sys_type:
- configured_sys_type = "autodetect"
- print "Configured sys_type: %s" % configured_sys_type
- print "Autodetected default sys_type: %s" % architecture.sys_type()
+ print architecture.sys_type()
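
The rewritten sys_type() builds an Arch from the detected platform plus the 'default_os' and 'default_target' aliases and returns its string form, so the arch command reduces to a single call. A sketch of the result (the exact triple depends on the machine; the one shown is hypothetical):

import spack.architecture as architecture
print architecture.sys_type()   # e.g. 'linux-ubuntu14-x86_64'
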
diff --git a/lib/spack/spack/cmd/bootstrap.py b/lib/spack/spack/cmd/bootstrap.py
index bec11439b5..60e2bd3a11 100644
--- a/lib/spack/spack/cmd/bootstrap.py
+++ b/lib/spack/spack/cmd/bootstrap.py
@@ -23,7 +23,6 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
-from subprocess import check_call
import llnl.util.tty as tty
from llnl.util.filesystem import join_path, mkdirp
@@ -31,26 +30,49 @@ from llnl.util.filesystem import join_path, mkdirp
import spack
-from spack.util.executable import which
+from spack.util.executable import which, ProcessError
+_SPACK_UPSTREAM = 'https://github.com/llnl/spack'
+
description = "Create a new installation of spack in another prefix"
+
def setup_parser(subparser):
- subparser.add_argument('prefix', help="names of prefix where we should install spack")
+ subparser.add_argument(
+ '-r', '--remote', action='store', dest='remote',
+ help="name of the remote to bootstrap from", default='origin')
+ subparser.add_argument(
+ 'prefix',
+ help="names of prefix where we should install spack")
-def get_origin_url():
+def get_origin_info(remote):
git_dir = join_path(spack.prefix, '.git')
git = which('git', required=True)
- origin_url = git(
- '--git-dir=%s' % git_dir, 'config', '--get', 'remote.origin.url',
- output=str)
- return origin_url.strip()
+ try:
+ branch = git('symbolic-ref', '--short', 'HEAD', output=str)
+ except ProcessError:
+ branch = 'develop'
+ tty.warn('No branch found; using default branch: %s' % branch)
+ if remote == 'origin' and \
+ branch not in ('master', 'develop'):
+ branch = 'develop'
+ tty.warn('Unknown branch found; using default branch: %s' % branch)
+ try:
+ origin_url = git(
+ '--git-dir=%s' % git_dir,
+ 'config', '--get', 'remote.%s.url' % remote,
+ output=str)
+ except ProcessError:
+ origin_url = _SPACK_UPSTREAM
+ tty.warn('No git repository found; '
+ 'using default upstream URL: %s' % origin_url)
+ return (origin_url.strip(), branch.strip())
def bootstrap(parser, args):
- origin_url = get_origin_url()
+ origin_url, branch = get_origin_info(args.remote)
prefix = args.prefix
- tty.msg("Fetching spack from origin: %s" % origin_url)
+ tty.msg("Fetching spack from '%s': %s" % (args.remote, origin_url))
if os.path.isfile(prefix):
tty.die("There is already a file at %s" % prefix)
@@ -62,7 +84,8 @@ def bootstrap(parser, args):
files_in_the_way = os.listdir(prefix)
if files_in_the_way:
- tty.die("There are already files there! Delete these files before boostrapping spack.",
+ tty.die("There are already files there! "
+ "Delete these files before boostrapping spack.",
*files_in_the_way)
tty.msg("Installing:",
@@ -73,8 +96,10 @@ def bootstrap(parser, args):
git = which('git', required=True)
git('init', '--shared', '-q')
git('remote', 'add', 'origin', origin_url)
- git('fetch', 'origin', 'master:refs/remotes/origin/master', '-n', '-q')
- git('reset', '--hard', 'origin/master', '-q')
+ git('fetch', 'origin', '%s:refs/remotes/origin/%s' % (branch, branch),
+ '-n', '-q')
+ git('reset', '--hard', 'origin/%s' % branch, '-q')
+ git('checkout', '-B', branch, 'origin/%s' % branch, '-q')
tty.msg("Successfully created a new spack in %s" % prefix,
"Run %s/bin/spack to use this installation." % prefix)
diff --git a/lib/spack/spack/cmd/cd.py b/lib/spack/spack/cmd/cd.py
index aa45f67ae1..cf7232258c 100644
--- a/lib/spack/spack/cmd/cd.py
+++ b/lib/spack/spack/cmd/cd.py
@@ -25,7 +25,8 @@
import spack.cmd.location
import spack.modules
-description="cd to spack directories in the shell."
+description = "cd to spack directories in the shell."
+
def setup_parser(subparser):
"""This is for decoration -- spack cd is used through spack's
diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py
index 95bd4771ed..aedb0fd99c 100644
--- a/lib/spack/spack/cmd/checksum.py
+++ b/lib/spack/spack/cmd/checksum.py
@@ -42,7 +42,8 @@ def setup_parser(subparser):
'--keep-stage', action='store_true', dest='keep_stage',
help="Don't clean up staging area when command completes.")
subparser.add_argument(
- 'versions', nargs=argparse.REMAINDER, help='Versions to generate checksums for')
+ 'versions', nargs=argparse.REMAINDER,
+ help='Versions to generate checksums for')
def get_checksums(versions, urls, **kwargs):
@@ -59,10 +60,10 @@ def get_checksums(versions, urls, **kwargs):
with Stage(url, keep=keep_stage) as stage:
stage.fetch()
if i == 0 and first_stage_function:
- first_stage_function(stage)
+ first_stage_function(stage, url)
- hashes.append((version,
- spack.util.crypto.checksum(hashlib.md5, stage.archive_file)))
+ hashes.append((version, spack.util.crypto.checksum(
+ hashlib.md5, stage.archive_file)))
i += 1
except FailedDownloadError as e:
tty.msg("Failed to fetch %s" % url)
@@ -79,12 +80,12 @@ def checksum(parser, args):
# If the user asked for specific versions, use those.
if args.versions:
versions = {}
- for v in args.versions:
- v = ver(v)
- if not isinstance(v, Version):
+ for version in args.versions:
+ version = ver(version)
+ if not isinstance(version, Version):
tty.die("Cannot generate checksums for version lists or " +
"version ranges. Use unambiguous versions.")
- versions[v] = pkg.url_for_version(v)
+ versions[version] = pkg.url_for_version(version)
else:
versions = pkg.fetch_remote_versions()
if not versions:
@@ -111,5 +112,7 @@ def checksum(parser, args):
if not version_hashes:
tty.die("Could not fetch any versions for %s" % pkg.name)
- version_lines = [" version('%s', '%s')" % (v, h) for v, h in version_hashes]
+ version_lines = [
+ " version('%s', '%s')" % (v, h) for v, h in version_hashes
+ ]
tty.msg("Checksummed new versions of %s:" % pkg.name, *version_lines)
diff --git a/lib/spack/spack/cmd/clean.py b/lib/spack/spack/cmd/clean.py
index 514c5874ef..dc62fbcaf6 100644
--- a/lib/spack/spack/cmd/clean.py
+++ b/lib/spack/spack/cmd/clean.py
@@ -31,6 +31,7 @@ import spack.cmd
description = "Remove build stage and source tarball for packages."
+
def setup_parser(subparser):
subparser.add_argument('packages', nargs=argparse.REMAINDER,
help="specs of packages to clean")
diff --git a/lib/spack/spack/cmd/common/__init__.py b/lib/spack/spack/cmd/common/__init__.py
new file mode 100644
index 0000000000..ed1ec23bca
--- /dev/null
+++ b/lib/spack/spack/cmd/common/__init__.py
@@ -0,0 +1,24 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
diff --git a/lib/spack/spack/cmd/common/arguments.py b/lib/spack/spack/cmd/common/arguments.py
new file mode 100644
index 0000000000..afcba33714
--- /dev/null
+++ b/lib/spack/spack/cmd/common/arguments.py
@@ -0,0 +1,96 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+import argparse
+
+import spack.modules
+from spack.util.pattern import Bunch
+__all__ = ['add_common_arguments']
+
+_arguments = {}
+
+
+def add_common_arguments(parser, list_of_arguments):
+ for argument in list_of_arguments:
+ if argument not in _arguments:
+ message = 'Trying to add a non-existing argument "{0}" to a command'
+ raise KeyError(message.format(argument))
+ x = _arguments[argument]
+ parser.add_argument(*x.flags, **x.kwargs)
+
+
+class ConstraintAction(argparse.Action):
+ """Constructs a list of specs based on a constraint given on the command line
+
+ An instance of this class is supposed to be used as an argument action
+ in a parser. It will read a constraint and attach the list of matching
+ specs to the namespace.
+ """
+ qualifiers = {}
+
+ def __call__(self, parser, namespace, values, option_string=None):
+ # Query specs from command line
+ d = self.qualifiers.get(namespace.subparser_name, {})
+ specs = list(spack.installed_db.query(**d))
+ values = ' '.join(values)
+ if values:
+ specs = [x for x in specs if x.satisfies(values, strict=True)]
+ namespace.specs = specs
+
+parms = Bunch(
+ flags=('constraint',),
+ kwargs={
+ 'nargs': '*',
+ 'help': 'Constraint to select a subset of installed packages',
+ 'action': ConstraintAction
+ })
+_arguments['constraint'] = parms
+
+parms = Bunch(
+ flags=('-m', '--module-type'),
+ kwargs={
+ 'help': 'Type of module files',
+ 'default': 'tcl',
+ 'choices': spack.modules.module_types
+ })
+_arguments['module_type'] = parms
+
+parms = Bunch(
+ flags=('-y', '--yes-to-all'),
+ kwargs={
+ 'action': 'store_true',
+ 'dest': 'yes_to_all',
+ 'help': 'Assume "yes" is the answer to every confirmation request.'
+ })
+_arguments['yes_to_all'] = parms
+
+parms = Bunch(
+ flags=('-r', '--dependencies'),
+ kwargs={
+ 'action': 'store_true',
+ 'dest': 'recurse_dependencies',
+ 'help': 'Recursively traverse spec dependencies'
+ })
+_arguments['recurse_dependencies'] = parms
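
Command modules can now share these definitions instead of redeclaring flags. A sketch of a setup_parser() that pulls in two of the arguments registered above:

from spack.cmd.common import arguments

def setup_parser(subparser):
    # Adds 'constraint' (positional, handled by ConstraintAction)
    # and the -y/--yes-to-all flag to this command's parser.
    arguments.add_common_arguments(subparser, ['constraint', 'yes_to_all'])
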
diff --git a/lib/spack/spack/cmd/compiler.py b/lib/spack/spack/cmd/compiler.py
index dc7731a290..ea91c71479 100644
--- a/lib/spack/spack/cmd/compiler.py
+++ b/lib/spack/spack/cmd/compiler.py
@@ -37,6 +37,7 @@ from spack.util.environment import get_path
description = "Manage compilers"
+
def setup_parser(subparser):
sp = subparser.add_subparsers(
metavar='SUBCOMMAND', dest='compiler_command')
@@ -44,43 +45,58 @@ def setup_parser(subparser):
scopes = spack.config.config_scopes
# Find
- find_parser = sp.add_parser('find', aliases=['add'], help='Search the system for compilers to add to the Spack configuration.')
+ find_parser = sp.add_parser(
+ 'find', aliases=['add'],
+ help='Search the system for compilers to add to Spack configuration.')
find_parser.add_argument('add_paths', nargs=argparse.REMAINDER)
- find_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_modify_scope,
- help="Configuration scope to modify.")
+ find_parser.add_argument(
+ '--scope', choices=scopes, default=spack.cmd.default_modify_scope,
+ help="Configuration scope to modify.")
# Remove
- remove_parser = sp.add_parser('remove', aliases=['rm'], help='Remove compiler by spec.')
+ remove_parser = sp.add_parser(
+ 'remove', aliases=['rm'], help='Remove compiler by spec.')
remove_parser.add_argument(
- '-a', '--all', action='store_true', help='Remove ALL compilers that match spec.')
+ '-a', '--all', action='store_true',
+ help='Remove ALL compilers that match spec.')
remove_parser.add_argument('compiler_spec')
- remove_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_modify_scope,
- help="Configuration scope to modify.")
+ remove_parser.add_argument(
+ '--scope', choices=scopes, default=spack.cmd.default_modify_scope,
+ help="Configuration scope to modify.")
# List
list_parser = sp.add_parser('list', help='list available compilers')
- list_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_list_scope,
- help="Configuration scope to read from.")
+ list_parser.add_argument(
+ '--scope', choices=scopes, default=spack.cmd.default_list_scope,
+ help="Configuration scope to read from.")
# Info
info_parser = sp.add_parser('info', help='Show compiler paths.')
info_parser.add_argument('compiler_spec')
- info_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_list_scope,
- help="Configuration scope to read from.")
+ info_parser.add_argument(
+ '--scope', choices=scopes, default=spack.cmd.default_list_scope,
+ help="Configuration scope to read from.")
def compiler_find(args):
- """Search either $PATH or a list of paths for compilers and add them
- to Spack's configuration."""
+ """Search either $PATH or a list of paths OR MODULES for compilers and
+ add them to Spack's configuration.
+
+ """
paths = args.add_paths
if not paths:
paths = get_path('PATH')
- compilers = [c for c in spack.compilers.find_compilers(*args.add_paths)
- if c.spec not in spack.compilers.all_compilers(scope=args.scope)]
-
+ # Don't initialize compilers config via compilers.get_compiler_config.
+ # Just let compiler_find do the entire process and return an empty
+ # config from all_compilers. Default for any other process is
+ # init_config=True.
+ compilers = [c for c in spack.compilers.find_compilers(*paths)
+ if c.spec not in spack.compilers.all_compilers(
+ scope=args.scope, init_config=False)]
if compilers:
- spack.compilers.add_compilers_to_config(compilers, scope=args.scope)
+ spack.compilers.add_compilers_to_config(compilers, scope=args.scope,
+ init_config=False)
n = len(compilers)
s = 's' if n > 1 else ''
filename = spack.config.get_config_filename(args.scope, 'compilers')
@@ -93,17 +109,17 @@ def compiler_find(args):
def compiler_remove(args):
cspec = CompilerSpec(args.compiler_spec)
compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope)
-
if not compilers:
tty.die("No compilers match spec %s" % cspec)
elif not args.all and len(compilers) > 1:
tty.error("Multiple compilers match spec %s. Choose one:" % cspec)
colify(reversed(sorted([c.spec for c in compilers])), indent=4)
- tty.msg("Or, you can use `spack compiler remove -a` to remove all of them.")
+ tty.msg("Or, use `spack compiler remove -a` to remove all of them.")
sys.exit(1)
for compiler in compilers:
- spack.compilers.remove_compiler_from_config(compiler.spec, scope=args.scope)
+ spack.compilers.remove_compiler_from_config(
+ compiler.spec, scope=args.scope)
tty.msg("Removed compiler %s" % compiler.spec)
@@ -121,13 +137,16 @@ def compiler_info(args):
print "\tcxx = %s" % c.cxx
print "\tf77 = %s" % c.f77
print "\tfc = %s" % c.fc
+ print "\tmodules = %s" % c.modules
+ print "\toperating system = %s" % c.operating_system
def compiler_list(args):
tty.msg("Available compilers")
index = index_by(spack.compilers.all_compilers(scope=args.scope), 'name')
for i, (name, compilers) in enumerate(index.items()):
- if i >= 1: print
+ if i >= 1:
+ print
cname = "%s{%s}" % (spack.spec.compiler_color, name)
tty.hline(colorize(cname), char='-')
@@ -135,10 +154,10 @@ def compiler_list(args):
def compiler(parser, args):
- action = { 'add' : compiler_find,
- 'find' : compiler_find,
- 'remove' : compiler_remove,
- 'rm' : compiler_remove,
- 'info' : compiler_info,
- 'list' : compiler_list }
+ action = {'add': compiler_find,
+ 'find': compiler_find,
+ 'remove': compiler_remove,
+ 'rm': compiler_remove,
+ 'info': compiler_info,
+ 'list': compiler_list}
action[args.compiler_command](args)
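
With the two new fields, 'spack compiler info' now reports the modules and operating system recorded for each compiler entry alongside the cc/cxx/f77/fc paths. This matters on Cray-style machines, where compilers are loaded through modules rather than found on $PATH.
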
diff --git a/lib/spack/spack/cmd/compilers.py b/lib/spack/spack/cmd/compilers.py
index 9fbc2bb952..b87f977e5a 100644
--- a/lib/spack/spack/cmd/compilers.py
+++ b/lib/spack/spack/cmd/compilers.py
@@ -22,18 +22,16 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import llnl.util.tty as tty
-from llnl.util.tty.colify import colify
-from llnl.util.lang import index_by
-
import spack
from spack.cmd.compiler import compiler_list
description = "List available compilers. Same as 'spack compiler list'."
+
def setup_parser(subparser):
subparser.add_argument('--scope', choices=spack.config.config_scopes,
help="Configuration scope to read/modify.")
+
def compilers(parser, args):
compiler_list(args)
diff --git a/lib/spack/spack/cmd/config.py b/lib/spack/spack/cmd/config.py
index d6f56c270d..c189e37036 100644
--- a/lib/spack/spack/cmd/config.py
+++ b/lib/spack/spack/cmd/config.py
@@ -22,15 +22,11 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import sys
-import argparse
-
-import llnl.util.tty as tty
-
import spack.config
description = "Get and set configuration options."
+
def setup_parser(subparser):
# User can only choose one
scope_group = subparser.add_mutually_exclusive_group()
@@ -64,6 +60,6 @@ def config_edit(args):
def config(parser, args):
- action = { 'get' : config_get,
- 'edit' : config_edit }
+ action = {'get': config_get,
+ 'edit': config_edit}
action[args.config_command](args)
diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py
index 41bfa741f6..52a82eb38f 100644
--- a/lib/spack/spack/cmd/create.py
+++ b/lib/spack/spack/cmd/create.py
@@ -1,4 +1,3 @@
-_copyright = """\
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
@@ -23,10 +22,8 @@ _copyright = """\
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-"""
import string
import os
-import hashlib
import re
from ordereddict_backport import OrderedDict
@@ -41,16 +38,37 @@ import spack.util.web
from spack.spec import Spec
from spack.util.naming import *
from spack.repository import Repo, RepoError
-import spack.util.crypto as crypto
from spack.util.executable import which
-from spack.stage import Stage
description = "Create a new package file from an archive URL"
-package_template = string.Template(
- _copyright + """
+package_template = string.Template("""\
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
#
# This is a template package file for Spack. We've put "FIXME"
# next to all the things you'll want to change. Once you've handled
@@ -68,25 +86,105 @@ package_template = string.Template(
#
from spack import *
+
class ${class_name}(Package):
- ""\"FIXME: put a proper description of your package here.""\"
- # FIXME: add a proper url for your package's homepage here.
+ ""\"FIXME: Put a proper description of your package here.""\"
+
+ # FIXME: Add a proper url for your package's homepage here.
homepage = "http://www.example.com"
url = "${url}"
${versions}
- # FIXME: Add dependencies if this package requires them.
- # depends_on("foo")
+${dependencies}
def install(self, spec, prefix):
+${install}
+""")
+
+# Build dependencies and extensions
+dependencies_dict = {
+ 'autotools': """\
+ # FIXME: Add dependencies if required.
+ # depends_on('foo')""",
+
+ 'cmake': """\
+ # FIXME: Add additional dependencies if required.
+ depends_on('cmake', type='build')""",
+
+ 'scons': """\
+ # FIXME: Add additional dependencies if required.
+ depends_on('scons', type='build')""",
+
+ 'python': """\
+ extends('python')
+
+ # FIXME: Add additional dependencies if required.
+ # depends_on('py-foo', type=nolink)""",
+
+ 'R': """\
+ extends('R')
+
+ # FIXME: Add additional dependencies if required.
+ # depends_on('r-foo', type=nolink)""",
+
+ 'octave': """\
+ extends('octave')
+
+ # FIXME: Add additional dependencies if required.
+ # depends_on('octave-foo', type=nolink)""",
+
+ 'unknown': """\
+ # FIXME: Add dependencies if required.
+ # depends_on('foo')"""
+}
+
+# Default installation instructions
+install_dict = {
+ 'autotools': """\
# FIXME: Modify the configure line to suit your build system here.
- ${configure}
+ configure('--prefix={0}'.format(prefix))
- # FIXME: Add logic to build and install here
+ # FIXME: Add logic to build and install here.
make()
- make("install")
-""")
+ make('install')""",
+
+ 'cmake': """\
+ with working_dir('spack-build', create=True):
+ # FIXME: Modify the cmake line to suit your build system here.
+ cmake('..', *std_cmake_args)
+
+ # FIXME: Add logic to build and install here.
+ make()
+ make('install')""",
+
+ 'scons': """\
+ # FIXME: Add logic to build and install here.
+ scons('prefix={0}'.format(prefix))
+ scons('install')""",
+
+ 'python': """\
+ # FIXME: Add logic to build and install here.
+ setup_py('install', '--prefix={0}'.format(prefix))""",
+
+ 'R': """\
+ # FIXME: Add logic to build and install here.
+ R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir),
+ self.stage.source_path)""",
+
+ 'octave': """\
+ # FIXME: Add logic to build and install here.
+ octave('--quiet', '--norc',
+ '--built-in-docstrings-file=/dev/null',
+ '--texi-macros-file=/dev/null',
+ '--eval', 'pkg prefix {0}; pkg install {1}'.format(
+ prefix, self.stage.archive_file))""",
+
+ 'unknown': """\
+ # FIXME: Unknown build system
+ make()
+ make('install')"""
+}
def make_version_calls(ver_hash_tuples):
@@ -118,41 +216,53 @@ def setup_parser(subparser):
setup_parser.subparser = subparser
-class ConfigureGuesser(object):
- def __call__(self, stage):
- """Try to guess the type of build system used by the project, and return
- an appropriate configure line.
- """
- autotools = "configure('--prefix=%s' % prefix)"
- cmake = "cmake('.', *std_cmake_args)"
- python = "python('setup.py', 'install', '--prefix=%s' % prefix)"
- r = "R('CMD', 'INSTALL', '--library=%s' % self.module.r_lib_dir, '%s' % self.stage.archive_file)"
-
- config_lines = ((r'/configure$', 'autotools', autotools),
- (r'/CMakeLists.txt$', 'cmake', cmake),
- (r'/setup.py$', 'python', python),
- (r'/NAMESPACE$', 'r', r))
-
- # Peek inside the tarball.
- tar = which('tar')
- output = tar(
- "--exclude=*/*/*", "-tf", stage.archive_file, output=str)
- lines = output.split("\n")
-
- # Set the configure line to the one that matched.
- for pattern, bs, cl in config_lines:
+class BuildSystemGuesser(object):
+
+ def __call__(self, stage, url):
+ """Try to guess the type of build system used by a project based on
+ the contents of its archive or the URL it was downloaded from."""
+
+ # Most octave extensions are hosted on Octave-Forge:
+ # http://octave.sourceforge.net/index.html
+ # They all have the same base URL.
+ if 'downloads.sourceforge.net/octave/' in url:
+ self.build_system = 'octave'
+ return
+
+ # A list of clues that give us an idea of the build system a package
+ # uses. If the regular expression matches a file contained in the
+ # archive, the corresponding build system is assumed.
+ clues = [
+ (r'/configure$', 'autotools'),
+ (r'/CMakeLists.txt$', 'cmake'),
+ (r'/SConstruct$', 'scons'),
+ (r'/setup.py$', 'python'),
+ (r'/NAMESPACE$', 'R')
+ ]
+
+ # Peek inside the compressed file.
+ if stage.archive_file.endswith('.zip'):
+ try:
+ unzip = which('unzip')
+ output = unzip('-l', stage.archive_file, output=str)
+            except Exception:
+ output = ''
+ else:
+ try:
+ tar = which('tar')
+ output = tar('--exclude=*/*/*', '-tf',
+ stage.archive_file, output=str)
+            except Exception:
+ output = ''
+ lines = output.split('\n')
+
+ # Determine the build system based on the files contained
+ # in the archive.
+ build_system = 'unknown'
+ for pattern, bs in clues:
if any(re.search(pattern, l) for l in lines):
- config_line = cl
build_system = bs
- break
- else:
- # None matched -- just put both, with cmake commented out
- config_line = "# FIXME: Spack couldn't guess one, so here are some options:\n"
- config_line += " # " + autotools + "\n"
- config_line += " # " + cmake
- build_system = 'unknown'
- self.configure = config_line
self.build_system = build_system
@@ -168,7 +278,7 @@ def guess_name_and_version(url, args):
else:
try:
name = spack.url.parse_name(url, version)
- except spack.url.UndetectableNameError, e:
+ except spack.url.UndetectableNameError:
# Use a user-supplied name if one is present
tty.die("Couldn't guess a name for this package. Try running:", "",
"spack create --name <name> <url>")
@@ -182,7 +292,8 @@ def guess_name_and_version(url, args):
def find_repository(spec, args):
# figure out namespace for spec
if spec.namespace and args.namespace and spec.namespace != args.namespace:
- tty.die("Namespaces '%s' and '%s' do not match." % (spec.namespace, args.namespace))
+ tty.die("Namespaces '%s' and '%s' do not match." % (spec.namespace,
+ args.namespace))
if not spec.namespace and args.namespace:
spec.namespace = args.namespace
@@ -193,8 +304,8 @@ def find_repository(spec, args):
try:
repo = Repo(repo_path)
if spec.namespace and spec.namespace != repo.namespace:
- tty.die("Can't create package with namespace %s in repo with namespace %s"
- % (spec.namespace, repo.namespace))
+ tty.die("Can't create package with namespace %s in repo with "
+ "namespace %s" % (spec.namespace, repo.namespace))
except RepoError as e:
tty.die(str(e))
else:
@@ -214,11 +325,7 @@ def find_repository(spec, args):
def fetch_tarballs(url, name, version):
"""Try to find versions of the supplied archive by scraping the web.
-
- Prompts the user to select how many to download if many are found.
-
-
- """
+ Prompts the user to select how many to download if many are found."""
versions = spack.util.web.find_versions_of_archive(url)
rkeys = sorted(versions.keys(), reverse=True)
versions = OrderedDict(zip(rkeys, (versions[v] for v in rkeys)))
@@ -226,11 +333,11 @@ def fetch_tarballs(url, name, version):
archives_to_fetch = 1
if not versions:
# If the fetch failed for some reason, revert to what the user provided
- versions = { version : url }
+ versions = {version: url}
elif len(versions) > 1:
tty.msg("Found %s versions of %s:" % (len(versions), name),
*spack.cmd.elide_list(
- ["%-10s%s" % (v,u) for v, u in versions.iteritems()]))
+ ["%-10s%s" % (v, u) for v, u in versions.iteritems()]))
print
archives_to_fetch = tty.get_number(
"Include how many checksums in the package file?",
@@ -253,7 +360,7 @@ def create(parser, args):
# Figure out a name and repo for the package.
name, version = guess_name_and_version(url, args)
spec = Spec(name)
- name = spec.name # factors out namespace, if any
+ name = spec.name.lower() # factors out namespace, if any
repo = find_repository(spec, args)
tty.msg("This looks like a URL for %s version %s" % (name, version))
@@ -262,8 +369,8 @@ def create(parser, args):
# Fetch tarballs (prompting user if necessary)
versions, urls = fetch_tarballs(url, name, version)
- # Try to guess what configure system is used.
- guesser = ConfigureGuesser()
+ # Try to guess what build system is used.
+ guesser = BuildSystemGuesser()
ver_hash_tuples = spack.cmd.checksum.get_checksums(
versions, urls,
first_stage_function=guesser,
@@ -272,13 +379,13 @@ def create(parser, args):
if not ver_hash_tuples:
tty.die("Could not fetch any tarballs for %s" % name)
- # Prepend 'py-' to python package names, by convention.
+ # Add prefix to package name if it is an extension.
if guesser.build_system == 'python':
- name = 'py-%s' % name
-
- # Prepend 'r-' to R package names, by convention.
- if guesser.build_system == 'r':
- name = 'r-%s' % name
+ name = 'py-{0}'.format(name)
+ if guesser.build_system == 'R':
+ name = 'r-{0}'.format(name)
+ if guesser.build_system == 'octave':
+ name = 'octave-{0}'.format(name)
# Create a directory for the new package.
pkg_path = repo.filename_for_package_name(name)
@@ -292,10 +399,11 @@ def create(parser, args):
pkg_file.write(
package_template.substitute(
name=name,
- configure=guesser.configure,
class_name=mod_to_class(name),
url=url,
- versions=make_version_calls(ver_hash_tuples)))
+ versions=make_version_calls(ver_hash_tuples),
+ dependencies=dependencies_dict[guesser.build_system],
+ install=install_dict[guesser.build_system]))
# If everything checks out, go ahead and edit.
spack.editor(pkg_path)
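
Note: the new BuildSystemGuesser above drives both ${dependencies} and
${install} in the package template from a single guess. A minimal standalone
sketch of the clue-matching logic, using the patterns from the hunk above and
a hypothetical 'tar -tf' listing:

    import re

    # (pattern, build system) clues, mirroring BuildSystemGuesser.__call__
    clues = [
        (r'/configure$', 'autotools'),
        (r'/CMakeLists.txt$', 'cmake'),
        (r'/SConstruct$', 'scons'),
        (r'/setup.py$', 'python'),
        (r'/NAMESPACE$', 'R'),
    ]

    def guess_build_system(lines):
        """Return the first build system whose clue matches a listed file."""
        for pattern, bs in clues:
            if any(re.search(pattern, l) for l in lines):
                return bs
        return 'unknown'

    # Hypothetical archive listing for an autotools tarball.
    listing = ['foo-1.0/', 'foo-1.0/configure', 'foo-1.0/src/']
    print(guess_build_system(listing))  # autotools
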
diff --git a/lib/spack/spack/cmd/deactivate.py b/lib/spack/spack/cmd/deactivate.py
index 990309ee48..2b15a0331e 100644
--- a/lib/spack/spack/cmd/deactivate.py
+++ b/lib/spack/spack/cmd/deactivate.py
@@ -31,6 +31,7 @@ from spack.graph import topological_sort
description = "Deactivate a package extension."
+
def setup_parser(subparser):
subparser.add_argument(
'-f', '--force', action='store_true',
@@ -40,7 +41,8 @@ def setup_parser(subparser):
help="Deactivate all extensions of an extendable package, or "
"deactivate an extension AND its dependencies.")
subparser.add_argument(
- 'spec', nargs=argparse.REMAINDER, help="spec of package extension to deactivate.")
+ 'spec', nargs=argparse.REMAINDER,
+ help="spec of package extension to deactivate.")
def deactivate(parser, args):
@@ -65,7 +67,8 @@ def deactivate(parser, args):
if not args.force and not spec.package.activated:
tty.die("%s is not activated." % pkg.spec.short_spec)
- tty.msg("Deactivating %s and all dependencies." % pkg.spec.short_spec)
+ tty.msg("Deactivating %s and all dependencies." %
+ pkg.spec.short_spec)
topo_order = topological_sort(spec)
index = spec.index()
@@ -79,7 +82,9 @@ def deactivate(parser, args):
epkg.do_deactivate(force=args.force)
else:
- tty.die("spack deactivate --all requires an extendable package or an extension.")
+ tty.die(
+ "spack deactivate --all requires an extendable package "
+ "or an extension.")
else:
if not pkg.is_extension:
diff --git a/lib/spack/spack/cmd/dependents.py b/lib/spack/spack/cmd/dependents.py
index 78eb6847b8..7729105e62 100644
--- a/lib/spack/spack/cmd/dependents.py
+++ b/lib/spack/spack/cmd/dependents.py
@@ -31,9 +31,11 @@ import spack.cmd
description = "Show installed packages that depend on another."
+
def setup_parser(subparser):
subparser.add_argument(
- 'spec', nargs=argparse.REMAINDER, help="specs to list dependencies of.")
+ 'spec', nargs=argparse.REMAINDER,
+ help="specs to list dependencies of.")
def dependents(parser, args):
@@ -42,5 +44,6 @@ def dependents(parser, args):
tty.die("spack dependents takes only one spec.")
fmt = '$_$@$%@$+$=$#'
- deps = [d.format(fmt, color=True) for d in specs[0].package.installed_dependents]
+ deps = [d.format(fmt, color=True)
+ for d in specs[0].package.installed_dependents]
tty.msg("Dependents of %s" % specs[0].format(fmt, color=True), *deps)
diff --git a/lib/spack/spack/cmd/diy.py b/lib/spack/spack/cmd/diy.py
index 39faf59a17..487654d261 100644
--- a/lib/spack/spack/cmd/diy.py
+++ b/lib/spack/spack/cmd/diy.py
@@ -35,6 +35,7 @@ from spack.stage import DIYStage
description = "Do-It-Yourself: build from an existing source directory."
+
def setup_parser(subparser):
subparser.add_argument(
'-i', '--ignore-dependencies', action='store_true', dest='ignore_deps',
@@ -50,7 +51,7 @@ def setup_parser(subparser):
help="Do not display verbose build output while installing.")
subparser.add_argument(
'spec', nargs=argparse.REMAINDER,
- help="specs to use for install. Must contain package AND verison.")
+ help="specs to use for install. Must contain package AND version.")
def diy(self, args):
@@ -76,14 +77,17 @@ def diy(self, args):
return
if not spec.versions.concrete:
- tty.die("spack diy spec must have a single, concrete version. Did you forget a package version number?")
+ tty.die(
+ "spack diy spec must have a single, concrete version. "
+ "Did you forget a package version number?")
spec.concretize()
package = spack.repo.get(spec)
if package.installed:
tty.error("Already installed in %s" % package.prefix)
- tty.msg("Uninstall or try adding a version suffix for this DIY build.")
+ tty.msg("Uninstall or try adding a version suffix for this "
+ "DIY build.")
sys.exit(1)
# Forces the build to run out of the current directory.
diff --git a/lib/spack/spack/cmd/doc.py b/lib/spack/spack/cmd/doc.py
index b3d0737d13..291b17216f 100644
--- a/lib/spack/spack/cmd/doc.py
+++ b/lib/spack/spack/cmd/doc.py
@@ -25,6 +25,7 @@
description = "Run pydoc from within spack."
+
def setup_parser(subparser):
subparser.add_argument('entity', help="Run pydoc help on entity")
diff --git a/lib/spack/spack/cmd/edit.py b/lib/spack/spack/cmd/edit.py
index 49ab83867a..286136dd67 100644
--- a/lib/spack/spack/cmd/edit.py
+++ b/lib/spack/spack/cmd/edit.py
@@ -68,7 +68,7 @@ def edit_package(name, repo_path, namespace, force=False):
if os.path.exists(path):
if not os.path.isfile(path):
tty.die("Something's wrong. '%s' is not a file!" % path)
- if not os.access(path, os.R_OK|os.W_OK):
+ if not os.access(path, os.R_OK | os.W_OK):
tty.die("Insufficient permissions on '%s'!" % path)
elif not force:
tty.die("No package '%s'. Use spack create, or supply -f/--force "
@@ -93,19 +93,23 @@ def setup_parser(subparser):
# Various filetypes you can edit directly from the cmd line.
excl_args.add_argument(
'-c', '--command', dest='path', action='store_const',
- const=spack.cmd.command_path, help="Edit the command with the supplied name.")
+ const=spack.cmd.command_path,
+ help="Edit the command with the supplied name.")
excl_args.add_argument(
'-t', '--test', dest='path', action='store_const',
const=spack.test_path, help="Edit the test with the supplied name.")
excl_args.add_argument(
'-m', '--module', dest='path', action='store_const',
- const=spack.module_path, help="Edit the main spack module with the supplied name.")
+ const=spack.module_path,
+ help="Edit the main spack module with the supplied name.")
# Options for editing packages
excl_args.add_argument(
- '-r', '--repo', default=None, help="Path to repo to edit package in.")
+ '-r', '--repo', default=None,
+ help="Path to repo to edit package in.")
excl_args.add_argument(
- '-N', '--namespace', default=None, help="Namespace of package to edit.")
+ '-N', '--namespace', default=None,
+ help="Namespace of package to edit.")
subparser.add_argument(
'name', nargs='?', default=None, help="name of package to edit")
diff --git a/lib/spack/spack/cmd/env.py b/lib/spack/spack/cmd/env.py
index 85d111e91e..f3bad039d4 100644
--- a/lib/spack/spack/cmd/env.py
+++ b/lib/spack/spack/cmd/env.py
@@ -28,11 +28,13 @@ import llnl.util.tty as tty
import spack.cmd
import spack.build_environment as build_env
-description = "Run a command with the environment for a particular spec's install."
+description = "Run a command with the install environment for a spec."
+
def setup_parser(subparser):
subparser.add_argument(
- 'spec', nargs=argparse.REMAINDER, help="specs of package environment to emulate.")
+ 'spec', nargs=argparse.REMAINDER,
+ help="specs of package environment to emulate.")
def env(parser, args):
@@ -47,7 +49,7 @@ def env(parser, args):
if sep in args.spec:
s = args.spec.index(sep)
spec = args.spec[:s]
- cmd = args.spec[s+1:]
+ cmd = args.spec[s + 1:]
else:
spec = args.spec[0]
cmd = args.spec[1:]
diff --git a/lib/spack/spack/cmd/extensions.py b/lib/spack/spack/cmd/extensions.py
index 11659e0c96..b5c484305f 100644
--- a/lib/spack/spack/cmd/extensions.py
+++ b/lib/spack/spack/cmd/extensions.py
@@ -22,7 +22,6 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import sys
import argparse
import llnl.util.tty as tty
@@ -34,6 +33,7 @@ import spack.cmd.find
description = "List extensions for package."
+
def setup_parser(subparser):
format_group = subparser.add_mutually_exclusive_group()
format_group.add_argument(
@@ -47,7 +47,8 @@ def setup_parser(subparser):
help='Show full dependency DAG of extensions')
subparser.add_argument(
- 'spec', nargs=argparse.REMAINDER, help='Spec of package to list extensions for')
+ 'spec', nargs=argparse.REMAINDER,
+ help='Spec of package to list extensions for')
def extensions(parser, args):
@@ -85,7 +86,8 @@ def extensions(parser, args):
#
# List specs of installed extensions.
#
- installed = [s.spec for s in spack.installed_db.installed_extensions_for(spec)]
+ installed = [
+ s.spec for s in spack.installed_db.installed_extensions_for(spec)]
print
if not installed:
tty.msg("None installed.")
@@ -102,4 +104,5 @@ def extensions(parser, args):
tty.msg("None activated.")
return
tty.msg("%d currently activated:" % len(activated))
- spack.cmd.find.display_specs(activated.values(), mode=args.mode, long=args.long)
+ spack.cmd.find.display_specs(
+ activated.values(), mode=args.mode, long=args.long)
diff --git a/lib/spack/spack/cmd/fetch.py b/lib/spack/spack/cmd/fetch.py
index e40caaa234..c1ac2ed48d 100644
--- a/lib/spack/spack/cmd/fetch.py
+++ b/lib/spack/spack/cmd/fetch.py
@@ -29,16 +29,21 @@ import spack.cmd
description = "Fetch archives for packages"
+
def setup_parser(subparser):
subparser.add_argument(
'-n', '--no-checksum', action='store_true', dest='no_checksum',
help="Do not check packages against checksum")
subparser.add_argument(
- '-m', '--missing', action='store_true', help="Also fetch all missing dependencies")
+ '-m', '--missing', action='store_true',
+ help="Also fetch all missing dependencies")
subparser.add_argument(
- '-D', '--dependencies', action='store_true', help="Also fetch all dependencies")
+ '-D', '--dependencies', action='store_true',
+ help="Also fetch all dependencies")
subparser.add_argument(
- 'packages', nargs=argparse.REMAINDER, help="specs of packages to fetch")
+ 'packages', nargs=argparse.REMAINDER,
+ help="specs of packages to fetch")
+
def fetch(parser, args):
if not args.packages:
@@ -50,8 +55,7 @@ def fetch(parser, args):
specs = spack.cmd.parse_specs(args.packages, concretize=True)
for spec in specs:
if args.missing or args.dependencies:
- to_fetch = set()
- for s in spec.traverse():
+ for s in spec.traverse(deptype_query=spack.alldeps):
package = spack.repo.get(s)
if args.missing and package.installed:
continue
diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py
index 93c10a910f..d3ea38c573 100644
--- a/lib/spack/spack/cmd/find.py
+++ b/lib/spack/spack/cmd/find.py
@@ -31,6 +31,7 @@ import spack.spec
from llnl.util.lang import *
from llnl.util.tty.colify import *
from llnl.util.tty.color import *
+from spack.cmd import display_specs
description = "Find installed spack packages"
@@ -85,6 +86,11 @@ def setup_parser(subparser):
action='store_true',
dest='missing',
help='Show missing dependencies as well as installed specs.')
+ subparser.add_argument(
+ '-v', '--variants',
+ action='store_true',
+ dest='variants',
+ help='Show variants in output (can be long)')
subparser.add_argument('-M', '--only-missing',
action='store_true',
dest='only_missing',
@@ -98,88 +104,6 @@ def setup_parser(subparser):
help='optional specs to filter results')
-def gray_hash(spec, length):
- return colorize('@K{%s}' % spec.dag_hash(length))
-
-
-def display_specs(specs, **kwargs):
- mode = kwargs.get('mode', 'short')
- hashes = kwargs.get('long', False)
- namespace = kwargs.get('namespace', False)
-
- hlen = 7
- if kwargs.get('very_long', False):
- hashes = True
- hlen = None
-
- nfmt = '.' if namespace else '_'
- format_string = '$%s$@$+' % nfmt
- flags = kwargs.get('show_flags', False)
- if flags:
- format_string = '$%s$@$%%+$+' % nfmt
-
- # Make a dict with specs keyed by architecture and compiler.
- index = index_by(specs, ('architecture', 'compiler'))
-
- # Traverse the index and print out each package
- for i, (architecture, compiler) in enumerate(sorted(index)):
- if i > 0:
- print
-
- header = "%s{%s} / %s{%s}" % (spack.spec.architecture_color,
- architecture, spack.spec.compiler_color,
- compiler)
- tty.hline(colorize(header), char='-')
-
- specs = index[(architecture, compiler)]
- specs.sort()
-
- abbreviated = [s.format(format_string, color=True) for s in specs]
- if mode == 'paths':
- # Print one spec per line along with prefix path
- width = max(len(s) for s in abbreviated)
- width += 2
- format = " %%-%ds%%s" % width
-
- for abbrv, spec in zip(abbreviated, specs):
- if hashes:
- print(gray_hash(spec, hlen), )
- print(format % (abbrv, spec.prefix))
-
- elif mode == 'deps':
- for spec in specs:
- print(spec.tree(
- format=format_string,
- color=True,
- indent=4,
- prefix=(lambda s: gray_hash(s, hlen)) if hashes else None))
-
- elif mode == 'short':
- # Print columns of output if not printing flags
- if not flags:
-
- def fmt(s):
- string = ""
- if hashes:
- string += gray_hash(s, hlen) + ' '
- string += s.format('$-%s$@$+' % nfmt, color=True)
-
- return string
-
- colify(fmt(s) for s in specs)
- # Print one entry per line if including flags
- else:
- for spec in specs:
- # Print the hash if necessary
- hsh = gray_hash(spec, hlen) + ' ' if hashes else ''
- print(hsh + spec.format(format_string, color=True) + '\n')
-
- else:
- raise ValueError(
- "Invalid mode for display_specs: %s. Must be one of (paths,"
- "deps, short)." % mode) # NOQA: ignore=E501
-
-
def query_arguments(args):
# Check arguments
if args.explicit and args.implicit:
@@ -236,4 +160,6 @@ def find(parser, args):
mode=args.mode,
long=args.long,
very_long=args.very_long,
- show_flags=args.show_flags)
+ show_flags=args.show_flags,
+ namespace=args.namespace,
+ variants=args.variants)
diff --git a/lib/spack/spack/cmd/graph.py b/lib/spack/spack/cmd/graph.py
index da65121836..8faabfbb7b 100644
--- a/lib/spack/spack/cmd/graph.py
+++ b/lib/spack/spack/cmd/graph.py
@@ -30,6 +30,7 @@ from spack.graph import *
description = "Generate graphs of package dependency relationships."
+
def setup_parser(subparser):
setup_parser.parser = subparser
@@ -42,10 +43,12 @@ def setup_parser(subparser):
help="Generate graph in dot format and print to stdout.")
subparser.add_argument(
- '--concretize', action='store_true', help="Concretize specs before graphing.")
+ '--concretize', action='store_true',
+ help="Concretize specs before graphing.")
subparser.add_argument(
- 'specs', nargs=argparse.REMAINDER, help="specs of packages to graph.")
+ 'specs', nargs=argparse.REMAINDER,
+ help="specs of packages to graph.")
def graph(parser, args):
@@ -56,11 +59,11 @@ def graph(parser, args):
setup_parser.parser.print_help()
return 1
- if args.dot: # Dot graph only if asked for.
+ if args.dot: # Dot graph only if asked for.
graph_dot(*specs)
- elif specs: # ascii is default: user doesn't need to provide it explicitly
+ elif specs: # ascii is default: user doesn't need to provide it explicitly
graph_ascii(specs[0], debug=spack.debug)
for spec in specs[1:]:
- print # extra line bt/w independent graphs
+ print # extra line bt/w independent graphs
graph_ascii(spec, debug=spack.debug)
diff --git a/lib/spack/spack/cmd/help.py b/lib/spack/spack/cmd/help.py
index 1d23161839..5bc8fc3e74 100644
--- a/lib/spack/spack/cmd/help.py
+++ b/lib/spack/spack/cmd/help.py
@@ -22,14 +22,14 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import sys
-
description = "Get help on spack and its commands"
+
def setup_parser(subparser):
subparser.add_argument('help_command', nargs='?', default=None,
help='command to get help on')
+
def help(parser, args):
if args.help_command:
parser.parse_args([args.help_command, '-h'])
diff --git a/lib/spack/spack/cmd/info.py b/lib/spack/spack/cmd/info.py
index 64d0d20e24..498518057b 100644
--- a/lib/spack/spack/cmd/info.py
+++ b/lib/spack/spack/cmd/info.py
@@ -29,9 +29,11 @@ import spack.fetch_strategy as fs
description = "Get detailed information on a particular package"
+
def padder(str_list, extra=0):
"""Return a function to pad elements of a list."""
length = max(len(str(s)) for s in str_list) + extra
+
def pad(string):
string = str(string)
padding = max(0, length - len(string))
@@ -40,7 +42,8 @@ def padder(str_list, extra=0):
def setup_parser(subparser):
- subparser.add_argument('name', metavar="PACKAGE", help="Name of package to get info for.")
+ subparser.add_argument(
+ 'name', metavar="PACKAGE", help="Name of package to get info for.")
def print_text_info(pkg):
@@ -81,12 +84,14 @@ def print_text_info(pkg):
print " " + fmt % (name, default, desc)
- print
- print "Dependencies:"
- if pkg.dependencies:
- colify(pkg.dependencies, indent=4)
- else:
- print " None"
+ for deptype in ('build', 'link', 'run'):
+ print
+ print "%s Dependencies:" % deptype.capitalize()
+ deps = pkg.dependencies_of_type(deptype)
+ if deps:
+ colify(deps, indent=4)
+ else:
+ print " None"
print
print "Virtual packages: "
diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py
index 9d3175786b..7663a97a28 100644
--- a/lib/spack/spack/cmd/install.py
+++ b/lib/spack/spack/cmd/install.py
@@ -31,6 +31,7 @@ import spack.cmd
description = "Build and install packages"
+
def setup_parser(subparser):
subparser.add_argument(
'-i', '--ignore-dependencies', action='store_true', dest='ignore_deps',
@@ -52,9 +53,16 @@ def setup_parser(subparser):
help="Display verbose build output while installing.")
subparser.add_argument(
'--fake', action='store_true', dest='fake',
- help="Fake install. Just remove the prefix and touch a fake file in it.")
+ help="Fake install. Just remove prefix and create a fake file.")
+ subparser.add_argument(
+ '--dirty', action='store_true', dest='dirty',
+ help="Install a package *without* cleaning the environment.")
+ subparser.add_argument(
+ 'packages', nargs=argparse.REMAINDER,
+ help="specs of packages to install")
subparser.add_argument(
- 'packages', nargs=argparse.REMAINDER, help="specs of packages to install")
+ '--run-tests', action='store_true', dest='run_tests',
+ help="Run tests during installation of a package.")
def install(parser, args):
@@ -77,6 +85,8 @@ def install(parser, args):
keep_stage=args.keep_stage,
ignore_deps=args.ignore_deps,
make_jobs=args.jobs,
+ run_tests=args.run_tests,
verbose=args.verbose,
fake=args.fake,
+ dirty=args.dirty,
explicit=True)
diff --git a/lib/spack/spack/cmd/list.py b/lib/spack/spack/cmd/list.py
index 1e3699cee0..c921efd1bd 100644
--- a/lib/spack/spack/cmd/list.py
+++ b/lib/spack/spack/cmd/list.py
@@ -29,36 +29,62 @@ from llnl.util.tty.colify import colify
import spack
import fnmatch
+import re
+
+description = "List available spack packages"
-description ="List available spack packages"
def setup_parser(subparser):
subparser.add_argument(
'filter', nargs=argparse.REMAINDER,
help='Optional glob patterns to filter results.')
subparser.add_argument(
- '-i', '--insensitive', action='store_true', default=False,
- help='Filtering will be case insensitive.')
+ '-s', '--sensitive', action='store_true', default=False,
+        help='Use case-sensitive filtering. By default, filtering is case '
+             'insensitive unless the query contains a capital letter.')
+ subparser.add_argument(
+ '-d', '--search-description', action='store_true', default=False,
+ help='Filtering will also search the description for a match.')
def list(parser, args):
# Start with all package names.
- pkgs = spack.repo.all_package_names()
+ pkgs = set(spack.repo.all_package_names())
# filter if a filter arg was provided
if args.filter:
- def match(p, f):
- if args.insensitive:
- p = p.lower()
- f = f.lower()
- return fnmatch.fnmatchcase(p, f)
- pkgs = [p for p in pkgs if any(match(p, f) for f in args.filter)]
+ res = []
+ for f in args.filter:
+ if '*' not in f and '?' not in f:
+ r = fnmatch.translate('*' + f + '*')
+ else:
+ r = fnmatch.translate(f)
+
+ re_flags = re.I
+            if any(l.isupper() for l in f) or args.sensitive:
+ re_flags = 0
+ rc = re.compile(r, flags=re_flags)
+ res.append(rc)
+
+ if args.search_description:
+ def match(p, f):
+ if f.match(p):
+ return True
+
+ pkg = spack.repo.get(p)
+ if pkg.__doc__:
+ return f.match(pkg.__doc__)
+ return False
+ else:
+ def match(p, f):
+ return f.match(p)
+ pkgs = [p for p in pkgs if any(match(p, f) for f in res)]
# sort before displaying.
- sorted_packages = sorted(pkgs, key=lambda s:s.lower())
+ sorted_packages = sorted(pkgs, key=lambda s: s.lower())
# Print all the package names in columns
- indent=0
+ indent = 0
if sys.stdout.isatty():
tty.msg("%d packages." % len(sorted_packages))
colify(sorted_packages, indent=indent)
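
Note: the rewritten filter in list.py implements smart-case matching: each
glob is translated to a regular expression and compiled case-insensitively
unless -s is given or the query contains a capital letter (hence the
l.isupper() call above). A minimal standalone sketch, with hypothetical
package names:

    import fnmatch
    import re

    def compile_filter(f, sensitive=False):
        # Bare substrings get implicit wildcards, as in list() above.
        if '*' not in f and '?' not in f:
            f = '*' + f + '*'
        flags = 0 if (sensitive or any(c.isupper() for c in f)) else re.I
        return re.compile(fnmatch.translate(f), flags=flags)

    pkgs = ['OpenBLAS', 'openmpi', 'zlib']   # hypothetical names
    rc = compile_filter('open')              # lowercase query: insensitive
    print([p for p in pkgs if rc.match(p)])  # ['OpenBLAS', 'openmpi']
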
diff --git a/lib/spack/spack/cmd/load.py b/lib/spack/spack/cmd/load.py
index 54cf01eb43..85190a5d0b 100644
--- a/lib/spack/spack/cmd/load.py
+++ b/lib/spack/spack/cmd/load.py
@@ -25,13 +25,16 @@
import argparse
import spack.modules
-description ="Add package to environment using modules."
+description = "Add package to environment using modules."
+
def setup_parser(subparser):
"""Parser is only constructed so that this prints a nice help
message with -h. """
subparser.add_argument(
- 'spec', nargs=argparse.REMAINDER, help='Spec of package to load with modules.')
+ 'spec', nargs=argparse.REMAINDER,
+ help="Spec of package to load with modules. "
+ "(If -, read specs from STDIN)")
def load(parser, args):
diff --git a/lib/spack/spack/cmd/location.py b/lib/spack/spack/cmd/location.py
index b0dbb1a550..b9c8b5c330 100644
--- a/lib/spack/spack/cmd/location.py
+++ b/lib/spack/spack/cmd/location.py
@@ -22,8 +22,6 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import os
-import sys
import argparse
import llnl.util.tty as tty
@@ -32,16 +30,19 @@ from llnl.util.filesystem import join_path
import spack
import spack.cmd
-description="Print out locations of various directories used by Spack"
+description = "Print out locations of various directories used by Spack"
+
def setup_parser(subparser):
global directories
directories = subparser.add_mutually_exclusive_group()
directories.add_argument(
- '-m', '--module-dir', action='store_true', help="Spack python module directory.")
+ '-m', '--module-dir', action='store_true',
+ help="Spack python module directory.")
directories.add_argument(
- '-r', '--spack-root', action='store_true', help="Spack installation root.")
+ '-r', '--spack-root', action='store_true',
+ help="Spack installation root.")
directories.add_argument(
'-i', '--install-dir', action='store_true',
@@ -53,15 +54,19 @@ def setup_parser(subparser):
'-P', '--packages', action='store_true',
help="Top-level packages directory for Spack.")
directories.add_argument(
- '-s', '--stage-dir', action='store_true', help="Stage directory for a spec.")
+ '-s', '--stage-dir', action='store_true',
+ help="Stage directory for a spec.")
directories.add_argument(
- '-S', '--stages', action='store_true', help="Top level Stage directory.")
+ '-S', '--stages', action='store_true',
+ help="Top level Stage directory.")
directories.add_argument(
'-b', '--build-dir', action='store_true',
- help="Checked out or expanded source directory for a spec (requires it to be staged first).")
+ help="Checked out or expanded source directory for a spec "
+ "(requires it to be staged first).")
subparser.add_argument(
- 'spec', nargs=argparse.REMAINDER, help="spec of package to fetch directory for.")
+ 'spec', nargs=argparse.REMAINDER,
+ help="spec of package to fetch directory for.")
def location(parser, args):
@@ -104,9 +109,9 @@ def location(parser, args):
if args.stage_dir:
print pkg.stage.path
- else: # args.build_dir is the default.
+ else: # args.build_dir is the default.
if not pkg.stage.source_path:
- tty.die("Build directory does not exist yet. Run this to create it:",
+ tty.die("Build directory does not exist yet. "
+ "Run this to create it:",
"spack stage " + " ".join(args.spec))
print pkg.stage.source_path
-
diff --git a/lib/spack/spack/cmd/md5.py b/lib/spack/spack/cmd/md5.py
index 3ba3c71562..506cf0913f 100644
--- a/lib/spack/spack/cmd/md5.py
+++ b/lib/spack/spack/cmd/md5.py
@@ -36,7 +36,7 @@ description = "Calculate md5 checksums for files/urls."
def setup_parser(subparser):
setup_parser.parser = subparser
subparser.add_argument('files', nargs=argparse.REMAINDER,
- help="Files to checksum.")
+ help="Files/urls to checksum.")
def compute_md5_checksum(url):
@@ -67,6 +67,7 @@ def md5(parser, args):
tty.warn("%s" % e)
# Dump the MD5s at last without interleaving them with downloads
- tty.msg("%d MD5 checksums:" % len(results))
+ checksum = 'checksum' if len(results) == 1 else 'checksums'
+ tty.msg("%d MD5 %s:" % (len(results), checksum))
for checksum, url in results:
print "%s %s" % (checksum, url)
diff --git a/lib/spack/spack/cmd/mirror.py b/lib/spack/spack/cmd/mirror.py
index d5f7abe212..585faaf524 100644
--- a/lib/spack/spack/cmd/mirror.py
+++ b/lib/spack/spack/cmd/mirror.py
@@ -23,7 +23,6 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
-import sys
from datetime import datetime
import argparse
@@ -40,6 +39,7 @@ from spack.util.spack_yaml import syaml_dict
description = "Manage mirrors."
+
def setup_parser(subparser):
subparser.add_argument(
'-n', '--no-checksum', action='store_true', dest='no_checksum',
@@ -61,8 +61,9 @@ def setup_parser(subparser):
'-D', '--dependencies', action='store_true',
help="Also fetch all dependencies")
create_parser.add_argument(
- '-o', '--one-version-per-spec', action='store_const', const=1, default=0,
- help="Only fetch one 'preferred' version per spec, not all known versions.")
+ '-o', '--one-version-per-spec', action='store_const',
+ const=1, default=0,
+ help="Only fetch one 'preferred' version per spec, not all known.")
scopes = spack.config.config_scopes
@@ -70,7 +71,7 @@ def setup_parser(subparser):
add_parser = sp.add_parser('add', help=mirror_add.__doc__)
add_parser.add_argument('name', help="Mnemonic name for mirror.")
add_parser.add_argument(
- 'url', help="URL of mirror directory created by 'spack mirror create'.")
+ 'url', help="URL of mirror directory from 'spack mirror create'.")
add_parser.add_argument(
'--scope', choices=scopes, default=spack.cmd.default_modify_scope,
help="Configuration scope to modify.")
@@ -107,7 +108,7 @@ def mirror_add(args):
tty.die("Mirror with url %s already exists." % url)
# should only be one item per mirror dict.
- items = [(n,u) for n,u in mirrors.items()]
+ items = [(n, u) for n, u in mirrors.items()]
items.insert(0, (args.name, url))
mirrors = syaml_dict(items)
spack.config.update_config('mirrors', mirrors, scope=args.scope)
@@ -121,7 +122,7 @@ def mirror_remove(args):
if not mirrors:
mirrors = syaml_dict()
- if not name in mirrors:
+ if name not in mirrors:
tty.die("No mirror with name %s" % name)
old_value = mirrors.pop(name)
@@ -152,7 +153,7 @@ def _read_specs_from_file(filename):
s.package
specs.append(s)
except SpackError, e:
- tty.die("Parse error in %s, line %d:" % (args.file, i+1),
+ tty.die("Parse error in %s, line %d:" % (args.file, i + 1),
">>> " + string, str(e))
return specs
@@ -179,7 +180,7 @@ def mirror_create(args):
new_specs = set()
for spec in specs:
spec.concretize()
- for s in spec.traverse():
+ for s in spec.traverse(deptype_query=spack.alldeps):
new_specs.add(s)
specs = list(new_specs)
@@ -214,10 +215,10 @@ def mirror_create(args):
def mirror(parser, args):
- action = { 'create' : mirror_create,
- 'add' : mirror_add,
- 'remove' : mirror_remove,
- 'rm' : mirror_remove,
- 'list' : mirror_list }
+ action = {'create': mirror_create,
+ 'add': mirror_add,
+ 'remove': mirror_remove,
+ 'rm': mirror_remove,
+ 'list': mirror_list}
action[args.mirror_command](args)
diff --git a/lib/spack/spack/cmd/module.py b/lib/spack/spack/cmd/module.py
index 5292d42225..2d0b83fe00 100644
--- a/lib/spack/spack/cmd/module.py
+++ b/lib/spack/spack/cmd/module.py
@@ -22,83 +22,241 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+from __future__ import print_function
+
+import collections
import os
import shutil
import sys
import llnl.util.tty as tty
import spack.cmd
-from llnl.util.filesystem import mkdirp
+import spack.cmd.common.arguments as arguments
+import llnl.util.filesystem as filesystem
from spack.modules import module_types
-from spack.util.string import *
-description = "Manipulate modules and dotkits."
+description = "Manipulate module files"
+
+# Dictionary that will be populated with the list of sub-commands
+# Each sub-command must be callable and accept 3 arguments:
+# - mtype : the type of the module file
+# - specs : the list of specs to be processed
+# - args : namespace containing the parsed command line arguments
+callbacks = {}
+
+
+def subcommand(subparser_name):
+ """Registers a function in the callbacks dictionary"""
+ def decorator(callback):
+ callbacks[subparser_name] = callback
+ return callback
+ return decorator
def setup_parser(subparser):
- sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='module_command')
+ sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='subparser_name')
- sp.add_parser('refresh', help='Regenerate all module files.')
+ # spack module refresh
+ refresh_parser = sp.add_parser('refresh', help='Regenerate module files')
+ refresh_parser.add_argument(
+ '--delete-tree',
+ help='Delete the module file tree before refresh',
+ action='store_true'
+ )
+ arguments.add_common_arguments(
+ refresh_parser, ['constraint', 'module_type', 'yes_to_all']
+ )
- find_parser = sp.add_parser('find', help='Find module files for packages.')
- find_parser.add_argument('module_type',
- help="Type of module to find file for. [" +
- '|'.join(module_types) + "]")
- find_parser.add_argument('spec',
- nargs='+',
- help='spec to find a module file for.')
+ # spack module find
+ find_parser = sp.add_parser('find', help='Find module files for packages')
+ arguments.add_common_arguments(find_parser, ['constraint', 'module_type'])
+ # spack module rm
+ rm_parser = sp.add_parser('rm', help='Remove module files')
+ arguments.add_common_arguments(
+ rm_parser, ['constraint', 'module_type', 'yes_to_all']
+ )
-def module_find(mtype, spec_array):
- """Look at all installed packages and see if the spec provided
- matches any. If it does, check whether there is a module file
- of type <mtype> there, and print out the name that the user
- should type to use that package's module.
- """
- if mtype not in module_types:
- tty.die("Invalid module type: '%s'. Options are %s" %
- (mtype, comma_or(module_types)))
+ # spack module loads
+ loads_parser = sp.add_parser(
+ 'loads',
+        help='Print the list of modules associated with a constraint'
+ )
+ loads_parser.add_argument(
+ '--input-only', action='store_false', dest='shell',
+ help='Generate input for module command (instead of a shell script)'
+ )
+ loads_parser.add_argument(
+ '-p', '--prefix', dest='prefix', default='',
+ help='Prepend to module names when issuing module load commands'
+ )
+ arguments.add_common_arguments(
+ loads_parser, ['constraint', 'module_type', 'recurse_dependencies']
+ )
- specs = spack.cmd.parse_specs(spec_array)
- if len(specs) > 1:
- tty.die("You can only pass one spec.")
- spec = specs[0]
- specs = spack.installed_db.query(spec)
+class MultipleMatches(Exception):
+ pass
+
+
+class NoMatch(Exception):
+ pass
+
+
+@subcommand('loads')
+def loads(mtype, specs, args):
+ """Prompt the list of modules associated with a list of specs"""
+ # Get a comprehensive list of specs
+ if args.recurse_dependencies:
+ specs_from_user_constraint = specs[:]
+ specs = []
+ # FIXME : during module file creation nodes seem to be visited
+ # FIXME : multiple times even if cover='nodes' is given. This
+        # FIXME : workaround keeps the list of specs unique anyhow.
+ # FIXME : (same problem as in spack/modules.py)
+ seen = set()
+ seen_add = seen.add
+ for spec in specs_from_user_constraint:
+ specs.extend(
+ [item for item in spec.traverse(order='post', cover='nodes')
+ if not (item in seen or seen_add(item))]
+ )
+
+ module_cls = module_types[mtype]
+ modules = [(spec, module_cls(spec).use_name)
+ for spec in specs if os.path.exists(module_cls(spec).file_name)]
+
+ module_commands = {
+ 'tcl': 'module load ',
+ 'dotkit': 'dotkit use '
+ }
+
+ d = {
+ 'command': '' if not args.shell else module_commands[mtype],
+ 'prefix': args.prefix
+ }
+
+ prompt_template = '{comment}{command}{prefix}{name}'
+ for spec, mod in modules:
+ d['comment'] = '' if not args.shell else '# {0}\n'.format(
+ spec.format())
+ d['name'] = mod
+ print(prompt_template.format(**d))
+
+
+@subcommand('find')
+def find(mtype, specs, args):
+ """
+ Look at all installed packages and see if the spec provided
+ matches any. If it does, check whether there is a module file
+ of type <mtype> there, and print out the name that the user
+ should type to use that package's module.
+ """
if len(specs) == 0:
- tty.die("No installed packages match spec %s" % spec)
+ raise NoMatch()
if len(specs) > 1:
- tty.error("Multiple matches for spec %s. Choose one:" % spec)
- for s in specs:
- sys.stderr.write(s.tree(color=True))
- sys.exit(1)
+ raise MultipleMatches()
- mt = module_types[mtype]
- mod = mt(specs[0])
+ spec = specs.pop()
+ mod = module_types[mtype](spec)
if not os.path.isfile(mod.file_name):
tty.die("No %s module is installed for %s" % (mtype, spec))
-
print(mod.use_name)
-def module_refresh():
- """Regenerate all module files for installed packages known to
- spack (some packages may no longer exist)."""
- specs = [s for s in spack.installed_db.query(installed=True, known=True)]
+@subcommand('rm')
+def rm(mtype, specs, args):
+ """Deletes module files associated with items in specs"""
+ module_cls = module_types[mtype]
+ specs_with_modules = [
+ spec for spec in specs if os.path.exists(module_cls(spec).file_name)]
+ modules = [module_cls(spec) for spec in specs_with_modules]
+
+ if not modules:
+ tty.msg('No module file matches your query')
+ raise SystemExit(1)
+
+ # Ask for confirmation
+ if not args.yes_to_all:
+ tty.msg(
+            'You are about to remove {0} module files for the following '
+            'specs:\n'
+ .format(mtype))
+ spack.cmd.display_specs(specs_with_modules, long=True)
+ print('')
+        spack.cmd.ask_for_confirmation('Do you want to proceed? ')
+
+ # Remove the module files
+ for s in modules:
+ s.remove()
+
+
+@subcommand('refresh')
+def refresh(mtype, specs, args):
+ """Regenerate module files for item in specs"""
+ # Prompt a message to the user about what is going to change
+ if not specs:
+ tty.msg('No package matches your query')
+ return
+
+ if not args.yes_to_all:
+ tty.msg(
+ 'You are about to regenerate {name} module files for:\n'
+ .format(name=mtype))
+ spack.cmd.display_specs(specs, long=True)
+ print('')
+        spack.cmd.ask_for_confirmation('Do you want to proceed? ')
+
+ cls = module_types[mtype]
+
+ # Detect name clashes
+ writers = [cls(spec) for spec in specs]
+ file2writer = collections.defaultdict(list)
+ for item in writers:
+ file2writer[item.file_name].append(item)
+
+ if len(file2writer) != len(writers):
+ message = 'Name clashes detected in module files:\n'
+ for filename, writer_list in file2writer.items():
+ if len(writer_list) > 1:
+ message += '\nfile : {0}\n'.format(filename)
+ for x in writer_list:
+ message += 'spec : {0}\n'.format(x.spec.format(color=True))
+ tty.error(message)
+ tty.error('Operation aborted')
+ raise SystemExit(1)
- for name, cls in module_types.items():
- tty.msg("Regenerating %s module files." % name)
- if os.path.isdir(cls.path):
- shutil.rmtree(cls.path, ignore_errors=False)
- mkdirp(cls.path)
- for spec in specs:
- cls(spec).write()
+ # Proceed regenerating module files
+ tty.msg('Regenerating {name} module files'.format(name=mtype))
+ if os.path.isdir(cls.path) and args.delete_tree:
+ shutil.rmtree(cls.path, ignore_errors=False)
+ filesystem.mkdirp(cls.path)
+ for x in writers:
+ x.write(overwrite=True)
def module(parser, args):
- if args.module_command == 'refresh':
- module_refresh()
+ # Qualifiers to be used when querying the db for specs
+ constraint_qualifiers = {
+ 'refresh': {
+ 'installed': True,
+ 'known': True
+ },
+ }
+ arguments.ConstraintAction.qualifiers.update(constraint_qualifiers)
- elif args.module_command == 'find':
- module_find(args.module_type, args.spec)
+ module_type = args.module_type
+ constraint = args.constraint
+ try:
+ callbacks[args.subparser_name](module_type, args.specs, args)
+ except MultipleMatches:
+ message = ('the constraint \'{query}\' matches multiple packages, '
+ 'and this is not allowed in this context')
+ tty.error(message.format(query=constraint))
+ for s in args.specs:
+ sys.stderr.write(s.format(color=True) + '\n')
+ raise SystemExit(1)
+ except NoMatch:
+        message = ('the constraint \'{query}\' matches no package, '
+ 'and this is not allowed in this context')
+ tty.die(message.format(query=constraint))
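
Note: module.py now dispatches sub-commands through a decorator-populated
registry instead of an if/elif chain. A minimal standalone sketch of the
pattern, with a hypothetical callback:

    callbacks = {}

    def subcommand(name):
        """Register the decorated function under the given sub-command name."""
        def decorator(callback):
            callbacks[name] = callback
            return callback
        return decorator

    @subcommand('refresh')
    def refresh(mtype, specs, args):
        print('refreshing %s module files for %d specs' % (mtype, len(specs)))

    # Dispatch on the parsed sub-command name, as module() does above.
    callbacks['refresh']('tcl', ['zlib', 'openmpi'], None)
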
diff --git a/lib/spack/spack/cmd/package-list.py b/lib/spack/spack/cmd/package-list.py
index 6c5c4ae8c6..9ed42de823 100644
--- a/lib/spack/spack/cmd/package-list.py
+++ b/lib/spack/spack/cmd/package-list.py
@@ -22,10 +22,8 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import re
import cgi
from StringIO import StringIO
-import llnl.util.tty as tty
from llnl.util.tty.colify import *
import spack
@@ -34,21 +32,22 @@ description = "Print a list of all packages in reStructuredText."
def github_url(pkg):
"""Link to a package file on github."""
- return ("https://github.com/llnl/spack/blob/master/var/spack/packages/%s/package.py" %
- pkg.name)
+ url = "https://github.com/llnl/spack/blob/master/var/spack/packages/%s/package.py"
+ return (url % pkg.name)
def rst_table(elts):
"""Print out a RST-style table."""
cols = StringIO()
ncol, widths = colify(elts, output=cols, tty=True)
- header = " ".join("=" * (w-1) for w in widths)
+ header = " ".join("=" * (w - 1) for w in widths)
return "%s\n%s%s" % (header, cols.getvalue(), header)
def print_rst_package_list():
"""Print out information on all packages in restructured text."""
- pkgs = sorted(spack.repo.all_packages(), key=lambda s:s.name.lower())
+ pkgs = sorted(spack.repo.all_packages(), key=lambda s: s.name.lower())
+ pkg_names = [p.name for p in pkgs]
print ".. _package-list:"
print
@@ -62,7 +61,7 @@ def print_rst_package_list():
print "Spack currently has %d mainline packages:" % len(pkgs)
print
- print rst_table("`%s`_" % p.name for p in pkgs)
+ print rst_table("`%s`_" % p for p in pkg_names)
print
print "-----"
@@ -79,12 +78,17 @@ def print_rst_package_list():
print
if pkg.versions:
print "Versions:"
- print " " + ", ".join(str(v) for v in reversed(sorted(pkg.versions)))
- if pkg.dependencies:
- print "Dependencies"
- print " " + ", ".join("`%s`_" % d if d != "mpi" else d
- for d in pkg.dependencies)
- print
+ print " " + ", ".join(str(v) for v in
+ reversed(sorted(pkg.versions)))
+
+ for deptype in spack.alldeps:
+ deps = pkg.dependencies_of_type(deptype)
+ if deps:
+ print "%s Dependencies" % deptype.capitalize()
+ print " " + ", ".join("%s_" % d if d in pkg_names
+ else d for d in deps)
+ print
+
print "Description:"
print pkg.format_doc(indent=2)
print
diff --git a/lib/spack/spack/cmd/patch.py b/lib/spack/spack/cmd/patch.py
index a5507e42cf..9c72da40b5 100644
--- a/lib/spack/spack/cmd/patch.py
+++ b/lib/spack/spack/cmd/patch.py
@@ -29,14 +29,16 @@ import spack.cmd
import spack
-description="Patch expanded archive sources in preparation for install"
+description = "Patch expanded archive sources in preparation for install"
+
def setup_parser(subparser):
subparser.add_argument(
'-n', '--no-checksum', action='store_true', dest='no_checksum',
help="Do not check downloaded packages against checksum")
subparser.add_argument(
- 'packages', nargs=argparse.REMAINDER, help="specs of packages to stage")
+ 'packages', nargs=argparse.REMAINDER,
+ help="specs of packages to stage")
def patch(parser, args):
diff --git a/lib/spack/spack/cmd/pkg.py b/lib/spack/spack/cmd/pkg.py
index a24c2759fe..7791b93cf5 100644
--- a/lib/spack/spack/cmd/pkg.py
+++ b/lib/spack/spack/cmd/pkg.py
@@ -33,6 +33,7 @@ from spack.util.executable import *
description = "Query packages associated with particular git revisions."
+
def setup_parser(subparser):
sp = subparser.add_subparsers(
metavar='SUBCOMMAND', dest='pkg_command')
@@ -46,22 +47,28 @@ def setup_parser(subparser):
help="Revision to list packages for.")
diff_parser = sp.add_parser('diff', help=pkg_diff.__doc__)
- diff_parser.add_argument('rev1', nargs='?', default='HEAD^',
- help="Revision to compare against.")
- diff_parser.add_argument('rev2', nargs='?', default='HEAD',
- help="Revision to compare to rev1 (default is HEAD).")
+ diff_parser.add_argument(
+ 'rev1', nargs='?', default='HEAD^',
+ help="Revision to compare against.")
+ diff_parser.add_argument(
+ 'rev2', nargs='?', default='HEAD',
+ help="Revision to compare to rev1 (default is HEAD).")
add_parser = sp.add_parser('added', help=pkg_added.__doc__)
- add_parser.add_argument('rev1', nargs='?', default='HEAD^',
- help="Revision to compare against.")
- add_parser.add_argument('rev2', nargs='?', default='HEAD',
- help="Revision to compare to rev1 (default is HEAD).")
+ add_parser.add_argument(
+ 'rev1', nargs='?', default='HEAD^',
+ help="Revision to compare against.")
+ add_parser.add_argument(
+ 'rev2', nargs='?', default='HEAD',
+ help="Revision to compare to rev1 (default is HEAD).")
rm_parser = sp.add_parser('removed', help=pkg_removed.__doc__)
- rm_parser.add_argument('rev1', nargs='?', default='HEAD^',
- help="Revision to compare against.")
- rm_parser.add_argument('rev2', nargs='?', default='HEAD',
- help="Revision to compare to rev1 (default is HEAD).")
+ rm_parser.add_argument(
+ 'rev1', nargs='?', default='HEAD^',
+ help="Revision to compare against.")
+ rm_parser.add_argument(
+ 'rev2', nargs='?', default='HEAD',
+ help="Revision to compare to rev1 (default is HEAD).")
def get_git():
@@ -88,7 +95,8 @@ def pkg_add(args):
for pkg_name in args.packages:
filename = spack.repo.filename_for_package_name(pkg_name)
if not os.path.isfile(filename):
- tty.die("No such package: %s. Path does not exist:" % pkg_name, filename)
+ tty.die("No such package: %s. Path does not exist:" %
+ pkg_name, filename)
git = get_git()
git('-C', spack.packages_path, 'add', filename)
@@ -112,7 +120,8 @@ def pkg_diff(args):
if u1:
print "%s:" % args.rev1
colify(sorted(u1), indent=4)
- if u1: print
+ if u1:
+ print
if u2:
print "%s:" % args.rev2
@@ -122,19 +131,21 @@ def pkg_diff(args):
def pkg_removed(args):
"""Show packages removed since a commit."""
u1, u2 = diff_packages(args.rev1, args.rev2)
- if u1: colify(sorted(u1))
+ if u1:
+ colify(sorted(u1))
def pkg_added(args):
"""Show packages added since a commit."""
u1, u2 = diff_packages(args.rev1, args.rev2)
- if u2: colify(sorted(u2))
+ if u2:
+ colify(sorted(u2))
def pkg(parser, args):
- action = { 'add' : pkg_add,
- 'diff' : pkg_diff,
- 'list' : pkg_list,
- 'removed' : pkg_removed,
- 'added' : pkg_added }
+ action = {'add': pkg_add,
+ 'diff': pkg_diff,
+ 'list': pkg_list,
+ 'removed': pkg_removed,
+ 'added': pkg_added}
action[args.pkg_command](args)
diff --git a/lib/spack/spack/cmd/providers.py b/lib/spack/spack/cmd/providers.py
index e9007486d2..0f4a97cc4a 100644
--- a/lib/spack/spack/cmd/providers.py
+++ b/lib/spack/spack/cmd/providers.py
@@ -22,7 +22,6 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import os
import argparse
from llnl.util.tty.colify import colify
@@ -30,11 +29,13 @@ from llnl.util.tty.colify import colify
import spack
import spack.cmd
-description ="List packages that provide a particular virtual package"
+description = "List packages that provide a particular virtual package"
+
def setup_parser(subparser):
- subparser.add_argument('vpkg_spec', metavar='VPACKAGE_SPEC', nargs=argparse.REMAINDER,
- help='Find packages that provide this virtual package')
+ subparser.add_argument(
+ 'vpkg_spec', metavar='VPACKAGE_SPEC', nargs=argparse.REMAINDER,
+ help='Find packages that provide this virtual package')
def providers(parser, args):
diff --git a/lib/spack/spack/cmd/purge.py b/lib/spack/spack/cmd/purge.py
index 7b33ef7f69..26d160635c 100644
--- a/lib/spack/spack/cmd/purge.py
+++ b/lib/spack/spack/cmd/purge.py
@@ -22,9 +22,37 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+import spack
import spack.stage as stage
-description = "Remove all temporary build files and downloaded archives"
+description = "Remove temporary build files and/or downloaded archives"
+
+
+def setup_parser(subparser):
+ subparser.add_argument(
+        '-s', '--stage', action='store_true',
+ help="Remove all temporary build stages (default).")
+ subparser.add_argument(
+ '-d', '--downloads', action='store_true',
+ help="Remove cached downloads.")
+ subparser.add_argument(
+ '-u', '--user-cache', action='store_true',
+ help="Remove caches in user home directory. Includes virtual indices.")
+ subparser.add_argument(
+ '-a', '--all', action='store_true',
+ help="Remove all of the above.")
+
def purge(parser, args):
- stage.purge()
+ # Special case: no flags.
+ if not any((args.stage, args.downloads, args.user_cache, args.all)):
+ stage.purge()
+ return
+
+ # handle other flags with fall through.
+ if args.stage or args.all:
+ stage.purge()
+ if args.downloads or args.all:
+ spack.fetch_cache.destroy()
+ if args.user_cache or args.all:
+ spack.user_cache.destroy()
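
Note: '-s' must not default to True here: args.stage would then always be
truthy, the no-flag special case could never fire, and 'spack purge -d' would
wipe build stages as well as downloads. A minimal standalone sketch of the
intended fall-through (same flag names; not Spack code):

    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument('-s', '--stage', action='store_true')
    parser.add_argument('-d', '--downloads', action='store_true')
    parser.add_argument('-a', '--all', action='store_true')
    args = parser.parse_args(['-d'])

    # No flags at all falls back to purging stages only.
    if not any((args.stage, args.downloads, args.all)):
        print('purge stages (default)')
    else:
        if args.stage or args.all:
            print('purge stages')
        if args.downloads or args.all:
            print('purge downloads')  # only this line fires for '-d'
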
diff --git a/lib/spack/spack/cmd/python.py b/lib/spack/spack/cmd/python.py
index 59423271b9..12727cb599 100644
--- a/lib/spack/spack/cmd/python.py
+++ b/lib/spack/spack/cmd/python.py
@@ -30,18 +30,22 @@ import platform
import spack
+
def setup_parser(subparser):
subparser.add_argument(
'-c', dest='python_command', help='Command to execute.')
subparser.add_argument(
- 'python_args', nargs=argparse.REMAINDER, help="File to run plus arguments.")
+ 'python_args', nargs=argparse.REMAINDER,
+ help="File to run plus arguments.")
+
description = "Launch an interpreter as spack would launch a command"
+
def python(parser, args):
# Fake a main python shell by setting __name__ to __main__.
- console = code.InteractiveConsole({'__name__' : '__main__',
- 'spack' : spack})
+ console = code.InteractiveConsole({'__name__': '__main__',
+ 'spack': spack})
if "PYTHONSTARTUP" in os.environ:
startup_file = os.environ["PYTHONSTARTUP"]
diff --git a/lib/spack/spack/cmd/reindex.py b/lib/spack/spack/cmd/reindex.py
index 93eba7a0f1..e37eebbd92 100644
--- a/lib/spack/spack/cmd/reindex.py
+++ b/lib/spack/spack/cmd/reindex.py
@@ -22,10 +22,10 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import argparse
import spack
description = "Rebuild Spack's package database."
+
def reindex(parser, args):
spack.installed_db.reindex(spack.install_layout)
diff --git a/lib/spack/spack/cmd/repo.py b/lib/spack/spack/cmd/repo.py
index 399237b169..5ab2ac0833 100644
--- a/lib/spack/spack/cmd/repo.py
+++ b/lib/spack/spack/cmd/repo.py
@@ -23,20 +23,16 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
-import re
-import shutil
-from external import argparse
import llnl.util.tty as tty
-from llnl.util.filesystem import join_path, mkdirp
import spack.spec
import spack.config
-from spack.util.environment import get_path
from spack.repository import *
description = "Manage package source repositories."
+
def setup_parser(subparser):
sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='repo_command')
scopes = spack.config.config_scopes
@@ -57,13 +53,15 @@ def setup_parser(subparser):
# Add
add_parser = sp.add_parser('add', help=repo_add.__doc__)
- add_parser.add_argument('path', help="Path to a Spack package repository directory.")
+ add_parser.add_argument(
+ 'path', help="Path to a Spack package repository directory.")
add_parser.add_argument(
'--scope', choices=scopes, default=spack.cmd.default_modify_scope,
help="Configuration scope to modify.")
# Remove
- remove_parser = sp.add_parser('remove', help=repo_remove.__doc__, aliases=['rm'])
+ remove_parser = sp.add_parser(
+ 'remove', help=repo_remove.__doc__, aliases=['rm'])
remove_parser.add_argument(
'path_or_namespace',
help="Path or namespace of a Spack package repository.")
@@ -100,7 +98,8 @@ def repo_add(args):
# If that succeeds, finally add it to the configuration.
repos = spack.config.get_config('repos', args.scope)
- if not repos: repos = []
+ if not repos:
+ repos = []
if repo.root in repos or path in repos:
tty.die("Repository is already registered with Spack: %s" % path)
@@ -135,7 +134,7 @@ def repo_remove(args):
tty.msg("Removed repository %s with namespace '%s'."
% (repo.root, repo.namespace))
return
- except RepoError as e:
+ except RepoError:
continue
tty.die("No repository with path or namespace: %s"
@@ -149,7 +148,7 @@ def repo_list(args):
for r in roots:
try:
repos.append(Repo(r))
- except RepoError as e:
+ except RepoError:
continue
msg = "%d package repositor" % len(repos)
@@ -166,9 +165,9 @@ def repo_list(args):
def repo(parser, args):
- action = { 'create' : repo_create,
- 'list' : repo_list,
- 'add' : repo_add,
- 'remove' : repo_remove,
- 'rm' : repo_remove}
+ action = {'create': repo_create,
+ 'list': repo_list,
+ 'add': repo_add,
+ 'remove': repo_remove,
+ 'rm': repo_remove}
action[args.repo_command](args)
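
repo() dispatches through a dict keyed on the subcommand that argparse stored in args.repo_command. The idiom in isolation, with hypothetical handlers:

    def repo_list(args):
        print "listing repositories"

    def repo_add(args):
        print "adding", args

    action = {'list': repo_list, 'add': repo_add}
    action['add']('/path/to/repo')              # argparse would supply the key
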
diff --git a/lib/spack/spack/cmd/restage.py b/lib/spack/spack/cmd/restage.py
index 325d30662f..969afe09bd 100644
--- a/lib/spack/spack/cmd/restage.py
+++ b/lib/spack/spack/cmd/restage.py
@@ -31,6 +31,7 @@ import spack.cmd
description = "Revert checked out package source code."
+
def setup_parser(subparser):
subparser.add_argument('packages', nargs=argparse.REMAINDER,
help="specs of packages to restage")
diff --git a/lib/spack/spack/cmd/setup.py b/lib/spack/spack/cmd/setup.py
new file mode 100644
index 0000000000..b55e102c0e
--- /dev/null
+++ b/lib/spack/spack/cmd/setup.py
@@ -0,0 +1,94 @@
+##############################################################################
+# Copyright (c) 2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Written by Elizabeth Fischer
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License (as published by
+# the Free Software Foundation) version 2.1 dated February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import sys
+import os
+import argparse
+
+import llnl.util.tty as tty
+
+import spack
+import spack.cmd
+from spack.cmd.edit import edit_package
+from spack.stage import DIYStage
+
+description = "Create a configuration script and module, but don't build."
+
+
+def setup_parser(subparser):
+ subparser.add_argument(
+ '-i', '--ignore-dependencies', action='store_true', dest='ignore_deps',
+ help="Do not try to install dependencies of requested packages.")
+ subparser.add_argument(
+ '-v', '--verbose', action='store_true', dest='verbose',
+ help="Display verbose build output while installing.")
+ subparser.add_argument(
+ 'spec', nargs=argparse.REMAINDER,
+ help="specs to use for install. Must contain package AND version.")
+
+
+def setup(parser, args):
+ if not args.spec:
+ tty.die("spack setup requires a package spec argument.")
+
+ specs = spack.cmd.parse_specs(args.spec)
+ if len(specs) > 1:
+ tty.die("spack setup only takes one spec.")
+
+ # Take a write lock before checking for existence.
+ with spack.installed_db.write_transaction():
+ spec = specs[0]
+ if not spack.repo.exists(spec.name):
+ tty.warn("No such package: %s" % spec.name)
+ create = tty.get_yes_or_no("Create this package?", default=False)
+ if not create:
+ tty.msg("Exiting without creating.")
+ sys.exit(1)
+ else:
+ tty.msg("Running 'spack edit -f %s'" % spec.name)
+ edit_package(spec.name, spack.repo.first_repo(), None, True)
+ return
+
+ if not spec.versions.concrete:
+ tty.die(
+ "spack setup spec must have a single, concrete version. "
+ "Did you forget a package version number?")
+
+ spec.concretize()
+ package = spack.repo.get(spec)
+
+ # It's OK if the package is already installed.
+
+ # Forces the build to run out of the current directory.
+ package.stage = DIYStage(os.getcwd())
+
+ # TODO: make this an argument, not a global.
+ spack.do_checksum = False
+
+ package.do_install(
+ keep_prefix=True, # Don't remove install directory
+ ignore_deps=args.ignore_deps,
+ verbose=args.verbose,
+ keep_stage=True, # don't remove source dir for SETUP.
+ install_phases=set(['setup', 'provenance']))
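
The new command reuses the ordinary install pipeline but stops it early: DIYStage pins the stage to the current directory, and install_phases restricts do_install() to the 'setup' and 'provenance' phases instead of a full build. A hedged sketch of that phase-filtering idea (the pipeline tuple below is invented; only the two phase names come from the diff):

    def run_phases(requested, pipeline=('fetch', 'build', 'setup', 'provenance')):
        for phase in pipeline:
            if phase in requested:
                print "running phase:", phase

    run_phases(set(['setup', 'provenance']))
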
diff --git a/lib/spack/spack/cmd/spec.py b/lib/spack/spack/cmd/spec.py
index 321e3e429b..6e6d1c1277 100644
--- a/lib/spack/spack/cmd/spec.py
+++ b/lib/spack/spack/cmd/spec.py
@@ -25,23 +25,22 @@
import argparse
import spack.cmd
-import llnl.util.tty as tty
-
import spack
-import spack.url as url
description = "print out abstract and concrete versions of a spec."
+
def setup_parser(subparser):
subparser.add_argument('-i', '--ids', action='store_true',
help="show numerical ids for dependencies.")
- subparser.add_argument('specs', nargs=argparse.REMAINDER, help="specs of packages")
+ subparser.add_argument(
+ 'specs', nargs=argparse.REMAINDER, help="specs of packages")
def spec(parser, args):
- kwargs = { 'ids' : args.ids,
- 'indent' : 2,
- 'color' : True }
+ kwargs = {'ids': args.ids,
+ 'indent': 2,
+ 'color': True}
for spec in spack.cmd.parse_specs(args.specs):
print "Input spec"
diff --git a/lib/spack/spack/cmd/stage.py b/lib/spack/spack/cmd/stage.py
index 61e9c6d9ff..bfc2e5f456 100644
--- a/lib/spack/spack/cmd/stage.py
+++ b/lib/spack/spack/cmd/stage.py
@@ -22,14 +22,14 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import os
import argparse
import llnl.util.tty as tty
import spack
import spack.cmd
-description="Expand downloaded archive in preparation for install"
+description = "Expand downloaded archive in preparation for install"
+
def setup_parser(subparser):
subparser.add_argument(
diff --git a/lib/spack/spack/cmd/test-install.py b/lib/spack/spack/cmd/test-install.py
index 45592a7dda..8e7173e9a2 100644
--- a/lib/spack/spack/cmd/test-install.py
+++ b/lib/spack/spack/cmd/test-install.py
@@ -36,25 +36,25 @@ from llnl.util.filesystem import *
from spack.build_environment import InstallError
from spack.fetch_strategy import FetchError
-description = "Run package installation as a unit test, output formatted results."
+description = "Run package install as a unit test, output formatted results."
def setup_parser(subparser):
- subparser.add_argument('-j',
- '--jobs',
- action='store',
- type=int,
- help="Explicitly set number of make jobs. Default is #cpus.")
+ subparser.add_argument(
+ '-j', '--jobs', action='store', type=int,
+ help="Explicitly set number of make jobs. Default is #cpus.")
- subparser.add_argument('-n',
- '--no-checksum',
- action='store_true',
- dest='no_checksum',
- help="Do not check packages against checksum")
+ subparser.add_argument(
+ '-n', '--no-checksum', action='store_true', dest='no_checksum',
+ help="Do not check packages against checksum")
- subparser.add_argument('-o', '--output', action='store', help="test output goes in this file")
+ subparser.add_argument(
+ '-o', '--output', action='store',
+ help="test output goes in this file")
- subparser.add_argument('package', nargs=argparse.REMAINDER, help="spec of package to install")
+ subparser.add_argument(
+ 'package', nargs=argparse.REMAINDER,
+ help="spec of package to install")
class TestResult(object):
@@ -65,6 +65,7 @@ class TestResult(object):
class TestSuite(object):
+
def __init__(self, filename):
self.filename = filename
self.root = ET.Element('testsuite')
@@ -75,14 +76,17 @@ class TestSuite(object):
def append(self, item):
if not isinstance(item, TestCase):
- raise TypeError('only TestCase instances may be appended to a TestSuite instance')
+ raise TypeError(
+ 'only TestCase instances may be appended to TestSuite')
self.tests.append(item) # Append the item to the list of tests
def __exit__(self, exc_type, exc_val, exc_tb):
# Prepare the header for the entire test suite
- number_of_errors = sum(x.result_type == TestResult.ERRORED for x in self.tests)
+ number_of_errors = sum(
+ x.result_type == TestResult.ERRORED for x in self.tests)
self.root.set('errors', str(number_of_errors))
- number_of_failures = sum(x.result_type == TestResult.FAILED for x in self.tests)
+ number_of_failures = sum(
+ x.result_type == TestResult.FAILED for x in self.tests)
self.root.set('failures', str(number_of_failures))
self.root.set('tests', str(len(self.tests)))
@@ -112,7 +116,8 @@ class TestCase(object):
self.element.set('time', str(time))
self.result_type = None
- def set_result(self, result_type, message=None, error_type=None, text=None):
+ def set_result(self, result_type,
+ message=None, error_type=None, text=None):
self.result_type = result_type
result = TestCase.results[self.result_type]
if result is not None and result is not TestResult.PASSED:
@@ -133,7 +138,12 @@ def fetch_log(path):
def failed_dependencies(spec):
- return set(item for item in spec.dependencies.itervalues() if not spack.repo.get(item).installed)
+ def get_deps(deptype):
+ return set(item for item in spec.dependencies(deptype)
+ if not spack.repo.get(item).installed)
+ link_deps = get_deps('link')
+ run_deps = get_deps('run')
+ return link_deps.union(run_deps)
def get_top_spec_or_die(args):
@@ -150,13 +160,19 @@ def install_single_spec(spec, number_of_jobs):
# If it is already installed, skip the test
if spack.repo.get(spec).installed:
testcase = TestCase(package.name, package.spec.short_spec, time=0.0)
- testcase.set_result(TestResult.SKIPPED, message='Skipped [already installed]', error_type='already_installed')
+ testcase.set_result(
+ TestResult.SKIPPED,
+ message='Skipped [already installed]',
+ error_type='already_installed')
return testcase
# If it relies on dependencies that did not install, skip
if failed_dependencies(spec):
testcase = TestCase(package.name, package.spec.short_spec, time=0.0)
- testcase.set_result(TestResult.SKIPPED, message='Skipped [failed dependencies]', error_type='dep_failed')
+ testcase.set_result(
+ TestResult.SKIPPED,
+ message='Skipped [failed dependencies]',
+ error_type='dep_failed')
return testcase
# Otherwise try to install the spec
@@ -172,26 +188,30 @@ def install_single_spec(spec, number_of_jobs):
testcase = TestCase(package.name, package.spec.short_spec, duration)
testcase.set_result(TestResult.PASSED)
except InstallError:
- # An InstallError is considered a failure (the recipe didn't work correctly)
+ # An InstallError is considered a failure (the recipe didn't work
+ # correctly)
duration = time.time() - start_time
# Try to get the log
lines = fetch_log(package.build_log_path)
text = '\n'.join(lines)
testcase = TestCase(package.name, package.spec.short_spec, duration)
- testcase.set_result(TestResult.FAILED, message='Installation failure', text=text)
+ testcase.set_result(TestResult.FAILED,
+ message='Installation failure', text=text)
except FetchError:
# A FetchError is considered an error (we didn't even start building)
duration = time.time() - start_time
testcase = TestCase(package.name, package.spec.short_spec, duration)
- testcase.set_result(TestResult.ERRORED, message='Unable to fetch package')
+ testcase.set_result(TestResult.ERRORED,
+ message='Unable to fetch package')
return testcase
def get_filename(args, top_spec):
if not args.output:
- fname = 'test-{x.name}-{x.version}-{hash}.xml'.format(x=top_spec, hash=top_spec.dag_hash())
+ fname = 'test-{x.name}-{x.version}-{hash}.xml'.format(
+ x=top_spec, hash=top_spec.dag_hash())
output_directory = join_path(os.getcwd(), 'test-output')
if not os.path.exists(output_directory):
os.mkdir(output_directory)
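
TestSuite accumulates TestCase elements and, on context exit, stamps the aggregate errors/failures/tests counts onto the root element, producing JUnit-style XML. A minimal sketch of the resulting document shape using only the standard library (names and values are illustrative):

    import xml.etree.ElementTree as ET

    root = ET.Element('testsuite')
    case = ET.SubElement(root, 'testcase')
    case.set('name', 'libelf')
    case.set('time', '12.3')
    ET.SubElement(case, 'failure', {'message': 'Installation failure'})
    root.set('errors', '0')
    root.set('failures', '1')
    root.set('tests', '1')
    print ET.tostring(root)
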
diff --git a/lib/spack/spack/cmd/test.py b/lib/spack/spack/cmd/test.py
index cb9dd26c71..bf7342f606 100644
--- a/lib/spack/spack/cmd/test.py
+++ b/lib/spack/spack/cmd/test.py
@@ -23,33 +23,55 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
-from pprint import pprint
from llnl.util.filesystem import join_path, mkdirp
from llnl.util.tty.colify import colify
-from llnl.util.lang import list_modules
import spack
import spack.test
+from spack.fetch_strategy import FetchError
+
+description = "Run unit tests"
-description ="Run unit tests"
def setup_parser(subparser):
subparser.add_argument(
'names', nargs='*', help="Names of tests to run.")
subparser.add_argument(
- '-l', '--list', action='store_true', dest='list', help="Show available tests")
+ '-l', '--list', action='store_true', dest='list',
+ help="Show available tests")
subparser.add_argument(
- '--createXmlOutput', action='store_true', dest='createXmlOutput',
+ '--createXmlOutput', action='store_true', dest='createXmlOutput',
help="Create JUnit XML from test results")
subparser.add_argument(
- '--xmlOutputDir', dest='xmlOutputDir',
+ '--xmlOutputDir', dest='xmlOutputDir',
help="Nose creates XML files in this directory")
subparser.add_argument(
'-v', '--verbose', action='store_true', dest='verbose',
help="verbose output")
+class MockCache(object):
+
+ def store(self, copyCmd, relativeDst):
+ pass
+
+ def fetcher(self, targetPath, digest):
+ return MockCacheFetcher()
+
+
+class MockCacheFetcher(object):
+
+ def set_stage(self, stage):
+ pass
+
+ def fetch(self):
+ raise FetchError("Mock cache always fails for tests")
+
+ def __str__(self):
+ return "[mock fetcher]"
+
+
def test(parser, args):
if args.list:
print "Available tests:"
@@ -63,7 +85,8 @@ def test(parser, args):
outputDir = join_path(os.getcwd(), "test-output")
else:
outputDir = os.path.abspath(args.xmlOutputDir)
-
+
if not os.path.exists(outputDir):
mkdirp(outputDir)
+ spack.fetch_cache = MockCache()
spack.test.run(args.names, outputDir, args.verbose)
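
Swapping spack.fetch_cache for MockCache just before the run forces unit tests to exercise real fetch strategies instead of silently reading cached archives: the mock implements only the two methods the consumer calls, and its fetcher fails loudly. The duck-typing restated standalone (FetchError is stubbed here):

    class FetchError(Exception):
        pass

    class MockCacheFetcher(object):
        def set_stage(self, stage):
            pass
        def fetch(self):
            raise FetchError("mock cache always fails")

    class MockCache(object):
        def store(self, copy_cmd, relative_dst):
            pass
        def fetcher(self, target_path, digest):
            return MockCacheFetcher()

    try:
        MockCache().fetcher('/tmp/archive', digest=None).fetch()
    except FetchError, e:
        print "fetch blocked as intended:", e
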
diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py
index 9fdf3045b2..8957d1c908 100644
--- a/lib/spack/spack/cmd/uninstall.py
+++ b/lib/spack/spack/cmd/uninstall.py
@@ -30,7 +30,6 @@ import llnl.util.tty as tty
import spack
import spack.cmd
import spack.repository
-from spack.cmd.find import display_specs
description = "Remove an installed package"
@@ -39,51 +38,54 @@ error_message = """You can either:
b) use spack uninstall -a to uninstall ALL matching specs.
"""
-
-def ask_for_confirmation(message):
- while True:
- tty.msg(message + '[y/n]')
- choice = raw_input().lower()
- if choice == 'y':
- break
- elif choice == 'n':
- raise SystemExit('Operation aborted')
- tty.warn('Please reply either "y" or "n"')
+# Arguments for display_specs when we find ambiguity
+display_args = {
+ 'long': True,
+ 'show_flags': True,
+ 'variants': True
+}
def setup_parser(subparser):
subparser.add_argument(
'-f', '--force', action='store_true', dest='force',
help="Remove regardless of whether other packages depend on this one.")
+
subparser.add_argument(
'-a', '--all', action='store_true', dest='all',
- help="USE CAREFULLY. Remove ALL installed packages that match each " +
- "supplied spec. i.e., if you say uninstall libelf, ALL versions of " +
- "libelf are uninstalled. This is both useful and dangerous, like rm -r.")
+ help="USE CAREFULLY. Remove ALL installed packages that match each "
+ "supplied spec. i.e., if you say uninstall libelf, ALL versions "
+ "of libelf are uninstalled. This is both useful and dangerous, "
+ "like rm -r.")
+
subparser.add_argument(
'-d', '--dependents', action='store_true', dest='dependents',
- help='Also uninstall any packages that depend on the ones given via command line.'
- )
+ help='Also uninstall any packages that depend on the ones given '
+ 'via command line.')
+
subparser.add_argument(
'-y', '--yes-to-all', action='store_true', dest='yes_to_all',
- help='Assume "yes" is the answer to every confirmation asked to the user.'
+ help='Assume "yes" is the answer to every confirmation requested')
- )
- subparser.add_argument('packages', nargs=argparse.REMAINDER, help="specs of packages to uninstall")
+ subparser.add_argument(
+ 'packages',
+ nargs=argparse.REMAINDER,
+ help="specs of packages to uninstall")
def concretize_specs(specs, allow_multiple_matches=False, force=False):
- """
- Returns a list of specs matching the non necessarily concretized specs given from cli
+ """Returns a list of specs matching the non necessarily
+ concretized specs given from cli
Args:
specs: list of specs to be matched against installed packages
- allow_multiple_matches : boolean (if True multiple matches for each item in specs are admitted)
+ allow_multiple_matches : if True multiple matches are admitted
Return:
list of specs
"""
- specs_from_cli = [] # List of specs that match expressions given via command line
+ # List of specs that match expressions given via command line
+ specs_from_cli = []
has_errors = False
for spec in specs:
matching = spack.installed_db.query(spec)
@@ -92,7 +94,7 @@ def concretize_specs(specs, allow_multiple_matches=False, force=False):
if not allow_multiple_matches and len(matching) > 1:
tty.error("%s matches multiple packages:" % spec)
print()
- display_specs(matching, long=True, show_flags=True)
+ spack.cmd.display_specs(matching, **display_args)
print()
has_errors = True
@@ -109,8 +111,8 @@ def concretize_specs(specs, allow_multiple_matches=False, force=False):
def installed_dependents(specs):
- """
- Returns a dictionary that maps a spec with a list of its installed dependents
+ """Returns a dictionary that maps a spec with a list of its
+ installed dependents
Args:
specs: list of specs to be checked for dependents
@@ -140,7 +142,7 @@ def do_uninstall(specs, force):
try:
# should work if package is known to spack
packages.append(item.package)
- except spack.repository.UnknownPackageError as e:
+ except spack.repository.UnknownPackageError:
# The package.py file has gone away -- but still
# want to uninstall.
spack.Package(item).do_uninstall(force=True)
@@ -162,17 +164,20 @@ def uninstall(parser, args):
with spack.installed_db.write_transaction():
specs = spack.cmd.parse_specs(args.packages)
        # Gets the list of installed specs that match the ones given via cli
- uninstall_list = concretize_specs(specs, args.all, args.force) # takes care of '-a' is given in the cli
- dependent_list = installed_dependents(uninstall_list) # takes care of '-d'
+        # handles the case in which '-a' was given on the command line
+        uninstall_list = concretize_specs(specs, args.all, args.force)
+        dependent_list = installed_dependents(
+            uninstall_list)  # handles '-d'
# Process dependent_list and update uninstall_list
has_error = False
if dependent_list and not args.dependents and not args.force:
for spec, lst in dependent_list.items():
- tty.error("Will not uninstall %s" % spec.format("$_$@$%@$#", color=True))
+ tty.error("Will not uninstall %s" %
+ spec.format("$_$@$%@$#", color=True))
print('')
print("The following packages depend on it:")
- display_specs(lst, long=True)
+ spack.cmd.display_specs(lst, **display_args)
print('')
has_error = True
elif args.dependents:
@@ -181,14 +186,15 @@ def uninstall(parser, args):
uninstall_list = list(set(uninstall_list))
if has_error:
- tty.die('You can use spack uninstall --dependents to uninstall these dependencies as well')
+ tty.die('You can use spack uninstall --dependents '
+ 'to uninstall these dependencies as well')
if not args.yes_to_all:
tty.msg("The following packages will be uninstalled : ")
print('')
- display_specs(uninstall_list, long=True, show_flags=True)
+ spack.cmd.display_specs(uninstall_list, **display_args)
print('')
- ask_for_confirmation('Do you want to proceed ? ')
+        spack.cmd.ask_for_confirmation('Do you want to proceed? ')
# Uninstall everything on the list
do_uninstall(uninstall_list, args.force)
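
Centralizing the formatting flags in display_args keeps every error path above printing specs identically, and any future flag change happens in one place. The **kwargs reuse in isolation (the printer below is a stub, not Spack's display_specs):

    display_args = {'long': True, 'show_flags': True, 'variants': True}

    def display_specs(specs, long=False, show_flags=False, variants=False):
        for s in specs:
            print s, long, show_flags, variants

    display_specs(['libelf@0.8.13'], **display_args)
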
diff --git a/lib/spack/spack/cmd/unload.py b/lib/spack/spack/cmd/unload.py
index 7bd15750ed..b52bedb7b4 100644
--- a/lib/spack/spack/cmd/unload.py
+++ b/lib/spack/spack/cmd/unload.py
@@ -25,13 +25,15 @@
import argparse
import spack.modules
-description ="Remove package from environment using module."
+description = "Remove package from environment using module."
+
def setup_parser(subparser):
"""Parser is only constructed so that this prints a nice help
message with -h. """
subparser.add_argument(
- 'spec', nargs=argparse.REMAINDER, help='Spec of package to unload with modules.')
+ 'spec', nargs=argparse.REMAINDER,
+ help='Spec of package to unload with modules.')
def unload(parser, args):
diff --git a/lib/spack/spack/cmd/unuse.py b/lib/spack/spack/cmd/unuse.py
index 789a690e9c..6403cf6162 100644
--- a/lib/spack/spack/cmd/unuse.py
+++ b/lib/spack/spack/cmd/unuse.py
@@ -25,13 +25,15 @@
import argparse
import spack.modules
-description ="Remove package from environment using dotkit."
+description = "Remove package from environment using dotkit."
+
def setup_parser(subparser):
"""Parser is only constructed so that this prints a nice help
message with -h. """
subparser.add_argument(
- 'spec', nargs=argparse.REMAINDER, help='Spec of package to unuse with dotkit.')
+ 'spec', nargs=argparse.REMAINDER,
+ help='Spec of package to unuse with dotkit.')
def unuse(parser, args):
diff --git a/lib/spack/spack/cmd/url-parse.py b/lib/spack/spack/cmd/url-parse.py
index ce12a17d13..b8c7c95040 100644
--- a/lib/spack/spack/cmd/url-parse.py
+++ b/lib/spack/spack/cmd/url-parse.py
@@ -22,28 +22,28 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import sys
-
import llnl.util.tty as tty
import spack
import spack.url
from spack.util.web import find_versions_of_archive
-description = "Show parsing of a URL, optionally spider web for other versions."
+description = "Show parsing of a URL, optionally spider web for versions."
+
def setup_parser(subparser):
subparser.add_argument('url', help="url of a package archive")
subparser.add_argument(
- '-s', '--spider', action='store_true', help="Spider the source page for versions.")
+ '-s', '--spider', action='store_true',
+ help="Spider the source page for versions.")
def print_name_and_version(url):
name, ns, nl, ntup, ver, vs, vl, vtup = spack.url.substitution_offsets(url)
- underlines = [" "] * max(ns+nl, vs+vl)
- for i in range(ns, ns+nl):
+ underlines = [" "] * max(ns + nl, vs + vl)
+ for i in range(ns, ns + nl):
underlines[i] = '-'
- for i in range(vs, vs+vl):
+ for i in range(vs, vs + vl):
underlines[i] = '~'
print " %s" % url
diff --git a/lib/spack/spack/cmd/urls.py b/lib/spack/spack/cmd/urls.py
index 2fe2019a22..f151581d7d 100644
--- a/lib/spack/spack/cmd/urls.py
+++ b/lib/spack/spack/cmd/urls.py
@@ -22,12 +22,12 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import sys
import spack
import spack.url
description = "Inspect urls used by packages in spack."
+
def setup_parser(subparser):
subparser.add_argument(
'-c', '--color', action='store_true',
@@ -53,6 +53,7 @@ def urls(parser, args):
for url in sorted(urls):
if args.color or args.extrapolation:
- print spack.url.color_url(url, subs=args.extrapolation, errors=True)
+ print spack.url.color_url(
+ url, subs=args.extrapolation, errors=True)
else:
print url
diff --git a/lib/spack/spack/cmd/use.py b/lib/spack/spack/cmd/use.py
index bbb90fde1b..e3612ace48 100644
--- a/lib/spack/spack/cmd/use.py
+++ b/lib/spack/spack/cmd/use.py
@@ -25,13 +25,15 @@
import argparse
import spack.modules
-description ="Add package to environment using dotkit."
+description = "Add package to environment using dotkit."
+
def setup_parser(subparser):
"""Parser is only constructed so that this prints a nice help
message with -h. """
subparser.add_argument(
- 'spec', nargs=argparse.REMAINDER, help='Spec of package to use with dotkit.')
+ 'spec', nargs=argparse.REMAINDER,
+ help='Spec of package to use with dotkit.')
def use(parser, args):
diff --git a/lib/spack/spack/cmd/versions.py b/lib/spack/spack/cmd/versions.py
index ec3a4b2e34..1e95225ab8 100644
--- a/lib/spack/spack/cmd/versions.py
+++ b/lib/spack/spack/cmd/versions.py
@@ -22,15 +22,16 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import os
from llnl.util.tty.colify import colify
import llnl.util.tty as tty
import spack
-description ="List available versions of a package"
+description = "List available versions of a package"
+
def setup_parser(subparser):
- subparser.add_argument('package', metavar='PACKAGE', help='Package to list versions for')
+ subparser.add_argument('package', metavar='PACKAGE',
+ help='Package to list versions for')
def versions(parser, args):
diff --git a/lib/spack/spack/compiler.py b/lib/spack/spack/compiler.py
index 2ae305f201..a77991e4dc 100644
--- a/lib/spack/spack/compiler.py
+++ b/lib/spack/spack/compiler.py
@@ -25,21 +25,20 @@
import os
import re
import itertools
-from datetime import datetime
import llnl.util.tty as tty
-from llnl.util.lang import memoized
from llnl.util.filesystem import join_path
import spack.error
import spack.spec
+import spack.architecture
from spack.util.multiproc import parmap
from spack.util.executable import *
from spack.util.environment import get_path
-from spack.version import Version
__all__ = ['Compiler', 'get_compiler_version']
+
def _verify_executables(*paths):
for path in paths:
        if not (os.path.isfile(path) and os.access(path, os.X_OK)):
@@ -48,8 +47,9 @@ def _verify_executables(*paths):
_version_cache = {}
+
def get_compiler_version(compiler_path, version_arg, regex='(.*)'):
- if not compiler_path in _version_cache:
+ if compiler_path not in _version_cache:
compiler = Executable(compiler_path)
output = compiler(version_arg, output=str, error=str)
@@ -107,19 +107,27 @@ class Compiler(object):
@property
def fc_rpath_arg(self):
return '-Wl,-rpath,'
+ # Cray PrgEnv name that can be used to load this compiler
+ PrgEnv = None
+ # Name of module used to switch versions of this compiler
+ PrgEnv_compiler = None
-
- def __init__(self, cspec, cc, cxx, f77, fc, **kwargs):
+    def __init__(self, cspec, operating_system,
+                 paths, modules=None, alias=None, **kwargs):
def check(exe):
if exe is None:
return None
_verify_executables(exe)
return exe
- self.cc = check(cc)
- self.cxx = check(cxx)
- self.f77 = check(f77)
- self.fc = check(fc)
+ self.cc = check(paths[0])
+ self.cxx = check(paths[1])
+ if len(paths) > 2:
+ self.f77 = check(paths[2])
+ if len(paths) == 3:
+ self.fc = self.f77
+ else:
+ self.fc = check(paths[3])
# Unfortunately have to make sure these params are accepted
# in the same order they are returned by sorted(flags)
@@ -130,8 +138,10 @@ class Compiler(object):
if value is not None:
self.flags[flag] = value.split()
+ self.operating_system = operating_system
self.spec = cspec
-
+        self.modules = modules or []
+ self.alias = alias
@property
def version(self):
@@ -142,31 +152,30 @@ class Compiler(object):
@property
def openmp_flag(self):
# If it is not overridden, assume it is not supported and warn the user
- tty.die("The compiler you have chosen does not currently support OpenMP.",
- "If you think it should, please edit the compiler subclass and",
- "submit a pull request or issue.")
-
+ tty.die(
+ "The compiler you have chosen does not currently support OpenMP.",
+ "If you think it should, please edit the compiler subclass and",
+ "submit a pull request or issue.")
# This property should be overridden in the compiler subclass if
# C++11 is supported by that compiler
@property
def cxx11_flag(self):
# If it is not overridden, assume it is not supported and warn the user
- tty.die("The compiler you have chosen does not currently support C++11.",
- "If you think it should, please edit the compiler subclass and",
- "submit a pull request or issue.")
-
+ tty.die(
+ "The compiler you have chosen does not currently support C++11.",
+ "If you think it should, please edit the compiler subclass and",
+ "submit a pull request or issue.")
# This property should be overridden in the compiler subclass if
# C++14 is supported by that compiler
@property
def cxx14_flag(self):
# If it is not overridden, assume it is not supported and warn the user
- tty.die("The compiler you have chosen does not currently support C++14.",
- "If you think it should, please edit the compiler subclass and",
- "submit a pull request or issue.")
-
-
+ tty.die(
+ "The compiler you have chosen does not currently support C++14.",
+ "If you think it should, please edit the compiler subclass and",
+ "submit a pull request or issue.")
#
# Compiler classes have methods for querying the version of
@@ -175,7 +184,6 @@ class Compiler(object):
# Compiler *instances* are just data objects, and can only be
# constructed from an actual set of executables.
#
-
@classmethod
def default_version(cls, cc):
"""Override just this to override all compiler version functions."""
@@ -242,91 +250,46 @@ class Compiler(object):
version = detect_version(full_path)
return (version, prefix, suffix, full_path)
except ProcessError, e:
- tty.debug("Couldn't get version for compiler %s" % full_path, e)
+ tty.debug(
+ "Couldn't get version for compiler %s" % full_path, e)
return None
except Exception, e:
# Catching "Exception" here is fine because it just
# means something went wrong running a candidate executable.
- tty.debug("Error while executing candidate compiler %s" % full_path,
- "%s: %s" %(e.__class__.__name__, e))
+ tty.debug("Error while executing candidate compiler %s"
+ % full_path,
+ "%s: %s" % (e.__class__.__name__, e))
return None
- successful = [key for key in parmap(check, checks) if key is not None]
+ successful = [k for k in parmap(check, checks) if k is not None]
+
# The 'successful' list is ordered like the input paths.
# Reverse it here so that the dict creation (last insert wins)
    # does not spoil the intended precedence.
successful.reverse()
return dict(((v, p, s), path) for v, p, s, path in successful)
- @classmethod
- def find(cls, *path):
- """Try to find this type of compiler in the user's
- environment. For each set of compilers found, this returns
- compiler objects with the cc, cxx, f77, fc paths and the
- version filled in.
-
- This will search for compilers with the names in cc_names,
- cxx_names, etc. and it will group them if they have common
- prefixes, suffixes, and versions. e.g., gcc-mp-4.7 would
- be grouped with g++-mp-4.7 and gfortran-mp-4.7.
- """
- dicts = parmap(
- lambda t: cls._find_matches_in_path(*t),
- [(cls.cc_names, cls.cc_version) + tuple(path),
- (cls.cxx_names, cls.cxx_version) + tuple(path),
- (cls.f77_names, cls.f77_version) + tuple(path),
- (cls.fc_names, cls.fc_version) + tuple(path)])
-
- all_keys = set()
- for d in dicts:
- all_keys.update(d)
-
- compilers = {}
- for k in all_keys:
- ver, pre, suf = k
-
- # Skip compilers with unknown version.
- if ver == 'unknown':
- continue
-
- paths = tuple(pn[k] if k in pn else None for pn in dicts)
- spec = spack.spec.CompilerSpec(cls.name, ver)
-
- if ver in compilers:
- prev = compilers[ver]
-
- # prefer the one with more compilers.
- prev_paths = [prev.cc, prev.cxx, prev.f77, prev.fc]
- newcount = len([p for p in paths if p is not None])
- prevcount = len([p for p in prev_paths if p is not None])
-
- # Don't add if it's not an improvement over prev compiler.
- if newcount <= prevcount:
- continue
-
- compilers[ver] = cls(spec, *paths)
-
- return list(compilers.values())
-
-
def __repr__(self):
"""Return a string representation of the compiler toolchain."""
return self.__str__()
-
def __str__(self):
"""Return a string representation of the compiler toolchain."""
return "%s(%s)" % (
- self.name, '\n '.join((str(s) for s in (self.cc, self.cxx, self.f77, self.fc))))
+ self.name, '\n '.join((str(s) for s in (
+ self.cc, self.cxx, self.f77, self.fc, self.modules,
+ str(self.operating_system)))))
class CompilerAccessError(spack.error.SpackError):
+
def __init__(self, path):
super(CompilerAccessError, self).__init__(
"'%s' is not a valid compiler." % path)
class InvalidCompilerError(spack.error.SpackError):
+
def __init__(self):
super(InvalidCompilerError, self).__init__(
"Compiler has no executables.")
diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py
index 7c951ae8bc..eb866c8bbb 100644
--- a/lib/spack/spack/compilers/__init__.py
+++ b/lib/spack/spack/compilers/__init__.py
@@ -26,10 +26,9 @@
system and configuring Spack to use multiple compilers.
"""
import imp
-import os
import platform
-from llnl.util.lang import memoized, list_modules
+from llnl.util.lang import list_modules
from llnl.util.filesystem import join_path
import spack
@@ -38,14 +37,12 @@ import spack.spec
import spack.config
import spack.architecture
-from spack.util.multiproc import parmap
-from spack.compiler import Compiler
-from spack.util.executable import which
from spack.util.naming import mod_to_class
-from spack.util.environment import get_path
_imported_compilers_module = 'spack.compilers'
-_required_instance_vars = ['cc', 'cxx', 'f77', 'fc']
+_path_instance_vars = ['cc', 'cxx', 'f77', 'fc']
+_other_instance_vars = ['modules', 'operating_system']
+_cache_config_file = []
# TODO: customize order in config file
if platform.system() == 'Darwin':
@@ -64,107 +61,111 @@ def _auto_compiler_spec(function):
def _to_dict(compiler):
"""Return a dict version of compiler suitable to insert in YAML."""
- return {
- str(compiler.spec) : dict(
- (attr, getattr(compiler, attr, None))
- for attr in _required_instance_vars)
- }
+ d = {}
+ d['spec'] = str(compiler.spec)
+ d['paths'] = dict((attr, getattr(compiler, attr, None))
+ for attr in _path_instance_vars)
+ d['operating_system'] = str(compiler.operating_system)
+ d['modules'] = compiler.modules if compiler.modules else []
+ if compiler.alias:
+ d['alias'] = compiler.alias
-def get_compiler_config(arch=None, scope=None):
+ return {'compiler': d}
+
+
+def get_compiler_config(scope=None, init_config=True):
"""Return the compiler configuration for the specified architecture.
"""
- # Check whether we're on a front-end (native) architecture.
- my_arch = spack.architecture.sys_type()
- if arch is None:
- arch = my_arch
-
def init_compiler_config():
"""Compiler search used when Spack has no compilers."""
- config[arch] = {}
- compilers = find_compilers(*get_path('PATH'))
+ compilers = find_compilers()
+ compilers_dict = []
for compiler in compilers:
- config[arch].update(_to_dict(compiler))
- spack.config.update_config('compilers', config, scope=scope)
+ compilers_dict.append(_to_dict(compiler))
+ spack.config.update_config('compilers', compilers_dict, scope=scope)
config = spack.config.get_config('compilers', scope=scope)
-
# Update the configuration if there are currently no compilers
# configured. Avoid updating automatically if there ARE site
# compilers configured but no user ones.
- if arch == my_arch and arch not in config:
+ if not config and init_config:
if scope is None:
# We know no compilers were configured in any scope.
init_compiler_config()
+ config = spack.config.get_config('compilers', scope=scope)
elif scope == 'user':
# Check the site config and update the user config if
# nothing is configured at the site level.
site_config = spack.config.get_config('compilers', scope='site')
if not site_config:
init_compiler_config()
-
- return config[arch] if arch in config else {}
+ config = spack.config.get_config('compilers', scope=scope)
+ return config
+ elif config:
+ return config
+ else:
+ return [] # Return empty list which we will later append to.
-def add_compilers_to_config(compilers, arch=None, scope=None):
+def add_compilers_to_config(compilers, scope=None, init_config=True):
"""Add compilers to the config for the specified architecture.
Arguments:
- compilers: a list of Compiler objects.
- - arch: arch to add compilers for.
- scope: configuration scope to modify.
"""
- if arch is None:
- arch = spack.architecture.sys_type()
-
- compiler_config = get_compiler_config(arch, scope)
+ compiler_config = get_compiler_config(scope, init_config)
for compiler in compilers:
- compiler_config[str(compiler.spec)] = dict(
- (c, getattr(compiler, c, "None"))
- for c in _required_instance_vars)
-
- update = { arch : compiler_config }
- spack.config.update_config('compilers', update, scope)
+ compiler_config.append(_to_dict(compiler))
+ global _cache_config_file
+ _cache_config_file = compiler_config
+ spack.config.update_config('compilers', compiler_config, scope)
@_auto_compiler_spec
-def remove_compiler_from_config(compiler_spec, arch=None, scope=None):
+def remove_compiler_from_config(compiler_spec, scope=None):
"""Remove compilers from the config, by spec.
Arguments:
- compiler_specs: a list of CompilerSpec objects.
- - arch: arch to add compilers for.
- scope: configuration scope to modify.
"""
- if arch is None:
- arch = spack.architecture.sys_type()
+    # TODO: find a better way to manage this cache than a module global.
+ global _cache_config_file
+
+ compiler_config = get_compiler_config(scope)
+ config_length = len(compiler_config)
- compiler_config = get_compiler_config(arch, scope)
- del compiler_config[str(compiler_spec)]
- update = { arch : compiler_config }
+ filtered_compiler_config = [
+ comp for comp in compiler_config
+ if spack.spec.CompilerSpec(comp['compiler']['spec']) != compiler_spec]
- spack.config.update_config('compilers', update, scope)
+ # Update the cache for changes
+ _cache_config_file = filtered_compiler_config
+ if len(filtered_compiler_config) == config_length: # No items removed
+        raise CompilerSpecInsufficientlySpecificError(compiler_spec)
+ spack.config.update_config('compilers', filtered_compiler_config, scope)
-def all_compilers_config(arch=None, scope=None):
+def all_compilers_config(scope=None, init_config=True):
"""Return a set of specs for all the compiler versions currently
available to build with. These are instances of CompilerSpec.
"""
# Get compilers for this architecture.
- arch_config = get_compiler_config(arch, scope)
-
- # Merge 'all' compilers with arch-specific ones.
- # Arch-specific compilers have higher precedence.
- merged_config = get_compiler_config('all', scope=scope)
- merged_config = spack.config._merge_yaml(merged_config, arch_config)
-
- return merged_config
+    # Load the config file only once and reuse the cached contents.
+    global _cache_config_file
+    if not _cache_config_file:
+        _cache_config_file = get_compiler_config(scope, init_config)
+    return _cache_config_file
-def all_compilers(arch=None, scope=None):
+def all_compilers(scope=None, init_config=True):
# Return compiler specs from the merged config.
- return [spack.spec.CompilerSpec(s)
- for s in all_compilers_config(arch, scope)]
+ return [spack.spec.CompilerSpec(s['compiler']['spec'])
+ for s in all_compilers_config(scope, init_config)]
def default_compiler():
@@ -179,36 +180,18 @@ def default_compiler():
return sorted(versions)[-1]
-def find_compilers(*path):
+def find_compilers(*paths):
"""Return a list of compilers found in the suppied paths.
- This invokes the find() method for each Compiler class,
- and appends the compilers detected to a list.
+ This invokes the find_compilers() method for each operating
+ system associated with the host platform, and appends
+ the compilers detected to a list.
"""
- # Make sure path elements exist, and include /bin directories
- # under prefixes.
- filtered_path = []
- for p in path:
- # Eliminate symlinks and just take the real directories.
- p = os.path.realpath(p)
- if not os.path.isdir(p):
- continue
- filtered_path.append(p)
-
- # Check for a bin directory, add it if it exists
- bin = join_path(p, 'bin')
- if os.path.isdir(bin):
- filtered_path.append(os.path.realpath(bin))
-
- # Once the paths are cleaned up, do a search for each type of
- # compiler. We can spawn a bunch of parallel searches to reduce
- # the overhead of spelunking all these directories.
- types = all_compiler_types()
- compiler_lists = parmap(lambda cls: cls.find(*filtered_path), types)
-
- # ensure all the version calls we made are cached in the parent
- # process, as well. This speeds up Spack a lot.
- clist = reduce(lambda x,y: x+y, compiler_lists)
- return clist
+ # Find compilers for each operating system class
+ oss = all_os_classes()
+ compiler_lists = []
+ for o in oss:
+ compiler_lists.extend(o.find_compilers(*paths))
+ return compiler_lists
def supported_compilers():
@@ -227,51 +210,85 @@ def supported(compiler_spec):
@_auto_compiler_spec
-def find(compiler_spec, arch=None, scope=None):
+def find(compiler_spec, scope=None):
"""Return specs of available compilers that match the supplied
compiler spec. Return an list if nothing found."""
- return [c for c in all_compilers(arch, scope) if c.satisfies(compiler_spec)]
+ return [c for c in all_compilers(scope) if c.satisfies(compiler_spec)]
@_auto_compiler_spec
-def compilers_for_spec(compiler_spec, arch=None, scope=None):
+def compilers_for_spec(compiler_spec, scope=None, **kwargs):
"""This gets all compilers that satisfy the supplied CompilerSpec.
Returns an empty list if none are found.
"""
- config = all_compilers_config(arch, scope)
+ platform = kwargs.get("platform", None)
+ config = all_compilers_config(scope)
+
+ def get_compilers(cspec):
+ compilers = []
+
+ for items in config:
+ if items['compiler']['spec'] != str(cspec):
+ continue
+ items = items['compiler']
+
+ if not ('paths' in items and
+ all(n in items['paths'] for n in _path_instance_vars)):
+ raise InvalidCompilerConfigurationError(cspec)
+
+ cls = class_for_compiler_name(cspec.name)
+
+ compiler_paths = []
+ for c in _path_instance_vars:
+ compiler_path = items['paths'][c]
+ if compiler_path != "None":
+ compiler_paths.append(compiler_path)
+ else:
+ compiler_paths.append(None)
+
+ mods = items.get('modules')
+ if mods == 'None':
+ mods = []
+
+ if 'operating_system' in items:
+ os = spack.architecture._operating_system_from_dict(
+ items['operating_system'], platform)
+ else:
+ os = None
- def get_compiler(cspec):
- items = config[str(cspec)]
+ alias = items['alias'] if 'alias' in items else None
- if not all(n in items for n in _required_instance_vars):
- raise InvalidCompilerConfigurationError(cspec)
+ flags = {}
+ for f in spack.spec.FlagMap.valid_compiler_flags():
+ if f in items:
+ flags[f] = items[f]
- cls = class_for_compiler_name(cspec.name)
- compiler_paths = []
- for c in _required_instance_vars:
- compiler_path = items[c]
- if compiler_path != "None":
- compiler_paths.append(compiler_path)
- else:
- compiler_paths.append(None)
+ compilers.append(
+ cls(cspec, os, compiler_paths, mods, alias, **flags))
- flags = {}
- for f in spack.spec.FlagMap.valid_compiler_flags():
- if f in items:
- flags[f] = items[f]
- return cls(cspec, *compiler_paths, **flags)
+ return compilers
- matches = find(compiler_spec, arch, scope)
- return [get_compiler(cspec) for cspec in matches]
+ matches = set(find(compiler_spec, scope))
+ compilers = []
+ for cspec in matches:
+ compilers.extend(get_compilers(cspec))
+ return compilers
@_auto_compiler_spec
-def compiler_for_spec(compiler_spec):
+def compiler_for_spec(compiler_spec, arch):
"""Get the compiler that satisfies compiler_spec. compiler_spec must
be concrete."""
+ operating_system = arch.platform_os
assert(compiler_spec.concrete)
- compilers = compilers_for_spec(compiler_spec)
- assert(len(compilers) == 1)
+
+ compilers = [
+ c for c in compilers_for_spec(compiler_spec, platform=arch.platform)
+ if c.operating_system == operating_system]
+ if len(compilers) < 1:
+ raise NoCompilerForSpecError(compiler_spec, operating_system)
+ if len(compilers) > 1:
+ raise CompilerSpecInsufficientlySpecificError(compiler_spec)
return compilers[0]
@@ -289,18 +306,47 @@ def class_for_compiler_name(compiler_name):
return cls
+def all_os_classes():
+ """
+ Return the list of classes for all operating systems available on
+ this platform
+ """
+ classes = []
+
+ platform = spack.architecture.platform()
+ for os_class in platform.operating_sys.values():
+ classes.append(os_class)
+
+ return classes
+
+
def all_compiler_types():
return [class_for_compiler_name(c) for c in supported_compilers()]
class InvalidCompilerConfigurationError(spack.error.SpackError):
+
def __init__(self, compiler_spec):
super(InvalidCompilerConfigurationError, self).__init__(
"Invalid configuration for [compiler \"%s\"]: " % compiler_spec,
"Compiler configuration must contain entries for all compilers: %s"
- % _required_instance_vars)
+ % _path_instance_vars)
class NoCompilersError(spack.error.SpackError):
def __init__(self):
- super(NoCompilersError, self).__init__("Spack could not find any compilers!")
+ super(NoCompilersError, self).__init__(
+ "Spack could not find any compilers!")
+
+
+class NoCompilerForSpecError(spack.error.SpackError):
+    def __init__(self, compiler_spec, operating_system):
+        super(NoCompilerForSpecError, self).__init__(
+            "No compilers for operating system %s satisfy spec %s"
+            % (operating_system, compiler_spec))
+
+
+class CompilerSpecInsufficientlySpecificError(spack.error.SpackError):
+ def __init__(self, compiler_spec):
+ super(CompilerSpecInsufficientlySpecificError, self).__init__(
+ "Multiple compilers satisfy spec %s" % compiler_spec)
diff --git a/lib/spack/spack/compilers/cce.py b/lib/spack/spack/compilers/cce.py
new file mode 100644
index 0000000000..43d000dd69
--- /dev/null
+++ b/lib/spack/spack/compilers/cce.py
@@ -0,0 +1,55 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack.compiler import *
+
+
+class Cce(Compiler):
+ """Cray compiler environment compiler."""
+ # Subclasses use possible names of C compiler
+ cc_names = ['cc']
+
+ # Subclasses use possible names of C++ compiler
+ cxx_names = ['CC']
+
+ # Subclasses use possible names of Fortran 77 compiler
+ f77_names = ['ftn']
+
+ # Subclasses use possible names of Fortran 90 compiler
+ fc_names = ['ftn']
+
+    # Match compiler names carrying version-style suffixes (pattern
+    # inherited from the GCC class).
+ suffixes = [r'-mp-\d\.\d']
+
+ PrgEnv = 'PrgEnv-cray'
+ PrgEnv_compiler = 'cce'
+
+ link_paths = {'cc': 'cc',
+ 'cxx': 'c++',
+ 'f77': 'f77',
+ 'fc': 'fc'}
+
+ @classmethod
+ def default_version(cls, comp):
+ return get_compiler_version(comp, '-V', r'[Vv]ersion.*(\d+(\.\d+)+)')
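
The PrgEnv / PrgEnv_compiler pair being added to the compiler classes names the Cray programming-environment module and the module family used to switch compiler versions. A hedged sketch of how a Cray front end might consume them (the function and the version argument are hypothetical):

    def cray_module_loads(compiler_cls, version):
        # e.g. 'module load PrgEnv-cray' followed by 'module load cce/8.4.0'
        yield "module load %s" % compiler_cls.PrgEnv
        yield "module load %s/%s" % (compiler_cls.PrgEnv_compiler, version)
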
diff --git a/lib/spack/spack/compilers/clang.py b/lib/spack/spack/compilers/clang.py
index 072bcd065f..4cf65222ae 100644
--- a/lib/spack/spack/compilers/clang.py
+++ b/lib/spack/spack/compilers/clang.py
@@ -29,6 +29,7 @@ from spack.util.executable import *
import llnl.util.tty as tty
from spack.version import ver
+
class Clang(Compiler):
# Subclasses use possible names of C compiler
cc_names = ['clang']
@@ -43,11 +44,12 @@ class Clang(Compiler):
fc_names = []
# Named wrapper links within spack.build_env_path
- link_paths = { 'cc' : 'clang/clang',
- 'cxx' : 'clang/clang++',
- # Use default wrappers for fortran, in case provided in compilers.yaml
- 'f77' : 'f77',
- 'fc' : 'f90' }
+ link_paths = {'cc': 'clang/clang',
+ 'cxx': 'clang/clang++',
+ # Use default wrappers for fortran, in case provided in
+ # compilers.yaml
+ 'f77': 'f77',
+ 'fc': 'f90'}
@property
def is_apple(self):
@@ -73,7 +75,7 @@ class Clang(Compiler):
return "-std=c++11"
@classmethod
- def default_version(self, comp):
+ def default_version(cls, comp):
"""The '--version' option works for clang compilers.
On most platforms, output looks like this::
diff --git a/lib/spack/spack/compilers/gcc.py b/lib/spack/spack/compilers/gcc.py
index 164bddeb3f..a556f346d7 100644
--- a/lib/spack/spack/compilers/gcc.py
+++ b/lib/spack/spack/compilers/gcc.py
@@ -26,6 +26,7 @@ import llnl.util.tty as tty
from spack.compiler import *
from spack.version import ver
+
class Gcc(Compiler):
# Subclasses use possible names of C compiler
cc_names = ['gcc']
@@ -44,10 +45,13 @@ class Gcc(Compiler):
suffixes = [r'-mp-\d\.\d', r'-\d\.\d', r'-\d']
# Named wrapper links within spack.build_env_path
- link_paths = {'cc' : 'gcc/gcc',
- 'cxx' : 'gcc/g++',
- 'f77' : 'gcc/gfortran',
- 'fc' : 'gcc/gfortran' }
+ link_paths = {'cc': 'gcc/gcc',
+ 'cxx': 'gcc/g++',
+ 'f77': 'gcc/gfortran',
+ 'fc': 'gcc/gfortran'}
+
+ PrgEnv = 'PrgEnv-gnu'
+ PrgEnv_compiler = 'gcc'
@property
def openmp_flag(self):
@@ -76,7 +80,6 @@ class Gcc(Compiler):
# older gfortran versions don't have simple dumpversion output.
r'(?:GNU Fortran \(GCC\))?(\d+\.\d+(?:\.\d+)?)')
-
@classmethod
def f77_version(cls, f77):
return cls.fc_version(f77)
diff --git a/lib/spack/spack/compilers/intel.py b/lib/spack/spack/compilers/intel.py
index 5007ece645..8531ecd19a 100644
--- a/lib/spack/spack/compilers/intel.py
+++ b/lib/spack/spack/compilers/intel.py
@@ -26,6 +26,7 @@ from spack.compiler import *
import llnl.util.tty as tty
from spack.version import ver
+
class Intel(Compiler):
# Subclasses use possible names of C compiler
cc_names = ['icc']
@@ -40,10 +41,13 @@ class Intel(Compiler):
fc_names = ['ifort']
# Named wrapper links within spack.build_env_path
- link_paths = { 'cc' : 'intel/icc',
- 'cxx' : 'intel/icpc',
- 'f77' : 'intel/ifort',
- 'fc' : 'intel/ifort' }
+ link_paths = {'cc': 'intel/icc',
+ 'cxx': 'intel/icpc',
+ 'f77': 'intel/ifort',
+ 'fc': 'intel/ifort'}
+
+ PrgEnv = 'PrgEnv-intel'
+ PrgEnv_compiler = 'intel'
@property
def openmp_flag(self):
@@ -61,7 +65,6 @@ class Intel(Compiler):
else:
return "-std=c++11"
-
@classmethod
def default_version(cls, comp):
"""The '--version' option seems to be the most consistent one
diff --git a/lib/spack/spack/compilers/nag.py b/lib/spack/spack/compilers/nag.py
index cee11bc97a..fdfc078b5e 100644
--- a/lib/spack/spack/compilers/nag.py
+++ b/lib/spack/spack/compilers/nag.py
@@ -23,7 +23,7 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack.compiler import *
-import llnl.util.tty as tty
+
class Nag(Compiler):
# Subclasses use possible names of C compiler
@@ -39,11 +39,12 @@ class Nag(Compiler):
fc_names = ['nagfor']
# Named wrapper links within spack.build_env_path
- link_paths = { # Use default wrappers for C and C++, in case provided in compilers.yaml
- 'cc' : 'cc',
- 'cxx' : 'c++',
- 'f77' : 'nag/nagfor',
- 'fc' : 'nag/nagfor' }
+ # Use default wrappers for C and C++, in case provided in compilers.yaml
+ link_paths = {
+ 'cc': 'cc',
+ 'cxx': 'c++',
+ 'f77': 'nag/nagfor',
+ 'fc': 'nag/nagfor'}
@property
def openmp_flag(self):
@@ -71,9 +72,8 @@ class Nag(Compiler):
"""The '-V' option works for nag compilers.
Output looks like this::
- NAG Fortran Compiler Release 6.0(Hibiya) Build 1037
- Product NPL6A60NA for x86-64 Linux
- Copyright 1990-2015 The Numerical Algorithms Group Ltd., Oxford, U.K.
+ NAG Fortran Compiler Release 6.0(Hibiya) Build 1037
+ Product NPL6A60NA for x86-64 Linux
"""
return get_compiler_version(
comp, '-V', r'NAG Fortran Compiler Release ([0-9.]+)')
diff --git a/lib/spack/spack/compilers/pgi.py b/lib/spack/spack/compilers/pgi.py
index d42148dc49..0e4be6e9ba 100644
--- a/lib/spack/spack/compilers/pgi.py
+++ b/lib/spack/spack/compilers/pgi.py
@@ -23,7 +23,7 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack.compiler import *
-import llnl.util.tty as tty
+
class Pgi(Compiler):
# Subclasses use possible names of C compiler
@@ -39,10 +39,13 @@ class Pgi(Compiler):
fc_names = ['pgfortran', 'pgf95', 'pgf90']
# Named wrapper links within spack.build_env_path
- link_paths = { 'cc' : 'pgi/pgcc',
- 'cxx' : 'pgi/pgc++',
- 'f77' : 'pgi/pgfortran',
- 'fc' : 'pgi/pgfortran' }
+ link_paths = {'cc': 'pgi/pgcc',
+ 'cxx': 'pgi/pgc++',
+ 'f77': 'pgi/pgfortran',
+ 'fc': 'pgi/pgfortran'}
+
+ PrgEnv = 'PrgEnv-pgi'
+ PrgEnv_compiler = 'pgi'
@property
def openmp_flag(self):
@@ -52,7 +55,6 @@ class Pgi(Compiler):
def cxx11_flag(self):
return "-std=c++11"
-
@classmethod
def default_version(cls, comp):
"""The '-V' option works for all the PGI compilers.
diff --git a/lib/spack/spack/compilers/xl.py b/lib/spack/spack/compilers/xl.py
index bda2de4b87..5c83209781 100644
--- a/lib/spack/spack/compilers/xl.py
+++ b/lib/spack/spack/compilers/xl.py
@@ -26,24 +26,26 @@ from spack.compiler import *
import llnl.util.tty as tty
from spack.version import ver
+
class Xl(Compiler):
# Subclasses use possible names of C compiler
- cc_names = ['xlc','xlc_r']
+ cc_names = ['xlc', 'xlc_r']
# Subclasses use possible names of C++ compiler
- cxx_names = ['xlC','xlC_r','xlc++','xlc++_r']
+ cxx_names = ['xlC', 'xlC_r', 'xlc++', 'xlc++_r']
# Subclasses use possible names of Fortran 77 compiler
- f77_names = ['xlf','xlf_r']
+ f77_names = ['xlf', 'xlf_r']
# Subclasses use possible names of Fortran 90 compiler
- fc_names = ['xlf90','xlf90_r','xlf95','xlf95_r','xlf2003','xlf2003_r','xlf2008','xlf2008_r']
+ fc_names = ['xlf90', 'xlf90_r', 'xlf95', 'xlf95_r',
+ 'xlf2003', 'xlf2003_r', 'xlf2008', 'xlf2008_r']
# Named wrapper links within spack.build_env_path
- link_paths = { 'cc' : 'xl/xlc',
- 'cxx' : 'xl/xlc++',
- 'f77' : 'xl/xlf',
- 'fc' : 'xl/xlf90' }
+ link_paths = {'cc': 'xl/xlc',
+ 'cxx': 'xl/xlc++',
+ 'f77': 'xl/xlf',
+ 'fc': 'xl/xlf90'}
@property
def openmp_flag(self):
@@ -57,7 +59,7 @@ class Xl(Compiler):
return "-qlanglvl=extended0x"
@classmethod
- def default_version(self, comp):
+ def default_version(cls, comp):
"""The '-qversion' is the standard option fo XL compilers.
Output looks like this::
@@ -81,28 +83,28 @@ class Xl(Compiler):
"""
return get_compiler_version(
- comp, '-qversion',r'([0-9]?[0-9]\.[0-9])')
+ comp, '-qversion', r'([0-9]?[0-9]\.[0-9])')
@classmethod
def fc_version(cls, fc):
- """The fortran and C/C++ versions of the XL compiler are always two units apart.
- By this we mean that the fortran release that goes with XL C/C++ 11.1 is 13.1.
- Having such a difference in version number is confusing spack quite a lot.
- Most notably if you keep the versions as is the default xl compiler will only
- have fortran and no C/C++.
- So we associate the Fortran compiler with the version associated to the C/C++
- compiler.
- One last stumble. Version numbers over 10 have at least a .1 those under 10
- a .0. There is no xlf 9.x or under currently available. BG/P and BG/L can
- such a compiler mix and possibly older version of AIX and linux on power.
+ """The fortran and C/C++ versions of the XL compiler are always
+ two units apart. By this we mean that the fortran release that
+ goes with XL C/C++ 11.1 is 13.1. Having such a difference in
+ version number is confusing spack quite a lot. Most notably
+ if you keep the versions as is the default xl compiler will
+ only have fortran and no C/C++. So we associate the Fortran
+ compiler with the version associated to the C/C++ compiler.
+ One last stumble. Version numbers over 10 have at least a .1
+ those under 10 a .0. There is no xlf 9.x or under currently
+ available. BG/P and BG/L can such a compiler mix and possibly
+ older version of AIX and linux on power.
"""
- fver = get_compiler_version(fc, '-qversion',r'([0-9]?[0-9]\.[0-9])')
+ fver = get_compiler_version(fc, '-qversion', r'([0-9]?[0-9]\.[0-9])')
cver = float(fver) - 2
- if cver < 10 :
- cver = cver - 0.1
+ if cver < 10:
+ cver = cver - 0.1
return str(cver)
-
@classmethod
def f77_version(cls, f77):
return cls.fc_version(f77)
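
The two-unit offset that fc_version compensates for is easiest to check with
concrete numbers. A minimal sketch of the same arithmetic, assuming the regex
has already extracted the version string (values illustrative):

    fver = '15.1'            # what `xlf -qversion` reports
    cver = float(fver) - 2   # 13.1, the matching XL C/C++ release
    if cver < 10:
        cver = cver - 0.1    # e.g. xlf 11.1 -> 9.1 -> xlC 9.0
    print(str(cver))         # '13.1'
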
diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py
index 4f78bfc347..726dee62e3 100644
--- a/lib/spack/spack/concretize.py
+++ b/lib/spack/spack/concretize.py
@@ -40,12 +40,12 @@ import spack.architecture
import spack.error
from spack.version import *
from functools import partial
-from spec import DependencyMap
from itertools import chain
from spack.config import *
class DefaultConcretizer(object):
+
"""This class doesn't have any state, it just provides some methods for
concretization. You can subclass it to override just some of the
default concretization strategies, or you can override all of them.
@@ -61,14 +61,19 @@ class DefaultConcretizer(object):
if not providers:
raise UnsatisfiableProviderSpecError(providers[0], spec)
spec_w_preferred_providers = find_spec(
- spec, lambda(x): spack.pkgsort.spec_has_preferred_provider(x.name, spec.name))
+ spec,
+ lambda x: spack.pkgsort.spec_has_preferred_provider(
+ x.name, spec.name))
if not spec_w_preferred_providers:
spec_w_preferred_providers = spec
- provider_cmp = partial(spack.pkgsort.provider_compare, spec_w_preferred_providers.name, spec.name)
+ provider_cmp = partial(spack.pkgsort.provider_compare,
+ spec_w_preferred_providers.name,
+ spec.name)
candidates = sorted(providers, cmp=provider_cmp)
- # For each candidate package, if it has externals, add those to the usable list.
- # if it's not buildable, then *only* add the externals.
+ # For each candidate package, if it has externals, add those
+ # to the usable list. if it's not buildable, then *only* add
+ # the externals.
usable = []
for cspec in candidates:
if is_spec_buildable(cspec):
@@ -84,7 +89,8 @@ class DefaultConcretizer(object):
raise NoBuildError(spec)
def cmp_externals(a, b):
- if a.name != b.name:
+ if a.name != b.name and (not a.external or a.external_module and
+ not b.external and b.external_module):
# We're choosing between different providers, so
# maintain order from provider sort
return candidates.index(a) - candidates.index(b)
@@ -102,7 +108,7 @@ class DefaultConcretizer(object):
usable.sort(cmp=cmp_externals)
return usable
-
+ # XXX(deptypes): Look here.
def choose_virtual_or_external(self, spec):
"""Given a list of candidate virtual and external packages, try to
find one that is most ABI compatible.
@@ -113,26 +119,26 @@ class DefaultConcretizer(object):
# Find the nearest spec in the dag that has a compiler. We'll
# use that spec to calibrate compiler compatibility.
- abi_exemplar = find_spec(spec, lambda(x): x.compiler)
+ abi_exemplar = find_spec(spec, lambda x: x.compiler)
if not abi_exemplar:
abi_exemplar = spec.root
# Make a list including ABI compatibility of specs with the exemplar.
strict = [spack.abi.compatible(c, abi_exemplar) for c in candidates]
- loose = [spack.abi.compatible(c, abi_exemplar, loose=True) for c in candidates]
+ loose = [spack.abi.compatible(c, abi_exemplar, loose=True)
+ for c in candidates]
keys = zip(strict, loose, candidates)
# Sort candidates from most to least compatibility.
# Note:
# 1. We reverse because True > False.
# 2. Sort is stable, so c's keep their order.
- keys.sort(key=lambda k:k[:2], reverse=True)
+ keys.sort(key=lambda k: k[:2], reverse=True)
# Pull the candidates back out and return them in order
- candidates = [c for s,l,c in keys]
+ candidates = [c for s, l, c in keys]
return candidates
-
def concretize_version(self, spec):
"""If the spec is already concrete, return. Otherwise take
the preferred version from spackconfig, and default to the package's
@@ -165,7 +171,12 @@ class DefaultConcretizer(object):
valid_versions.sort(key=prefer_key, reverse=True)
if valid_versions:
- spec.versions = ver([valid_versions[0]])
+ # Disregard @develop and take the next valid version
+ if ver(valid_versions[0]) == ver('develop') and \
+ len(valid_versions) > 1:
+ spec.versions = ver([valid_versions[1]])
+ else:
+ spec.versions = ver([valid_versions[0]])
else:
# We don't know of any SAFE versions that match the given
# spec. Grab the spec's versions and grab the highest
@@ -187,45 +198,87 @@ class DefaultConcretizer(object):
return True # Things changed
+ def _concretize_operating_system(self, spec):
+ if spec.architecture.platform_os is not None and isinstance(
+ spec.architecture.platform_os,
+ spack.architecture.OperatingSystem):
+ return False
- def concretize_architecture(self, spec):
- """If the spec already had an architecture, return. Otherwise if
- the root of the DAG has an architecture, then use that.
- Otherwise take the system's default architecture.
-
- Intuition: Architectures won't be set a lot, and generally you
- want the host system's architecture. When architectures are
- mised in a spec, it is likely because the tool requries a
- cross-compiled component, e.g. for tools that run on BlueGene
- or Cray machines. These constraints will likely come directly
- from packages, so require the user to be explicit if they want
- to mess with the architecture, and revert to the default when
- they're not explicit.
- """
- if spec.architecture is not None:
+ if spec.root.architecture and spec.root.architecture.platform_os:
+ if isinstance(spec.root.architecture.platform_os,
+ spack.architecture.OperatingSystem):
+ spec.architecture.platform_os = \
+ spec.root.architecture.platform_os
+ else:
+ spec.architecture.platform_os = \
+ spec.architecture.platform.operating_system('default_os')
+ return True # changed
+
+ def _concretize_target(self, spec):
+ if spec.architecture.target is not None and isinstance(
+ spec.architecture.target, spack.architecture.Target):
return False
+ if spec.root.architecture and spec.root.architecture.target:
+ if isinstance(spec.root.architecture.target,
+ spack.architecture.Target):
+ spec.architecture.target = spec.root.architecture.target
+ else:
+ spec.architecture.target = spec.architecture.platform.target(
+ 'default_target')
+ return True # changed
- if spec.root.architecture:
- spec.architecture = spec.root.architecture
+ def _concretize_platform(self, spec):
+ if spec.architecture.platform is not None and isinstance(
+ spec.architecture.platform, spack.architecture.Platform):
+ return False
+ if spec.root.architecture and spec.root.architecture.platform:
+ if isinstance(spec.root.architecture.platform,
+ spack.architecture.Platform):
+ spec.architecture.platform = spec.root.architecture.platform
else:
- spec.architecture = spack.architecture.sys_type()
+ spec.architecture.platform = spack.architecture.platform()
+ return True # changed?
- assert(spec.architecture is not None)
- return True # changed
+ def concretize_architecture(self, spec):
+ """If the spec is empty provide the defaults of the platform. If the
+ architecture is not a basestring, then check if either the platform,
+ target or operating system are concretized. If any of the fields are
+ changed then return True. If everything is concretized (i.e the
+ architecture attribute is a namedtuple of classes) then return False.
+ If the target is a string type, then convert the string into a
+ concretized architecture. If it has no architecture and the root of the
+ DAG has an architecture, then use the root otherwise use the defaults
+ on the platform.
+ """
+ if spec.architecture is None:
+ # Set the architecture to all defaults
+ spec.architecture = spack.architecture.Arch()
+ return True
+        # Concretize the operating_system and target based on the spec
+ ret = any((self._concretize_platform(spec),
+ self._concretize_operating_system(spec),
+ self._concretize_target(spec)))
+ return ret
def concretize_variants(self, spec):
"""If the spec already has variants filled in, return. Otherwise, add
- the default variants from the package specification.
+ the user preferences from packages.yaml or the default variants from
+ the package specification.
"""
changed = False
+ preferred_variants = spack.pkgsort.spec_preferred_variants(
+ spec.package_class.name)
for name, variant in spec.package_class.variants.items():
if name not in spec.variants:
- spec.variants[name] = spack.spec.VariantSpec(name, variant.default)
changed = True
+ if name in preferred_variants:
+ spec.variants[name] = preferred_variants.get(name)
+ else:
+ spec.variants[name] = \
+ spack.spec.VariantSpec(name, variant.default)
return changed
-
def concretize_compiler(self, spec):
"""If the spec already has a compiler, we're done. If not, then take
the compiler used for the nearest ancestor with a compiler
@@ -238,15 +291,35 @@ class DefaultConcretizer(object):
build with the compiler that will be used by libraries that
link to this one, to maximize compatibility.
"""
+ # Pass on concretizing the compiler if the target is not yet determined
+ if not spec.architecture.platform_os:
+            # Returning True usually means something changed; here it
+            # means we are awaiting other changes
+ return True
+
+        # Only use a matching compiler if it is of the proper style.
+        # Takes advantage of the proper logic already existing in
+        # compiler_for_spec. Should think about whether this can be
+        # made more efficient.
+ def _proper_compiler_style(cspec, arch):
+ platform = arch.platform
+ compilers = spack.compilers.compilers_for_spec(cspec,
+ platform=platform)
+ return filter(lambda c: c.operating_system ==
+ arch.platform_os, compilers)
+ # return compilers
+
all_compilers = spack.compilers.all_compilers()
if (spec.compiler and
spec.compiler.concrete and
- spec.compiler in all_compilers):
+ spec.compiler in all_compilers):
return False
- #Find the another spec that has a compiler, or the root if none do
- other_spec = spec if spec.compiler else find_spec(spec, lambda(x) : x.compiler)
+        # Find another spec that has a compiler, or the root if none do
+ other_spec = spec if spec.compiler else find_spec(
+ spec, lambda x: x.compiler)
+
if not other_spec:
other_spec = spec.root
other_compiler = other_spec.compiler
@@ -257,68 +330,97 @@ class DefaultConcretizer(object):
spec.compiler = other_compiler.copy()
return True
- # Filter the compilers into a sorted list based on the compiler_order from spackconfig
- compiler_list = all_compilers if not other_compiler else spack.compilers.find(other_compiler)
- cmp_compilers = partial(spack.pkgsort.compiler_compare, other_spec.name)
+ # Filter the compilers into a sorted list based on the compiler_order
+ # from spackconfig
+ compiler_list = all_compilers if not other_compiler else \
+ spack.compilers.find(other_compiler)
+ cmp_compilers = partial(
+ spack.pkgsort.compiler_compare, other_spec.name)
matches = sorted(compiler_list, cmp=cmp_compilers)
if not matches:
- raise UnavailableCompilerVersionError(other_compiler)
+ arch = spec.architecture
+ raise UnavailableCompilerVersionError(other_compiler,
+ arch.platform_os)
# copy concrete version into other_compiler
- spec.compiler = matches[0].copy()
+        index = 0
+        while not _proper_compiler_style(matches[index], spec.architecture):
+            index += 1
+            if index == len(matches):
+                arch = spec.architecture
+                raise UnavailableCompilerVersionError(spec.compiler,
+                                                      arch.platform_os)
+ spec.compiler = matches[index].copy()
assert(spec.compiler.concrete)
return True # things changed.
-
def concretize_compiler_flags(self, spec):
"""
The compiler flags are updated to match those of the spec whose
compiler is used, defaulting to no compiler flags in the spec.
Default specs set at the compiler level will still be added later.
"""
+
+ if not spec.architecture.platform_os:
+            # Returning True usually means something changed; here it
+            # means we are awaiting other changes
+ return True
+
ret = False
for flag in spack.spec.FlagMap.valid_compiler_flags():
try:
nearest = next(p for p in spec.traverse(direction='parents')
- if ((p.compiler == spec.compiler and p is not spec)
- and flag in p.compiler_flags))
- if ((not flag in spec.compiler_flags) or
- sorted(spec.compiler_flags[flag]) != sorted(nearest.compiler_flags[flag])):
+ if ((p.compiler == spec.compiler and
+ p is not spec) and
+ flag in p.compiler_flags))
+ if flag not in spec.compiler_flags or \
+ not (sorted(spec.compiler_flags[flag]) >=
+ sorted(nearest.compiler_flags[flag])):
if flag in spec.compiler_flags:
- spec.compiler_flags[flag] = list(set(spec.compiler_flags[flag]) |
- set(nearest.compiler_flags[flag]))
+ spec.compiler_flags[flag] = list(
+ set(spec.compiler_flags[flag]) |
+ set(nearest.compiler_flags[flag]))
else:
- spec.compiler_flags[flag] = nearest.compiler_flags[flag]
+                    spec.compiler_flags[flag] = \
+                        nearest.compiler_flags[flag]
ret = True
except StopIteration:
- if (flag in spec.root.compiler_flags and ((not flag in spec.compiler_flags) or
- sorted(spec.compiler_flags[flag]) != sorted(spec.root.compiler_flags[flag]))):
+ if (flag in spec.root.compiler_flags and
+ ((flag not in spec.compiler_flags) or
+ sorted(spec.compiler_flags[flag]) !=
+ sorted(spec.root.compiler_flags[flag]))):
if flag in spec.compiler_flags:
- spec.compiler_flags[flag] = list(set(spec.compiler_flags[flag]) |
- set(spec.root.compiler_flags[flag]))
+ spec.compiler_flags[flag] = list(
+ set(spec.compiler_flags[flag]) |
+ set(spec.root.compiler_flags[flag]))
else:
- spec.compiler_flags[flag] = spec.root.compiler_flags[flag]
+                        spec.compiler_flags[flag] = \
+                            spec.root.compiler_flags[flag]
ret = True
else:
- if not flag in spec.compiler_flags:
+ if flag not in spec.compiler_flags:
spec.compiler_flags[flag] = []
# Include the compiler flag defaults from the config files
# This ensures that spack will detect conflicts that stem from a change
# in default compiler flags.
- compiler = spack.compilers.compiler_for_spec(spec.compiler)
+ compiler = spack.compilers.compiler_for_spec(
+ spec.compiler, spec.architecture)
for flag in compiler.flags:
if flag not in spec.compiler_flags:
spec.compiler_flags[flag] = compiler.flags[flag]
if compiler.flags[flag] != []:
ret = True
else:
- if ((sorted(spec.compiler_flags[flag]) != sorted(compiler.flags[flag])) and
- (not set(spec.compiler_flags[flag]) >= set(compiler.flags[flag]))):
+ if ((sorted(spec.compiler_flags[flag]) !=
+ sorted(compiler.flags[flag])) and
+ (not set(spec.compiler_flags[flag]) >=
+ set(compiler.flags[flag]))):
ret = True
- spec.compiler_flags[flag] = list(set(spec.compiler_flags[flag]) |
- set(compiler.flags[flag]))
+ spec.compiler_flags[flag] = list(
+ set(spec.compiler_flags[flag]) |
+ set(compiler.flags[flag]))
return ret
@@ -327,8 +429,10 @@ def find_spec(spec, condition):
"""Searches the dag from spec in an intelligent order and looks
for a spec that matches a condition"""
# First search parents, then search children
- dagiter = chain(spec.traverse(direction='parents', root=False),
- spec.traverse(direction='children', root=False))
+ deptype = ('build', 'link')
+ dagiter = chain(
+ spec.traverse(direction='parents', deptype=deptype, root=False),
+ spec.traverse(direction='children', deptype=deptype, root=False))
visited = set()
for relative in dagiter:
if condition(relative):
@@ -336,9 +440,11 @@ def find_spec(spec, condition):
visited.add(id(relative))
# Then search all other relatives in the DAG *except* spec
- for relative in spec.root.traverse():
- if relative is spec: continue
- if id(relative) in visited: continue
+ for relative in spec.root.traverse(deptypes=spack.alldeps):
+ if relative is spec:
+ continue
+ if id(relative) in visited:
+ continue
if condition(relative):
return relative
@@ -385,25 +491,33 @@ def cmp_specs(lhs, rhs):
class UnavailableCompilerVersionError(spack.error.SpackError):
+
"""Raised when there is no available compiler that satisfies a
compiler spec."""
- def __init__(self, compiler_spec):
+
+ def __init__(self, compiler_spec, operating_system):
super(UnavailableCompilerVersionError, self).__init__(
- "No available compiler version matches '%s'" % compiler_spec,
+ "No available compiler version matches '%s' on operating_system %s"
+ % (compiler_spec, operating_system),
"Run 'spack compilers' to see available compiler Options.")
class NoValidVersionError(spack.error.SpackError):
+
"""Raised when there is no way to have a concrete version for a
particular spec."""
+
def __init__(self, spec):
super(NoValidVersionError, self).__init__(
- "There are no valid versions for %s that match '%s'" % (spec.name, spec.versions))
+ "There are no valid versions for %s that match '%s'"
+ % (spec.name, spec.versions))
class NoBuildError(spack.error.SpackError):
"""Raised when a package is configured with the buildable option False, but
no satisfactory external versions can be found"""
+
def __init__(self, spec):
- super(NoBuildError, self).__init__(
- "The spec '%s' is configured as not buildable, and no matching external installs were found" % spec.name)
+ msg = ("The spec '%s' is configured as not buildable, "
+ "and no matching external installs were found")
+ super(NoBuildError, self).__init__(msg % spec.name)
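
The architecture concretization above now fills in the platform, operating
system, and target fields independently. A rough sketch of where the defaults
come from, using only the spack.architecture calls the diff itself relies on
(the surrounding spec is hypothetical):

    import spack.architecture

    # An unset spec architecture starts as an Arch() of all defaults ...
    arch = spack.architecture.Arch()

    # ... and each missing field is taken from the root spec if concrete
    # there, or from the platform's defaults otherwise:
    platform = spack.architecture.platform()
    os_ = platform.operating_system('default_os')
    target = platform.target('default_target')
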
diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py
index 88544aa7bb..c90eff4229 100644
--- a/lib/spack/spack/config.py
+++ b/lib/spack/spack/config.py
@@ -1,4 +1,3 @@
-# flake8: noqa
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
@@ -123,251 +122,31 @@ import os
import re
import sys
-import jsonschema
-import llnl.util.tty as tty
-import spack
import yaml
+import jsonschema
+from yaml.error import MarkedYAMLError
from jsonschema import Draft4Validator, validators
-from llnl.util.filesystem import mkdirp
from ordereddict_backport import OrderedDict
+
+import llnl.util.tty as tty
+from llnl.util.filesystem import mkdirp
+
+import spack
from spack.error import SpackError
-from yaml.error import MarkedYAMLError
+import spack.schema
# Hacked yaml for configuration files preserves line numbers.
import spack.util.spack_yaml as syaml
-
+from spack.build_environment import get_path_from_module
"""Dict from section names -> schema for that section."""
section_schemas = {
- 'compilers': {
- '$schema': 'http://json-schema.org/schema#',
- 'title': 'Spack compiler configuration file schema',
- 'type': 'object',
- 'additionalProperties': False,
- 'patternProperties': {
- 'compilers:?': { # optional colon for overriding site config.
- 'type': 'object',
- 'default': {},
- 'additionalProperties': False,
- 'patternProperties': {
- r'\w[\w-]*': { # architecture
- 'type': 'object',
- 'additionalProperties': False,
- 'patternProperties': {
- r'\w[\w-]*@\w[\w-]*': { # compiler spec
- 'type': 'object',
- 'additionalProperties': False,
- 'required': ['cc', 'cxx', 'f77', 'fc'],
- 'properties': {
- 'cc': { 'anyOf': [ {'type' : 'string' },
- {'type' : 'null' }]},
- 'cxx': { 'anyOf': [ {'type' : 'string' },
- {'type' : 'null' }]},
- 'f77': { 'anyOf': [ {'type' : 'string' },
- {'type' : 'null' }]},
- 'fc': { 'anyOf': [ {'type' : 'string' },
- {'type' : 'null' }]},
- 'fflags': { 'anyOf': [ {'type' : 'string' },
- {'type' : 'null' }]},
- 'cppflags': { 'anyOf': [ {'type' : 'string' },
- {'type' : 'null' }]},
- 'cflags': { 'anyOf': [ {'type' : 'string' },
- {'type' : 'null' }]},
- 'cxxflags': { 'anyOf': [ {'type' : 'string' },
- {'type' : 'null' }]},
- 'ldflags': { 'anyOf': [ {'type' : 'string' },
- {'type' : 'null' }]},
- 'ldlibs': { 'anyOf': [ {'type' : 'string' },
- {'type' : 'null' }]},
- },},},},},},},},
-
- 'mirrors': {
- '$schema': 'http://json-schema.org/schema#',
- 'title': 'Spack mirror configuration file schema',
- 'type': 'object',
- 'additionalProperties': False,
- 'patternProperties': {
- r'mirrors:?': {
- 'type': 'object',
- 'default': {},
- 'additionalProperties': False,
- 'patternProperties': {
- r'\w[\w-]*': {
- 'type': 'string'},},},},},
-
- 'repos': {
- '$schema': 'http://json-schema.org/schema#',
- 'title': 'Spack repository configuration file schema',
- 'type': 'object',
- 'additionalProperties': False,
- 'patternProperties': {
- r'repos:?': {
- 'type': 'array',
- 'default': [],
- 'items': {
- 'type': 'string'},},},},
-
- 'packages': {
- '$schema': 'http://json-schema.org/schema#',
- 'title': 'Spack package configuration file schema',
- 'type': 'object',
- 'additionalProperties': False,
- 'patternProperties': {
- r'packages:?': {
- 'type': 'object',
- 'default': {},
- 'additionalProperties': False,
- 'patternProperties': {
- r'\w[\w-]*': { # package name
- 'type': 'object',
- 'default': {},
- 'additionalProperties': False,
- 'properties': {
- 'version': {
- 'type' : 'array',
- 'default' : [],
- 'items' : { 'anyOf' : [ { 'type' : 'string' },
- { 'type' : 'number'}]}}, #version strings
- 'compiler': {
- 'type' : 'array',
- 'default' : [],
- 'items' : { 'type' : 'string' } }, #compiler specs
- 'buildable': {
- 'type': 'boolean',
- 'default': True,
- },
- 'providers': {
- 'type': 'object',
- 'default': {},
- 'additionalProperties': False,
- 'patternProperties': {
- r'\w[\w-]*': {
- 'type' : 'array',
- 'default' : [],
- 'items' : { 'type' : 'string' },},},},
- 'paths': {
- 'type' : 'object',
- 'default' : {},
- }
- },},},},},},
-
- 'modules': {
- '$schema': 'http://json-schema.org/schema#',
- 'title': 'Spack module file configuration file schema',
- 'type': 'object',
- 'additionalProperties': False,
- 'definitions': {
- 'array_of_strings': {
- 'type': 'array',
- 'default': [],
- 'items': {
- 'type': 'string'
- }
- },
- 'dictionary_of_strings': {
- 'type': 'object',
- 'patternProperties': {
- r'\w[\w-]*': { # key
- 'type': 'string'
- }
- }
- },
- 'dependency_selection': {
- 'type': 'string',
- 'enum': ['none', 'direct', 'all']
- },
- 'module_file_configuration': {
- 'type': 'object',
- 'default': {},
- 'additionalProperties': False,
- 'properties': {
- 'filter': {
- 'type': 'object',
- 'default': {},
- 'additionalProperties': False,
- 'properties': {
- 'environment_blacklist': {
- 'type': 'array',
- 'default': [],
- 'items': {
- 'type': 'string'
- }
- }
- }
- },
- 'autoload': {'$ref': '#/definitions/dependency_selection'},
- 'prerequisites': {'$ref': '#/definitions/dependency_selection'},
- 'conflict': {'$ref': '#/definitions/array_of_strings'},
- 'environment': {
- 'type': 'object',
- 'default': {},
- 'additionalProperties': False,
- 'properties': {
- 'set': {'$ref': '#/definitions/dictionary_of_strings'},
- 'unset': {'$ref': '#/definitions/array_of_strings'},
- 'prepend_path': {'$ref': '#/definitions/dictionary_of_strings'},
- 'append_path': {'$ref': '#/definitions/dictionary_of_strings'}
- }
- }
- }
- },
- 'module_type_configuration': {
- 'type': 'object',
- 'default': {},
- 'anyOf': [
- {
- 'properties': {
- 'whitelist': {'$ref': '#/definitions/array_of_strings'},
- 'blacklist': {'$ref': '#/definitions/array_of_strings'},
- 'naming_scheme': {
- 'type': 'string' # Can we be more specific here?
- }
- }
- },
- {
- 'patternProperties': {r'\w[\w-]*': {'$ref': '#/definitions/module_file_configuration'}}
- }
- ]
- }
- },
- 'patternProperties': {
- r'modules:?': {
- 'type': 'object',
- 'default': {},
- 'additionalProperties': False,
- 'properties': {
- 'prefix_inspections': {
- 'type': 'object',
- 'patternProperties': {
- r'\w[\w-]*': { # path to be inspected for existence (relative to prefix)
- '$ref': '#/definitions/array_of_strings'
- }
- }
- },
- 'enable': {
- 'type': 'array',
- 'default': [],
- 'items': {
- 'type': 'string',
- 'enum': ['tcl', 'dotkit']
- }
- },
- 'tcl': {
- 'allOf': [
- {'$ref': '#/definitions/module_type_configuration'}, # Base configuration
- {} # Specific tcl extensions
- ]
- },
- 'dotkit': {
- 'allOf': [
- {'$ref': '#/definitions/module_type_configuration'}, # Base configuration
- {} # Specific dotkit extensions
- ]
- },
- }
- },
- },
- },
+ 'compilers': spack.schema.compilers.schema,
+ 'mirrors': spack.schema.mirrors.schema,
+ 'repos': spack.schema.repos.schema,
+ 'packages': spack.schema.packages.schema,
+ 'targets': spack.schema.targets.schema,
+ 'modules': spack.schema.modules.schema,
}
"""OrderedDict of config scopes keyed by name.
@@ -384,7 +163,7 @@ def validate_section_name(section):
def extend_with_default(validator_class):
- """Add support for the 'default' attribute for properties and patternProperties.
+ """Add support for the 'default' attr for properties and patternProperties.
jsonschema does not handle this out of the box -- it only
validates. This allows us to set default values for configs
@@ -393,13 +172,15 @@ def extend_with_default(validator_class):
"""
validate_properties = validator_class.VALIDATORS["properties"]
- validate_pattern_properties = validator_class.VALIDATORS["patternProperties"]
+ validate_pattern_properties = validator_class.VALIDATORS[
+ "patternProperties"]
def set_defaults(validator, properties, instance, schema):
for property, subschema in properties.iteritems():
if "default" in subschema:
instance.setdefault(property, subschema["default"])
- for err in validate_properties(validator, properties, instance, schema):
+ for err in validate_properties(
+ validator, properties, instance, schema):
yield err
def set_pp_defaults(validator, properties, instance, schema):
@@ -410,7 +191,8 @@ def extend_with_default(validator_class):
if re.match(property, key) and val is None:
instance[key] = subschema["default"]
- for err in validate_pattern_properties(validator, properties, instance, schema):
+ for err in validate_pattern_properties(
+ validator, properties, instance, schema):
yield err
return validators.extend(validator_class, {
@@ -475,15 +257,23 @@ class ConfigScope(object):
except jsonschema.ValidationError as e:
raise ConfigSanityError(e, data)
except (yaml.YAMLError, IOError) as e:
- raise ConfigFileError("Error writing to config file: '%s'" % str(e))
+ raise ConfigFileError(
+ "Error writing to config file: '%s'" % str(e))
def clear(self):
"""Empty cached config information."""
self.sections = {}
+"""Default configuration scope is the lowest-level scope. These are
+ versioned with Spack and can be overridden by sites or users."""
+ConfigScope('defaults', os.path.join(spack.etc_path, 'spack', 'defaults'))
+
+"""Site configuration is per spack instance, for sites or projects.
+ No site-level configs should be checked into spack by default."""
+ConfigScope('site', os.path.join(spack.etc_path, 'spack'))
-ConfigScope('site', os.path.join(spack.etc_path, 'spack')),
-ConfigScope('user', os.path.expanduser('~/.spack'))
+"""User configuration can override both spack defaults and site config."""
+ConfigScope('user', spack.user_config_path)
def highest_precedence_scope():
@@ -575,8 +365,7 @@ def _merge_yaml(dest, source):
# Source list is prepended (for precedence)
if they_are(list):
- seen = set(source)
- dest[:] = source + [x for x in dest if x not in seen]
+ dest[:] = source + [x for x in dest if x not in source]
return dest
# Source dict is merged into dest.
@@ -667,7 +456,7 @@ def print_section(section):
data = syaml.syaml_dict()
data[section] = get_config(section)
syaml.dump(data, stream=sys.stdout, default_flow_style=False)
- except (yaml.YAMLError, IOError) as e:
+ except (yaml.YAMLError, IOError):
raise ConfigError("Error reading configuration: %s" % section)
@@ -679,7 +468,8 @@ def spec_externals(spec):
external_specs = []
pkg_paths = allpkgs.get(name, {}).get('paths', None)
- if not pkg_paths:
+ pkg_modules = allpkgs.get(name, {}).get('modules', None)
+ if (not pkg_paths) and (not pkg_modules):
return []
for external_spec, path in pkg_paths.iteritems():
@@ -690,6 +480,18 @@ def spec_externals(spec):
external_spec = spack.spec.Spec(external_spec, external=path)
if external_spec.satisfies(spec):
external_specs.append(external_spec)
+
+    for external_spec, module in (pkg_modules or {}).iteritems():
+ if not module:
+ continue
+
+ path = get_path_from_module(module)
+
+ external_spec = spack.spec.Spec(
+ external_spec, external=path, external_module=module)
+ if external_spec.satisfies(spec):
+ external_specs.append(external_spec)
+
return external_specs
@@ -720,6 +522,7 @@ def get_path(path, data):
class ConfigFormatError(ConfigError):
"""Raised when a configuration format does not match its schema."""
+
def __init__(self, validation_error, data):
# Try to get line number from erroneous instance and its parent
instance_mark = getattr(validation_error.instance, '_start_mark', None)
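
The _merge_yaml change above drops the intermediate set, presumably so the
membership test also works when list items are unhashable; the
prepend-with-precedence behavior is unchanged. In isolation (hypothetical
scope contents):

    # Source (higher-precedence) list is prepended; duplicates are
    # dropped from dest so the source copy wins.
    dest = ['site-repo', 'user-repo']
    source = ['override-repo', 'user-repo']
    dest[:] = source + [x for x in dest if x not in source]
    print(dest)   # ['override-repo', 'user-repo', 'site-repo']
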
diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py
index e768ddf5fe..f3dcdef0a9 100644
--- a/lib/spack/spack/database.py
+++ b/lib/spack/spack/database.py
@@ -60,7 +60,7 @@ from spack.repository import UnknownPackageError
_db_dirname = '.spack-db'
# DB version. This is stuck in the DB file to track changes in format.
-_db_version = Version('0.9.1')
+_db_version = Version('0.9.2')
# Default timeout for spack database locks is 60 seconds.
_db_lock_timeout = 60
@@ -119,6 +119,7 @@ class InstallRecord(object):
class Database(object):
+
def __init__(self, root, db_dir=None):
"""Create a Database for Spack installations under ``root``.
@@ -165,11 +166,11 @@ class Database(object):
def write_transaction(self, timeout=_db_lock_timeout):
"""Get a write lock context manager for use in a `with` block."""
- return WriteTransaction(self, self._read, self._write, timeout)
+ return WriteTransaction(self.lock, self._read, self._write, timeout)
def read_transaction(self, timeout=_db_lock_timeout):
"""Get a read lock context manager for use in a `with` block."""
- return ReadTransaction(self, self._read, None, timeout)
+ return ReadTransaction(self.lock, self._read, timeout=timeout)
def _write_to_yaml(self, stream):
"""Write out the databsae to a YAML file.
@@ -214,9 +215,11 @@ class Database(object):
# Add dependencies from other records in the install DB to
# form a full spec.
- for dep_hash in spec_dict[spec.name]['dependencies'].values():
- child = self._read_spec_from_yaml(dep_hash, installs, hash_key)
- spec._add_dependency(child)
+ if 'dependencies' in spec_dict[spec.name]:
+ yaml_deps = spec_dict[spec.name]['dependencies']
+ for dname, dhash, dtypes in Spec.read_yaml_dep_specs(yaml_deps):
+ child = self._read_spec_from_yaml(dhash, installs, hash_key)
+ spec._add_dependency(child, dtypes)
# Specs from the database need to be marked concrete because
# they represent actual installations.
@@ -289,7 +292,8 @@ class Database(object):
except Exception as e:
tty.warn("Invalid database reecord:",
"file: %s" % self._index_path,
- "hash: %s" % hash_key, "cause: %s" % str(e))
+ "hash: %s" % hash_key,
+ "cause: %s: %s" % (type(e).__name__, str(e)))
raise
self._data = data
@@ -309,7 +313,11 @@ class Database(object):
for spec in directory_layout.all_specs():
# Create a spec for each known package and add it.
path = directory_layout.path_for_spec(spec)
- self._add(spec, path, directory_layout)
+ old_info = old_data.get(spec.dag_hash())
+ explicit = False
+ if old_info is not None:
+ explicit = old_info.explicit
+ self._add(spec, path, directory_layout, explicit=explicit)
self._check_ref_counts()
@@ -328,7 +336,10 @@ class Database(object):
counts = {}
for key, rec in self._data.items():
counts.setdefault(key, 0)
- for dep in rec.spec.dependencies.values():
+ # XXX(deptype): This checks all dependencies, but build
+ # dependencies might be able to be dropped in the
+ # future.
+ for dep in rec.spec.dependencies():
dep_key = dep.dag_hash()
counts.setdefault(dep_key, 0)
counts[dep_key] += 1
@@ -342,12 +353,22 @@ class Database(object):
"Invalid ref_count: %s: %d (expected %d), in DB %s" %
(key, found, expected, self._index_path))
- def _write(self):
+ def _write(self, type, value, traceback):
"""Write the in-memory database index to its file path.
- Does no locking.
+ This is a helper function called by the WriteTransaction context
+ manager. If there is an exception while the write lock is active,
+ nothing will be written to the database file, but the in-memory
+        database *may* be left in an inconsistent state. It will be
+        consistent again at the start of the next transaction, when it
+        is read from disk.
+
+ This routine does no locking.
"""
+ # Do not write if exceptions were raised
+ if type is not None:
+ return
+
temp_file = self._index_path + (
'.%s.%s.temp' % (socket.getfqdn(), os.getpid()))
@@ -400,7 +421,7 @@ class Database(object):
else:
self._data[key] = InstallRecord(spec, path, True,
explicit=explicit)
- for dep in spec.dependencies.values():
+ for dep in spec.dependencies(('link', 'run')):
self._increment_ref_count(dep, directory_layout)
def _increment_ref_count(self, spec, directory_layout=None):
@@ -415,7 +436,7 @@ class Database(object):
self._data[key] = InstallRecord(spec.copy(), path, installed)
- for dep in spec.dependencies.values():
+ for dep in spec.dependencies('link'):
self._increment_ref_count(dep)
self._data[key].ref_count += 1
@@ -460,7 +481,7 @@ class Database(object):
if rec.ref_count == 0 and not rec.installed:
del self._data[key]
- for dep in spec.dependencies.values():
+ for dep in spec.dependencies('link'):
self._decrement_ref_count(dep)
def _remove(self, spec):
@@ -474,7 +495,7 @@ class Database(object):
return rec.spec
del self._data[key]
- for dep in rec.spec.dependencies.values():
+ for dep in rec.spec.dependencies('link'):
self._decrement_ref_count(dep)
# Returns the concrete spec so we know it in the case where a
@@ -579,57 +600,19 @@ class Database(object):
return key in self._data and not self._data[key].installed
-class _Transaction(object):
- """Simple nested transaction context manager that uses a file lock.
-
- This class can trigger actions when the lock is acquired for the
- first time and released for the last.
-
- Timeout for lock is customizable.
- """
-
- def __init__(self, db,
- acquire_fn=None,
- release_fn=None,
- timeout=_db_lock_timeout):
- self._db = db
- self._timeout = timeout
- self._acquire_fn = acquire_fn
- self._release_fn = release_fn
-
- def __enter__(self):
- if self._enter() and self._acquire_fn:
- self._acquire_fn()
-
- def __exit__(self, type, value, traceback):
- if self._exit() and self._release_fn:
- self._release_fn()
-
-
-class ReadTransaction(_Transaction):
- def _enter(self):
- return self._db.lock.acquire_read(self._timeout)
-
- def _exit(self):
- return self._db.lock.release_read()
-
-
-class WriteTransaction(_Transaction):
- def _enter(self):
- return self._db.lock.acquire_write(self._timeout)
-
- def _exit(self):
- return self._db.lock.release_write()
-
-
class CorruptDatabaseError(SpackError):
+
def __init__(self, path, msg=''):
super(CorruptDatabaseError, self).__init__(
- "Spack database is corrupt: %s. %s" % (path, msg))
+ "Spack database is corrupt: %s. %s." % (path, msg),
+ "Try running `spack reindex` to fix.")
class InvalidDatabaseVersionError(SpackError):
+
def __init__(self, expected, found):
super(InvalidDatabaseVersionError, self).__init__(
- "Expected database version %s but found version %s" %
- (expected, found))
+ "Expected database version %s but found version %s."
+ % (expected, found),
+ "`spack reindex` may fix this, or you may need a newer "
+ "Spack version.")
diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py
index 51b26773e2..313bf48f0d 100644
--- a/lib/spack/spack/directives.py
+++ b/lib/spack/spack/directives.py
@@ -171,7 +171,7 @@ def version(pkg, ver, checksum=None, **kwargs):
pkg.versions[Version(ver)] = kwargs
-def _depends_on(pkg, spec, when=None):
+def _depends_on(pkg, spec, when=None, type=None):
# If when is False do nothing
if when is False:
return
@@ -180,10 +180,29 @@ def _depends_on(pkg, spec, when=None):
when = pkg.name
when_spec = parse_anonymous_spec(when, pkg.name)
+ if type is None:
+ # The default deptype is build and link because the common case is to
+ # build against a library which then turns into a runtime dependency
+ # due to the linker.
+ # XXX(deptype): Add 'run' to this? It's an uncommon dependency type,
+    # but including it would be the most backwards-compatible choice.
+ type = ('build', 'link')
+
+ if isinstance(type, str):
+ type = spack.spec.special_types.get(type, (type,))
+
+ for deptype in type:
+ if deptype not in spack.spec.alldeps:
+ raise UnknownDependencyTypeError('depends_on', pkg.name, deptype)
+
dep_spec = Spec(spec)
if pkg.name == dep_spec.name:
raise CircularReferenceError('depends_on', pkg.name)
+ pkg_deptypes = pkg._deptypes.setdefault(dep_spec.name, set())
+ for deptype in type:
+ pkg_deptypes.add(deptype)
+
conditions = pkg.dependencies.setdefault(dep_spec.name, {})
if when_spec in conditions:
conditions[when_spec].constrain(dep_spec, deps=False)
@@ -191,13 +210,13 @@ def _depends_on(pkg, spec, when=None):
conditions[when_spec] = dep_spec
-@directive('dependencies')
-def depends_on(pkg, spec, when=None):
+@directive(('dependencies', '_deptypes'))
+def depends_on(pkg, spec, when=None, type=None):
"""Creates a dict of deps with specs defining when they apply."""
- _depends_on(pkg, spec, when=when)
+ _depends_on(pkg, spec, when=when, type=type)
-@directive(('extendees', 'dependencies'))
+@directive(('extendees', 'dependencies', '_deptypes'))
def extends(pkg, spec, **kwargs):
"""Same as depends_on, but dependency is symlinked into parent prefix.
@@ -257,7 +276,7 @@ def variant(pkg, name, default=False, description=""):
"""Define a variant for the package. Packager can specify a default
value (on or off) as well as a text description."""
- default = bool(default)
+ default = default
description = str(description).strip()
if not re.match(spack.spec.identifier_re, name):
@@ -326,3 +345,14 @@ class CircularReferenceError(DirectiveError):
directive,
"Package '%s' cannot pass itself to %s" % (package, directive))
self.package = package
+
+
+class UnknownDependencyTypeError(DirectiveError):
+ """This is raised when a dependency is of an unknown type."""
+
+ def __init__(self, directive, package, deptype):
+ super(UnknownDependencyTypeError, self).__init__(
+ directive,
+ "Package '%s' cannot depend on a package via %s."
+ % (package, deptype))
+ self.package = package
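
The new type argument to depends_on accepts a string or tuple and defaults to
('build', 'link'); anything outside spack.spec.alldeps raises the new
UnknownDependencyTypeError. A minimal package sketch using it (package and
dependency names are illustrative):

    from spack import *

    class Example(Package):
        """Hypothetical package demonstrating dependency types."""
        depends_on('cmake', type='build')            # build-time only
        depends_on('python', type=('build', 'run'))  # build and run time
        depends_on('zlib')                           # default: build, link
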
diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py
index 32d27d7bd0..0ae6f765f4 100644
--- a/lib/spack/spack/directory_layout.py
+++ b/lib/spack/spack/directory_layout.py
@@ -22,18 +22,16 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import re
import os
import exceptions
-import hashlib
import shutil
import glob
import tempfile
import yaml
-import llnl.util.tty as tty
from llnl.util.filesystem import join_path, mkdirp
+import spack
from spack.spec import Spec
from spack.error import SpackError
@@ -50,10 +48,10 @@ class DirectoryLayout(object):
install, and they can use this to customize the nesting structure of
spack installs.
"""
+
def __init__(self, root):
self.root = root
-
@property
def hidden_file_paths(self):
"""Return a list of hidden files used by the directory layout.
@@ -66,25 +64,21 @@ class DirectoryLayout(object):
"""
raise NotImplementedError()
-
def all_specs(self):
"""To be implemented by subclasses to traverse all specs for which there is
a directory within the root.
"""
raise NotImplementedError()
-
def relative_path_for_spec(self, spec):
"""Implemented by subclasses to return a relative path from the install
root to a unique location for the provided spec."""
raise NotImplementedError()
-
def create_install_directory(self, spec):
"""Creates the installation directory for a spec."""
raise NotImplementedError()
-
def check_installed(self, spec):
"""Checks whether a spec is installed.
@@ -94,7 +88,6 @@ class DirectoryLayout(object):
"""
raise NotImplementedError()
-
def extension_map(self, spec):
"""Get a dict of currently installed extension packages for a spec.
@@ -103,7 +96,6 @@ class DirectoryLayout(object):
"""
raise NotImplementedError()
-
def check_extension_conflict(self, spec, ext_spec):
"""Ensure that ext_spec can be activated in spec.
@@ -112,7 +104,6 @@ class DirectoryLayout(object):
"""
raise NotImplementedError()
-
def check_activated(self, spec, ext_spec):
"""Ensure that ext_spec can be removed from spec.
@@ -120,26 +111,22 @@ class DirectoryLayout(object):
"""
raise NotImplementedError()
-
def add_extension(self, spec, ext_spec):
"""Add to the list of currently installed extensions."""
raise NotImplementedError()
-
def remove_extension(self, spec, ext_spec):
"""Remove from the list of currently installed extensions."""
raise NotImplementedError()
-
def path_for_spec(self, spec):
- """Return an absolute path from the root to a directory for the spec."""
+ """Return absolute path from the root to a directory for the spec."""
_check_concrete(spec)
path = self.relative_path_for_spec(spec)
assert(not path.startswith(self.root))
return os.path.join(self.root, path)
-
def remove_install_directory(self, spec):
"""Removes a prefix and any empty parent directories from the root.
        Raises RemoveFailedError if something goes wrong.
@@ -165,7 +152,7 @@ class DirectoryLayout(object):
class YamlDirectoryLayout(DirectoryLayout):
"""Lays out installation directories like this::
<install root>/
- <architecture>/
+ <platform-os-target>/
<compiler>-<compiler version>/
<name>-<version>-<variants>-<hash>
@@ -176,6 +163,7 @@ class YamlDirectoryLayout(DirectoryLayout):
only enabled variants are included in the install path.
Disabled variants are omitted.
"""
+
def __init__(self, root, **kwargs):
super(YamlDirectoryLayout, self).__init__(root)
self.metadata_dir = kwargs.get('metadata_dir', '.spack')
@@ -190,12 +178,10 @@ class YamlDirectoryLayout(DirectoryLayout):
# Cache of already written/read extension maps.
self._extension_maps = {}
-
@property
def hidden_file_paths(self):
return (self.metadata_dir,)
-
def relative_path_for_spec(self, spec):
_check_concrete(spec)
@@ -214,49 +200,47 @@ class YamlDirectoryLayout(DirectoryLayout):
return path
-
def write_spec(self, spec, path):
"""Write a spec out to a file."""
_check_concrete(spec)
with open(path, 'w') as f:
spec.to_yaml(f)
-
def read_spec(self, path):
"""Read the contents of a file and parse them as a spec"""
- with open(path) as f:
- spec = Spec.from_yaml(f)
+ try:
+ with open(path) as f:
+ spec = Spec.from_yaml(f)
+ except Exception as e:
+ if spack.debug:
+ raise
+ raise SpecReadError(
+ 'Unable to read file: %s' % path, 'Cause: ' + str(e))
# Specs read from actual installations are always concrete
spec._mark_concrete()
return spec
-
def spec_file_path(self, spec):
"""Gets full path to spec file"""
_check_concrete(spec)
return join_path(self.metadata_path(spec), self.spec_file_name)
-
def metadata_path(self, spec):
return join_path(self.path_for_spec(spec), self.metadata_dir)
-
def build_log_path(self, spec):
return join_path(self.path_for_spec(spec), self.metadata_dir,
self.build_log_name)
-
def build_env_path(self, spec):
return join_path(self.path_for_spec(spec), self.metadata_dir,
self.build_env_name)
-
def build_packages_path(self, spec):
return join_path(self.path_for_spec(spec), self.metadata_dir,
self.packages_dir)
-
def create_install_directory(self, spec):
_check_concrete(spec)
@@ -267,7 +251,6 @@ class YamlDirectoryLayout(DirectoryLayout):
mkdirp(self.metadata_path(spec))
self.write_spec(spec, self.spec_file_path(spec))
-
def check_installed(self, spec):
_check_concrete(spec)
path = self.path_for_spec(spec)
@@ -278,7 +261,7 @@ class YamlDirectoryLayout(DirectoryLayout):
if not os.path.isfile(spec_file_path):
raise InconsistentInstallDirectoryError(
- 'Inconsistent state: install prefix exists but contains no spec.yaml:',
+ 'Install prefix exists but contains no spec.yaml:',
" " + path)
installed_spec = self.read_spec(spec_file_path)
@@ -286,12 +269,11 @@ class YamlDirectoryLayout(DirectoryLayout):
return path
if spec.dag_hash() == installed_spec.dag_hash():
- raise SpecHashCollisionError(installed_hash, spec_hash)
+ raise SpecHashCollisionError(spec, installed_spec)
else:
raise InconsistentInstallDirectoryError(
'Spec file in %s does not match hash!' % spec_file_path)
-
def all_specs(self):
if not os.path.isdir(self.root):
return []
@@ -301,20 +283,17 @@ class YamlDirectoryLayout(DirectoryLayout):
spec_files = glob.glob(pattern)
return [self.read_spec(s) for s in spec_files]
-
def specs_by_hash(self):
by_hash = {}
for spec in self.all_specs():
by_hash[spec.dag_hash()] = spec
return by_hash
-
def extension_file_path(self, spec):
"""Gets full path to an installed package's extension file"""
_check_concrete(spec)
return join_path(self.metadata_path(spec), self.extension_file_name)
-
def _write_extensions(self, spec, extensions):
path = self.extension_file_path(spec)
@@ -326,23 +305,22 @@ class YamlDirectoryLayout(DirectoryLayout):
# write tmp file
with tmp:
yaml.dump({
- 'extensions' : [
- { ext.name : {
- 'hash' : ext.dag_hash(),
- 'path' : str(ext.prefix)
+ 'extensions': [
+ {ext.name: {
+ 'hash': ext.dag_hash(),
+ 'path': str(ext.prefix)
}} for ext in sorted(extensions.values())]
}, tmp, default_flow_style=False)
# Atomic update by moving tmpfile on top of old one.
os.rename(tmp.name, path)
-
def _extension_map(self, spec):
"""Get a dict<name -> spec> for all extensions currently
installed for this package."""
_check_concrete(spec)
- if not spec in self._extension_maps:
+ if spec not in self._extension_maps:
path = self.extension_file_path(spec)
if not os.path.exists(path):
self._extension_maps[spec] = {}
@@ -357,14 +335,14 @@ class YamlDirectoryLayout(DirectoryLayout):
dag_hash = entry[name]['hash']
prefix = entry[name]['path']
- if not dag_hash in by_hash:
+ if dag_hash not in by_hash:
raise InvalidExtensionSpecError(
"Spec %s not found in %s" % (dag_hash, prefix))
ext_spec = by_hash[dag_hash]
- if not prefix == ext_spec.prefix:
+ if prefix != ext_spec.prefix:
raise InvalidExtensionSpecError(
- "Prefix %s does not match spec with hash %s: %s"
+ "Prefix %s does not match spec hash %s: %s"
% (prefix, dag_hash, ext_spec))
exts[ext_spec.name] = ext_spec
@@ -372,13 +350,11 @@ class YamlDirectoryLayout(DirectoryLayout):
return self._extension_maps[spec]
-
def extension_map(self, spec):
"""Defensive copying version of _extension_map() for external API."""
_check_concrete(spec)
return self._extension_map(spec).copy()
-
def check_extension_conflict(self, spec, ext_spec):
exts = self._extension_map(spec)
if ext_spec.name in exts:
@@ -388,13 +364,11 @@ class YamlDirectoryLayout(DirectoryLayout):
else:
raise ExtensionConflictError(spec, ext_spec, installed_spec)
-
def check_activated(self, spec, ext_spec):
exts = self._extension_map(spec)
- if (not ext_spec.name in exts) or (ext_spec != exts[ext_spec.name]):
+ if (ext_spec.name not in exts) or (ext_spec != exts[ext_spec.name]):
raise NoSuchExtensionError(spec, ext_spec)
-
def add_extension(self, spec, ext_spec):
_check_concrete(spec)
_check_concrete(ext_spec)
@@ -407,7 +381,6 @@ class YamlDirectoryLayout(DirectoryLayout):
exts[ext_spec.name] = ext_spec
self._write_extensions(spec, exts)
-
def remove_extension(self, spec, ext_spec):
_check_concrete(spec)
_check_concrete(ext_spec)
@@ -423,20 +396,23 @@ class YamlDirectoryLayout(DirectoryLayout):
class DirectoryLayoutError(SpackError):
"""Superclass for directory layout errors."""
+
def __init__(self, message, long_msg=None):
super(DirectoryLayoutError, self).__init__(message, long_msg)
class SpecHashCollisionError(DirectoryLayoutError):
"""Raised when there is a hash collision in an install layout."""
+
def __init__(self, installed_spec, new_spec):
super(SpecHashCollisionError, self).__init__(
'Specs %s and %s have the same SHA-1 prefix!'
- % installed_spec, new_spec)
+ % (installed_spec, new_spec))
class RemoveFailedError(DirectoryLayoutError):
"""Raised when a DirectoryLayout cannot remove an install prefix."""
+
def __init__(self, installed_spec, prefix, error):
super(RemoveFailedError, self).__init__(
'Could not remove prefix %s for %s : %s'
@@ -446,41 +422,50 @@ class RemoveFailedError(DirectoryLayoutError):
class InconsistentInstallDirectoryError(DirectoryLayoutError):
"""Raised when a package seems to be installed to the wrong place."""
+
def __init__(self, message, long_msg=None):
- super(InconsistentInstallDirectoryError, self).__init__(message, long_msg)
+ super(InconsistentInstallDirectoryError, self).__init__(
+ message, long_msg)
class InstallDirectoryAlreadyExistsError(DirectoryLayoutError):
"""Raised when create_install_directory is called unnecessarily."""
+
def __init__(self, path):
super(InstallDirectoryAlreadyExistsError, self).__init__(
"Install path %s already exists!")
+class SpecReadError(DirectoryLayoutError):
+ """Raised when directory layout can't read a spec."""
+
+
class InvalidExtensionSpecError(DirectoryLayoutError):
"""Raised when an extension file has a bad spec in it."""
- def __init__(self, message):
- super(InvalidExtensionSpecError, self).__init__(message)
class ExtensionAlreadyInstalledError(DirectoryLayoutError):
"""Raised when an extension is added to a package that already has it."""
+
def __init__(self, spec, ext_spec):
super(ExtensionAlreadyInstalledError, self).__init__(
- "%s is already installed in %s" % (ext_spec.short_spec, spec.short_spec))
+ "%s is already installed in %s"
+ % (ext_spec.short_spec, spec.short_spec))
class ExtensionConflictError(DirectoryLayoutError):
"""Raised when an extension is added to a package that already has it."""
+
def __init__(self, spec, ext_spec, conflict):
super(ExtensionConflictError, self).__init__(
- "%s cannot be installed in %s because it conflicts with %s"% (
- ext_spec.short_spec, spec.short_spec, conflict.short_spec))
+ "%s cannot be installed in %s because it conflicts with %s"
+ % (ext_spec.short_spec, spec.short_spec, conflict.short_spec))
class NoSuchExtensionError(DirectoryLayoutError):
"""Raised when an extension isn't there on deactivate."""
+
def __init__(self, spec, ext_spec):
super(NoSuchExtensionError, self).__init__(
- "%s cannot be removed from %s because it's not activated."% (
- ext_spec.short_spec, spec.short_spec))
+ "%s cannot be removed from %s because it's not activated."
+ % (ext_spec.short_spec, spec.short_spec))
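
For reference, the <platform-os-target> scheme in the YamlDirectoryLayout
docstring above resolves to paths like the following (root and spec values
hypothetical):

    from spack.directory_layout import YamlDirectoryLayout

    layout = YamlDirectoryLayout('/opt/spack/opt')
    # e.g. /opt/spack/opt/linux-ubuntu14-x86_64/gcc-4.8/zlib-1.2.8-<hash>
    path = layout.path_for_spec(spec)        # spec must be concrete
    layout.create_install_directory(spec)    # also writes .spack/spec.yaml
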
diff --git a/lib/spack/spack/environment.py b/lib/spack/spack/environment.py
index af642dcc9b..613ece2f45 100644
--- a/lib/spack/spack/environment.py
+++ b/lib/spack/spack/environment.py
@@ -24,44 +24,60 @@
##############################################################################
import collections
import inspect
+import json
import os
import os.path
+import subprocess
class NameModifier(object):
+
def __init__(self, name, **kwargs):
self.name = name
self.args = {'name': name}
self.args.update(kwargs)
+ def update_args(self, **kwargs):
+ self.__dict__.update(kwargs)
+ self.args.update(kwargs)
+
class NameValueModifier(object):
+
def __init__(self, name, value, **kwargs):
self.name = name
self.value = value
self.separator = kwargs.get('separator', ':')
- self.args = {'name': name, 'value': value, 'delim': self.separator}
+ self.args = {'name': name, 'value': value, 'separator': self.separator}
+ self.args.update(kwargs)
+
+ def update_args(self, **kwargs):
+ self.__dict__.update(kwargs)
self.args.update(kwargs)
class SetEnv(NameValueModifier):
+
def execute(self):
os.environ[self.name] = str(self.value)
class UnsetEnv(NameModifier):
+
def execute(self):
# Avoid throwing if the variable was not set
os.environ.pop(self.name, None)
class SetPath(NameValueModifier):
+
def execute(self):
string_path = concatenate_paths(self.value, separator=self.separator)
os.environ[self.name] = string_path
class AppendPath(NameValueModifier):
+
def execute(self):
environment_value = os.environ.get(self.name, '')
directories = environment_value.split(
@@ -71,6 +87,7 @@ class AppendPath(NameValueModifier):
class PrependPath(NameValueModifier):
+
def execute(self):
environment_value = os.environ.get(self.name, '')
directories = environment_value.split(
@@ -80,6 +97,7 @@ class PrependPath(NameValueModifier):
class RemovePath(NameValueModifier):
+
def execute(self):
environment_value = os.environ.get(self.name, '')
directories = environment_value.split(
@@ -90,6 +108,7 @@ class RemovePath(NameValueModifier):
class EnvironmentModifications(object):
+
"""
Keeps track of requests to modify the current environment.
@@ -240,6 +259,131 @@ class EnvironmentModifications(object):
for x in actions:
x.execute()
+ @staticmethod
+ def from_sourcing_files(*args, **kwargs):
+ """
+ Creates an instance of EnvironmentModifications that, if executed,
+ has the same effect on the environment as sourcing the files passed as
+ parameters
+
+ Args:
+ *args: list of files to be sourced
+
+ Returns:
+ instance of EnvironmentModifications
+ """
+ env = EnvironmentModifications()
+ # Check if the files are actually there
+ if not all(os.path.isfile(file) for file in args):
+            raise RuntimeError('trying to source non-existent files')
+ # Relevant kwd parameters and formats
+ info = dict(kwargs)
+ info.setdefault('shell', '/bin/bash')
+ info.setdefault('shell_options', '-c')
+ info.setdefault('source_command', 'source')
+ info.setdefault('suppress_output', '&> /dev/null')
+ info.setdefault('concatenate_on_success', '&&')
+
+ shell = '{shell}'.format(**info)
+ shell_options = '{shell_options}'.format(**info)
+ source_file = '{source_command} {file} {concatenate_on_success}'
+
+ dump_cmd = "import os, json; print json.dumps(dict(os.environ))"
+ dump_environment = 'python -c "%s"' % dump_cmd
+
+ # Construct the command that will be executed
+ command = [source_file.format(file=file, **info) for file in args]
+ command.append(dump_environment)
+ command = ' '.join(command)
+ command = [
+ shell,
+ shell_options,
+ command
+ ]
+
+ # Try to source all the files,
+ proc = subprocess.Popen(
+ command, stdout=subprocess.PIPE, env=os.environ)
+ proc.wait()
+ if proc.returncode != 0:
+ raise RuntimeError('sourcing files returned a non-zero exit code')
+ output = ''.join([line for line in proc.stdout])
+ # Construct a dictionary with all the variables in the new environment
+ after_source_env = dict(json.loads(output))
+ this_environment = dict(os.environ)
+
+ # Filter variables that are not related to sourcing a file
+ to_be_filtered = 'SHLVL', '_', 'PWD', 'OLDPWD'
+ for d in after_source_env, this_environment:
+ for name in to_be_filtered:
+ d.pop(name, None)
+
+ # Fill the EnvironmentModifications instance
+
+ # New variables
+ new_variables = set(after_source_env) - set(this_environment)
+ for x in new_variables:
+ env.set(x, after_source_env[x])
+ # Variables that have been unset
+ unset_variables = set(this_environment) - set(after_source_env)
+ for x in unset_variables:
+ env.unset(x)
+ # Variables that have been modified
+ common_variables = set(
+ this_environment).intersection(set(after_source_env))
+ modified_variables = [x for x in common_variables
+ if this_environment[x] != after_source_env[x]]
+
+ def return_separator_if_any(first_value, second_value):
+ separators = ':', ';'
+ for separator in separators:
+ if separator in first_value and separator in second_value:
+ return separator
+ return None
+
+ for x in modified_variables:
+ current = this_environment[x]
+ modified = after_source_env[x]
+ sep = return_separator_if_any(current, modified)
+ if sep is None:
+ # We just need to set the variable to the new value
+ env.set(x, after_source_env[x])
+ else:
+ current_list = current.split(sep)
+ modified_list = modified.split(sep)
+ # Paths that have been removed
+ remove_list = [
+ ii for ii in current_list if ii not in modified_list]
+                # Check that nothing has been added in the middle of the
+                # current list
+ remaining_list = [
+ ii for ii in current_list if ii in modified_list]
+ start = modified_list.index(remaining_list[0])
+ end = modified_list.index(remaining_list[-1])
+ search = sep.join(modified_list[start:end + 1])
+                if search not in current:
+                    # Something was inserted in the middle: just set the
+                    # variable to the new value and move on to the next one
+                    env.set(x, after_source_env[x])
+                    continue
+                else:
+                    # List slices never raise, so out-of-range bounds
+                    # simply yield empty lists here
+                    prepend_list = modified_list[:start]
+                    append_list = modified_list[end + 1:]
+
+ for item in remove_list:
+ env.remove_path(x, item)
+ for item in append_list:
+ env.append_path(x, item)
+ for item in prepend_list:
+ env.prepend_path(x, item)
+
+ return env
+
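A minimal usage sketch of the method above; the script path is made up, and
apply_modifications() is assumed to be the instance method that replays the
recorded actions:

    # Capture the effect of sourcing a setup script, then replay it
    # onto the current process environment.
    env = EnvironmentModifications.from_sourcing_files('/path/to/setup.sh')
    env.apply_modifications()  # assumed replay method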
def concatenate_paths(paths, separator=':'):
"""
@@ -266,7 +410,7 @@ def set_or_unset_not_first(variable, changes, errstream):
if indexes:
good = '\t \t{context} at {filename}:{lineno}'
nogood = '\t--->\t{context} at {filename}:{lineno}'
- message = 'Suspicious requests to set or unset the variable \'{var}\' found' # NOQA: ignore=E501
+ message = "Suspicious requests to set or unset '{var}' found"
errstream(message.format(var=variable))
for ii, item in enumerate(changes):
print_format = nogood if ii in indexes else good
diff --git a/lib/spack/spack/error.py b/lib/spack/spack/error.py
index 85ad2fe249..c94875e91a 100644
--- a/lib/spack/spack/error.py
+++ b/lib/spack/spack/error.py
@@ -27,21 +27,21 @@ import sys
import llnl.util.tty as tty
import spack
+
class SpackError(Exception):
"""This is the superclass for all Spack errors.
Subclasses can be found in the modules they have to do with.
"""
+
def __init__(self, message, long_message=None):
super(SpackError, self).__init__()
self.message = message
self._long_message = long_message
-
@property
def long_message(self):
return self._long_message
-
def die(self):
if spack.debug:
sys.excepthook(*sys.exc_info())
@@ -52,21 +52,23 @@ class SpackError(Exception):
print self.long_message
os._exit(1)
-
def __str__(self):
msg = self.message
if self._long_message:
msg += "\n %s" % self._long_message
return msg
+
class UnsupportedPlatformError(SpackError):
"""Raised by packages when a platform is not supported"""
+
def __init__(self, message):
super(UnsupportedPlatformError, self).__init__(message)
class NoNetworkConnectionError(SpackError):
"""Raised when an operation needs an internet connection."""
+
def __init__(self, message, url):
super(NoNetworkConnectionError, self).__init__(
"No network connection: " + str(message),
diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py
index 1953d7c1b3..c69a23033c 100644
--- a/lib/spack/spack/fetch_strategy.py
+++ b/lib/spack/spack/fetch_strategy.py
@@ -307,10 +307,7 @@ class URLFetchStrategy(FetchStrategy):
if not self.archive_file:
raise NoArchiveFileError("Cannot call archive() before fetching.")
- if not extension(destination) == extension(self.archive_file):
- raise ValueError("Cannot archive without matching extensions.")
-
- shutil.move(self.archive_file, destination)
+ shutil.copy(self.archive_file, destination)
@_needs_stage
def check(self):
@@ -348,7 +345,7 @@ class URLFetchStrategy(FetchStrategy):
def __repr__(self):
url = self.url if self.url else "no url"
- return "URLFetchStrategy<%s>" % url
+ return "%s<%s>" % (self.__class__.__name__, url)
def __str__(self):
if self.url:
@@ -357,6 +354,25 @@ class URLFetchStrategy(FetchStrategy):
return "[no url]"
+class CacheURLFetchStrategy(URLFetchStrategy):
+ """The resource associated with a cache URL may be out of date."""
+
+ def __init__(self, *args, **kwargs):
+ super(CacheURLFetchStrategy, self).__init__(*args, **kwargs)
+
+ @_needs_stage
+ def fetch(self):
+ super(CacheURLFetchStrategy, self).fetch()
+ if self.digest:
+ try:
+ self.check()
+ except ChecksumError:
+ # Future fetchers will assume they don't need to
+ # download if the file remains
+ os.remove(self.archive_file)
+ raise
+
+
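A sketch of the eviction behavior added here; the URL and digest are made up.
On a checksum mismatch the stale cached archive is deleted, so later fetchers
fall back to the original source:

    fetcher = CacheURLFetchStrategy('file:///var/cache/foo.tar.gz',
                                    digest='0123456789abcdef0123456789abcdef')
    fetcher.fetch()  # raises ChecksumError and removes the cached file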
class VCSFetchStrategy(FetchStrategy):
def __init__(self, name, *rev_types, **kwargs):
@@ -502,6 +518,7 @@ class GitFetchStrategy(VCSFetchStrategy):
super(GitFetchStrategy, self).__init__(
'git', 'tag', 'branch', 'commit', **forwarded_args)
self._git = None
+ self.submodules = kwargs.get('submodules', False)
@property
def git_version(self):
@@ -580,6 +597,10 @@ class GitFetchStrategy(VCSFetchStrategy):
self.git('pull', '--tags', ignore_errors=1)
self.git('checkout', self.tag)
+ # Init submodules if the user asked for them.
+ if self.submodules:
+ self.git('submodule', 'update', '--init')
+
def archive(self, destination):
super(GitFetchStrategy, self).archive(destination, exclude='.git')
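A hedged sketch of how a package might opt in to the new submodule support
through the usual version() directive; the package name and URL are made up:

    class Example(Package):
        """Hypothetical package tracking a git branch with submodules."""
        version('develop', git='https://github.com/example/example.git',
                branch='master', submodules=True)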
@@ -815,6 +836,36 @@ def for_package_version(pkg, version):
raise InvalidArgsError(pkg, version)
+class FsCache(object):
+
+ def __init__(self, root):
+ self.root = os.path.abspath(root)
+
+ def store(self, fetcher, relativeDst):
+ unique = False
+ uidGroups = [['tag', 'commit'], ['digest'], ['revision']]
+ for grp in uidGroups:
+ try:
+ unique |= any(getattr(fetcher, x) for x in grp)
+ except AttributeError:
+ pass
+ if unique:
+ break
+ if not unique:
+ return
+
+ dst = join_path(self.root, relativeDst)
+ mkdirp(os.path.dirname(dst))
+ fetcher.archive(dst)
+
+ def fetcher(self, targetPath, digest):
+ url = "file://" + join_path(self.root, targetPath)
+ return CacheURLFetchStrategy(url, digest)
+
+ def destroy(self):
+ shutil.rmtree(self.root, ignore_errors=True)
+
+
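A usage sketch of FsCache as defined above: store() caches only fetchers that
expose a unique identifier (tag/commit, digest, or revision), and fetcher()
hands back a checksum-verifying CacheURLFetchStrategy. The root, path, and
digest are made up:

    cache = FsCache('/var/spack/cache')
    cache.store(fetcher, 'foo/foo-1.0.tar.gz')   # no-op without a unique id
    cached = cache.fetcher('foo/foo-1.0.tar.gz',
                           digest='0123456789abcdef0123456789abcdef')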
class FetchError(spack.error.SpackError):
def __init__(self, msg, long_msg=None):
diff --git a/lib/spack/spack/file_cache.py b/lib/spack/spack/file_cache.py
new file mode 100644
index 0000000000..0a66166fd8
--- /dev/null
+++ b/lib/spack/spack/file_cache.py
@@ -0,0 +1,185 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import os
+import shutil
+
+from llnl.util.filesystem import *
+from llnl.util.lock import *
+
+from spack.error import SpackError
+
+
+class FileCache(object):
+ """This class manages cached data in the filesystem.
+
+ - Cache files are fetched and stored by unique keys. Keys can be relative
+      paths, so that there can be some hierarchy in the cache.
+
+ - The FileCache handles locking cache files for reading and writing, so
+ client code need not manage locks for cache entries.
+
+ """
+
+ def __init__(self, root):
+ """Create a file cache object.
+
+ This will create the cache directory if it does not exist yet.
+
+ """
+ self.root = root.rstrip(os.path.sep)
+ if not os.path.exists(self.root):
+ mkdirp(self.root)
+
+ self._locks = {}
+
+ def destroy(self):
+ """Remove all files under the cache root."""
+ for f in os.listdir(self.root):
+ path = join_path(self.root, f)
+ if os.path.isdir(path):
+ shutil.rmtree(path, True)
+ else:
+ os.remove(path)
+
+ def cache_path(self, key):
+ """Path to the file in the cache for a particular key."""
+ return join_path(self.root, key)
+
+ def _lock_path(self, key):
+        """Path to the lock file for a particular key."""
+ keyfile = os.path.basename(key)
+ keydir = os.path.dirname(key)
+
+ return join_path(self.root, keydir, '.' + keyfile + '.lock')
+
+ def _get_lock(self, key):
+ """Create a lock for a key, if necessary, and return a lock object."""
+ if key not in self._locks:
+ lock_file = self._lock_path(key)
+ if not os.path.exists(lock_file):
+ touch(lock_file)
+ self._locks[key] = Lock(lock_file)
+ return self._locks[key]
+
+ def init_entry(self, key):
+ """Ensure we can access a cache file. Create a lock for it if needed.
+
+ Return whether the cache file exists yet or not.
+ """
+ cache_path = self.cache_path(key)
+
+ exists = os.path.exists(cache_path)
+ if exists:
+ if not os.path.isfile(cache_path):
+ raise CacheError("Cache file is not a file: %s" % cache_path)
+
+ if not os.access(cache_path, os.R_OK | os.W_OK):
+ raise CacheError("Cannot access cache file: %s" % cache_path)
+ else:
+ # if the file is hierarchical, make parent directories
+ parent = os.path.dirname(cache_path)
+ if parent.rstrip(os.path.sep) != self.root:
+ mkdirp(parent)
+
+ if not os.access(parent, os.R_OK | os.W_OK):
+ raise CacheError("Cannot access cache directory: %s" % parent)
+
+ # ensure lock is created for this key
+ self._get_lock(key)
+ return exists
+
+ def read_transaction(self, key):
+ """Get a read transaction on a file cache item.
+
+ Returns a ReadTransaction context manager and opens the cache file for
+ reading. You can use it like this:
+
+ with spack.user_cache.read_transaction(key) as cache_file:
+ cache_file.read()
+
+ """
+ return ReadTransaction(
+ self._get_lock(key), lambda: open(self.cache_path(key)))
+
+ def write_transaction(self, key):
+ """Get a write transaction on a file cache item.
+
+ Returns a WriteTransaction context manager that opens a temporary file
+ for writing. Once the context manager finishes, if nothing went wrong,
+ moves the file into place on top of the old file atomically.
+
+ """
+ class WriteContextManager(object):
+
+ def __enter__(cm):
+ cm.orig_filename = self.cache_path(key)
+ cm.orig_file = None
+ if os.path.exists(cm.orig_filename):
+ cm.orig_file = open(cm.orig_filename, 'r')
+
+ cm.tmp_filename = self.cache_path(key) + '.tmp'
+ cm.tmp_file = open(cm.tmp_filename, 'w')
+
+ return cm.orig_file, cm.tmp_file
+
+ def __exit__(cm, type, value, traceback):
+ if cm.orig_file:
+ cm.orig_file.close()
+ cm.tmp_file.close()
+
+ if value:
+                    # remove the tmp file on exception & re-raise
+                    os.remove(cm.tmp_filename)
+ raise value
+ else:
+ os.rename(cm.tmp_filename, cm.orig_filename)
+
+ return WriteTransaction(self._get_lock(key), WriteContextManager)
+
+ def mtime(self, key):
+ """Return modification time of cache file, or 0 if it does not exist.
+
+ Time is in units returned by os.stat in the mtime field, which is
+ platform-dependent.
+
+ """
+ if not self.init_entry(key):
+ return 0
+ else:
+ sinfo = os.stat(self.cache_path(key))
+ return sinfo.st_mtime
+
+ def remove(self, key):
+ lock = self._get_lock(key)
+ try:
+ lock.acquire_write()
+ os.unlink(self.cache_path(key))
+ finally:
+ lock.release_write()
+ os.unlink(self._lock_path(key))
+
+
+class CacheError(SpackError):
+ pass
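A minimal sketch of the FileCache API above; the root and key are made up, and
the write transaction is assumed to yield the pair returned by the context
manager's __enter__:

    cache = FileCache('/tmp/spack-file-cache')
    key = 'indexes/repo-index.json'
    cache.init_entry(key)
    with cache.write_transaction(key) as (old_file, new_file):
        new_file.write('{}')   # the tmp file replaces the entry atomically
    with cache.read_transaction(key) as f:
        data = f.read()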
diff --git a/lib/spack/spack/graph.py b/lib/spack/spack/graph.py
index 22058d41d8..b875e9da99 100644
--- a/lib/spack/spack/graph.py
+++ b/lib/spack/spack/graph.py
@@ -61,7 +61,6 @@ Note that ``graph_ascii`` assumes a single spec while ``graph_dot``
can take a number of specs as input.
"""
-__all__ = ['topological_sort', 'graph_ascii', 'AsciiGraph', 'graph_dot']
from heapq import *
@@ -71,6 +70,8 @@ from llnl.util.tty.color import *
import spack
from spack.spec import Spec
+__all__ = ['topological_sort', 'graph_ascii', 'AsciiGraph', 'graph_dot']
+
def topological_sort(spec, **kwargs):
"""Topological sort for specs.
@@ -80,18 +81,21 @@ def topological_sort(spec, **kwargs):
"""
reverse = kwargs.get('reverse', False)
+ # XXX(deptype): iterate over a certain kind of dependency. Maybe color
+ # edges based on the type of dependency?
if not reverse:
- parents = lambda s: s.dependents
- children = lambda s: s.dependencies
+ parents = lambda s: s.dependents()
+ children = lambda s: s.dependencies()
else:
- parents = lambda s: s.dependencies
- children = lambda s: s.dependents
+ parents = lambda s: s.dependencies()
+ children = lambda s: s.dependents()
# Work on a copy so this is nondestructive.
spec = spec.copy()
nodes = spec.index()
topo_order = []
+ par = dict((name, parents(nodes[name])) for name in nodes.keys())
remaining = [name for name in nodes.keys() if not parents(nodes[name])]
heapify(remaining)
@@ -100,12 +104,12 @@ def topological_sort(spec, **kwargs):
topo_order.append(name)
node = nodes[name]
- for dep in children(node).values():
- del parents(dep)[node.name]
- if not parents(dep):
+ for dep in children(node):
+ par[dep.name].remove(node)
+ if not par[dep.name]:
heappush(remaining, dep.name)
- if any(parents(s) for s in spec.traverse()):
+ if any(par.get(s.name, []) for s in spec.traverse()):
raise ValueError("Spec has cycles!")
else:
return topo_order
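A hedged usage sketch of topological_sort on a concrete spec (construction of
the spec is elided):

    order = topological_sort(spec)                  # root first
    leaves_first = topological_sort(spec, reverse=True)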
@@ -130,7 +134,9 @@ def find(seq, predicate):
states = ('node', 'collapse', 'merge-right', 'expand-right', 'back-edge')
NODE, COLLAPSE, MERGE_RIGHT, EXPAND_RIGHT, BACK_EDGE = states
+
class AsciiGraph(object):
+
def __init__(self):
# These can be set after initialization or after a call to
# graph() to change behavior.
@@ -151,18 +157,15 @@ class AsciiGraph(object):
self._prev_state = None # State of previous line
self._prev_index = None # Index of expansion point of prev line
-
def _indent(self):
self._out.write(self.indent * ' ')
-
def _write_edge(self, string, index, sub=0):
"""Write a colored edge to the output stream."""
name = self._frontier[index][sub]
edge = "@%s{%s}" % (self._name_to_color[name], string)
self._out.write(edge)
-
def _connect_deps(self, i, deps, label=None):
"""Connect dependencies to existing edges in the frontier.
@@ -197,7 +200,8 @@ class AsciiGraph(object):
collapse = True
if self._prev_state == EXPAND_RIGHT:
# Special case where previous line expanded and i is off by 1.
- self._back_edge_line([], j, i+1, True, label + "-1.5 " + str((i+1,j)))
+ self._back_edge_line([], j, i + 1, True,
+ label + "-1.5 " + str((i + 1, j)))
collapse = False
else:
@@ -205,19 +209,20 @@ class AsciiGraph(object):
if self._prev_state == NODE and self._prev_index < i:
i += 1
- if i-j > 1:
+ if i - j > 1:
# We need two lines to connect if distance > 1
- self._back_edge_line([], j, i, True, label + "-1 " + str((i,j)))
+ self._back_edge_line([], j, i, True,
+ label + "-1 " + str((i, j)))
collapse = False
- self._back_edge_line([j], -1, -1, collapse, label + "-2 " + str((i,j)))
+ self._back_edge_line([j], -1, -1, collapse,
+ label + "-2 " + str((i, j)))
return True
elif deps:
self._frontier.insert(i, deps)
return False
-
def _set_state(self, state, index, label=None):
if state not in states:
raise ValueError("Invalid graph state!")
@@ -231,7 +236,6 @@ class AsciiGraph(object):
self._out.write("%-20s" % (str(label) if label else ''))
self._out.write("%s" % self._frontier)
-
def _back_edge_line(self, prev_ends, end, start, collapse, label=None):
"""Write part of a backwards edge in the graph.
@@ -285,27 +289,26 @@ class AsciiGraph(object):
self._indent()
for p in prev_ends:
- advance(p, lambda: [("| ", self._pos)] )
- advance(p+1, lambda: [("|/", self._pos)] )
+ advance(p, lambda: [("| ", self._pos)])
+ advance(p + 1, lambda: [("|/", self._pos)])
if end >= 0:
- advance(end + 1, lambda: [("| ", self._pos)] )
- advance(start - 1, lambda: [("|", self._pos), ("_", end)] )
+ advance(end + 1, lambda: [("| ", self._pos)])
+ advance(start - 1, lambda: [("|", self._pos), ("_", end)])
else:
- advance(start - 1, lambda: [("| ", self._pos)] )
+ advance(start - 1, lambda: [("| ", self._pos)])
if start >= 0:
- advance(start, lambda: [("|", self._pos), ("/", end)] )
+ advance(start, lambda: [("|", self._pos), ("/", end)])
if collapse:
- advance(flen, lambda: [(" /", self._pos)] )
+ advance(flen, lambda: [(" /", self._pos)])
else:
- advance(flen, lambda: [("| ", self._pos)] )
+ advance(flen, lambda: [("| ", self._pos)])
self._set_state(BACK_EDGE, end, label)
self._out.write("\n")
-
def _node_line(self, index, name):
"""Writes a line with a node at index."""
self._indent()
@@ -314,14 +317,13 @@ class AsciiGraph(object):
self._out.write("%s " % self.node_character)
- for c in range(index+1, len(self._frontier)):
+ for c in range(index + 1, len(self._frontier)):
self._write_edge("| ", c)
self._out.write(" %s" % name)
self._set_state(NODE, index)
self._out.write("\n")
-
def _collapse_line(self, index):
"""Write a collapsing line after a node was added at index."""
self._indent()
@@ -333,36 +335,33 @@ class AsciiGraph(object):
self._set_state(COLLAPSE, index)
self._out.write("\n")
-
def _merge_right_line(self, index):
"""Edge at index is same as edge to right. Merge directly with '\'"""
self._indent()
for c in range(index):
self._write_edge("| ", c)
self._write_edge("|", index)
- self._write_edge("\\", index+1)
- for c in range(index+1, len(self._frontier)):
- self._write_edge("| ", c )
+ self._write_edge("\\", index + 1)
+ for c in range(index + 1, len(self._frontier)):
+ self._write_edge("| ", c)
self._set_state(MERGE_RIGHT, index)
self._out.write("\n")
-
def _expand_right_line(self, index):
self._indent()
for c in range(index):
self._write_edge("| ", c)
self._write_edge("|", index)
- self._write_edge("\\", index+1)
+ self._write_edge("\\", index + 1)
- for c in range(index+2, len(self._frontier)):
+ for c in range(index + 2, len(self._frontier)):
self._write_edge(" \\", c)
self._set_state(EXPAND_RIGHT, index)
self._out.write("\n")
-
def write(self, spec, **kwargs):
"""Write out an ascii graph of the provided spec.
@@ -396,7 +395,7 @@ class AsciiGraph(object):
# Colors associated with each node in the DAG.
# Edges are colored by the node they point to.
self._name_to_color = dict((name, self.colors[i % len(self.colors)])
- for i, name in enumerate(topo_order))
+ for i, name in enumerate(topo_order))
# Frontier tracks open edges of the graph as it's written out.
self._frontier = [[spec.name]]
@@ -405,7 +404,8 @@ class AsciiGraph(object):
i = find(self._frontier, lambda f: len(f) > 1)
if i >= 0:
- # Expand frontier until there are enough columns for all children.
+ # Expand frontier until there are enough columns for all
+ # children.
# Figure out how many back connections there are and
# sort them so we do them in order
@@ -422,8 +422,9 @@ class AsciiGraph(object):
prev_ends = []
for j, (b, d) in enumerate(back):
self._frontier[i].remove(d)
- if i-b > 1:
- self._back_edge_line(prev_ends, b, i, False, 'left-1')
+ if i - b > 1:
+ self._back_edge_line(prev_ends, b, i, False,
+ 'left-1')
del prev_ends[:]
prev_ends.append(b)
@@ -437,12 +438,13 @@ class AsciiGraph(object):
elif len(self._frontier[i]) > 1:
# Expand forward after doing all back connections
- if (i+1 < len(self._frontier) and len(self._frontier[i+1]) == 1
- and self._frontier[i+1][0] in self._frontier[i]):
+ if (i + 1 < len(self._frontier) and
+ len(self._frontier[i + 1]) == 1 and
+ self._frontier[i + 1][0] in self._frontier[i]):
# We need to connect to the element to the right.
# Keep lines straight by connecting directly and
# avoiding unnecessary expand/contract.
- name = self._frontier[i+1][0]
+ name = self._frontier[i + 1][0]
self._frontier[i].remove(name)
self._merge_right_line(i)
@@ -456,9 +458,8 @@ class AsciiGraph(object):
self._frontier.pop(i)
self._connect_deps(i, deps, "post-expand")
-
# Handle any remaining back edges to the right
- j = i+1
+ j = i + 1
while j < len(self._frontier):
deps = self._frontier.pop(j)
if not self._connect_deps(j, deps, "back-from-right"):
@@ -475,9 +476,10 @@ class AsciiGraph(object):
# Replace node with its dependencies
self._frontier.pop(i)
- if node.dependencies:
- deps = sorted((d for d in node.dependencies), reverse=True)
- self._connect_deps(i, deps, "new-deps") # anywhere.
+ if node.dependencies():
+ deps = sorted((d.name for d in node.dependencies()),
+ reverse=True)
+ self._connect_deps(i, deps, "new-deps") # anywhere.
elif self._frontier:
self._collapse_line(i)
@@ -499,7 +501,6 @@ def graph_ascii(spec, **kwargs):
graph.write(spec, color=color, out=out)
-
def graph_dot(*specs, **kwargs):
"""Generate a graph in dot format of all provided specs.
diff --git a/lib/spack/spack/hooks/__init__.py b/lib/spack/spack/hooks/__init__.py
index 902e488eca..c7c84defa0 100644
--- a/lib/spack/spack/hooks/__init__.py
+++ b/lib/spack/spack/hooks/__init__.py
@@ -45,6 +45,7 @@ from llnl.util.lang import memoized, list_modules
from llnl.util.filesystem import join_path
import spack
+
@memoized
def all_hook_modules():
modules = []
@@ -58,6 +59,7 @@ def all_hook_modules():
class HookRunner(object):
+
def __init__(self, hook_name):
self.hook_name = hook_name
diff --git a/lib/spack/spack/hooks/extensions.py b/lib/spack/spack/hooks/extensions.py
index bcbd68dfa0..070b309a43 100644
--- a/lib/spack/spack/hooks/extensions.py
+++ b/lib/spack/spack/hooks/extensions.py
@@ -23,8 +23,6 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import spack
-
def pre_uninstall(pkg):
assert(pkg.spec.concrete)
diff --git a/lib/spack/spack/hooks/licensing.py b/lib/spack/spack/hooks/licensing.py
index 0f63b0e05a..9010b84154 100644
--- a/lib/spack/spack/hooks/licensing.py
+++ b/lib/spack/spack/hooks/licensing.py
@@ -26,7 +26,7 @@ import os
import spack
import llnl.util.tty as tty
-from llnl.util.filesystem import join_path
+from llnl.util.filesystem import join_path, mkdirp
def pre_install(pkg):
@@ -154,6 +154,9 @@ def symlink_license(pkg):
target = pkg.global_license_file
for filename in pkg.license_files:
link_name = join_path(pkg.prefix, filename)
+ license_dir = os.path.dirname(link_name)
+ if not os.path.exists(license_dir):
+ mkdirp(license_dir)
if os.path.exists(target):
os.symlink(target, link_name)
tty.msg("Added local symlink %s to global license file" %
diff --git a/lib/spack/spack/hooks/sbang.py b/lib/spack/spack/hooks/sbang.py
index 83d67ea225..02c1ce3816 100644
--- a/lib/spack/spack/hooks/sbang.py
+++ b/lib/spack/spack/hooks/sbang.py
@@ -23,8 +23,9 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
+import stat
+import re
-from llnl.util.filesystem import *
import llnl.util.tty as tty
import spack
@@ -34,6 +35,7 @@ import spack.modules
# here, as it is the shortest I could find on a modern OS.
shebang_limit = 127
+
def shebang_too_long(path):
"""Detects whether a file has a shebang line that is too long."""
with open(path, 'r') as script:
@@ -57,17 +59,26 @@ def filter_shebang(path):
if original.startswith(new_sbang_line):
return
- backup = path + ".shebang.bak"
- os.rename(path, backup)
+ # Use --! instead of #! on second line for lua.
+ if re.search(r'^#!(/[^/]*)*lua\b', original):
+ original = re.sub(r'^#', '--', original)
+
+ # Change non-writable files to be writable if needed.
+ saved_mode = None
+ if not os.access(path, os.W_OK):
+ st = os.stat(path)
+ saved_mode = st.st_mode
+ os.chmod(path, saved_mode | stat.S_IWRITE)
with open(path, 'w') as new_file:
new_file.write(new_sbang_line)
new_file.write(original)
- copy_mode(backup, path)
- unset_executable_mode(backup)
+ # Restore original permissions.
+ if saved_mode is not None:
+ os.chmod(path, saved_mode)
- tty.warn("Patched overly long shebang in %s" % path)
+ tty.warn("Patched overlong shebang in %s" % path)
def filter_shebangs_in_directory(directory):
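An illustrative before/after of filter_shebang on a lua script whose shebang
exceeds shebang_limit; the paths are made up and the exact form of
new_sbang_line is assumed:

    before:  #!/very/long/install/prefix/bin/lua
    after:   #!/bin/bash <spack root>/bin/sbang
             --!/very/long/install/prefix/bin/lua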
diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py
index 0bbcfba6b4..f053e4405f 100644
--- a/lib/spack/spack/mirror.py
+++ b/lib/spack/spack/mirror.py
@@ -40,9 +40,8 @@ import spack.error
import spack.url as url
import spack.fetch_strategy as fs
from spack.spec import Spec
-from spack.stage import Stage
from spack.version import *
-from spack.util.compression import extension, allowed_archive
+from spack.util.compression import allowed_archive
def mirror_archive_filename(spec, fetcher):
@@ -52,10 +51,10 @@ def mirror_archive_filename(spec, fetcher):
if isinstance(fetcher, fs.URLFetchStrategy):
if fetcher.expand_archive:
- # If we fetch this version with a URLFetchStrategy, use URL's archive type
+ # If we fetch with a URLFetchStrategy, use URL's archive type
ext = url.downloaded_file_extension(fetcher.url)
else:
- # If the archive shouldn't be expanded, don't check for its extension.
+ # If the archive shouldn't be expanded, don't check extension.
ext = None
else:
# Otherwise we'll make a .tar.gz ourselves
@@ -106,7 +105,9 @@ def get_matching_versions(specs, **kwargs):
def suggest_archive_basename(resource):
"""
- Return a tentative basename for an archive. Raise an exception if the name is among the allowed archive types.
+ Return a tentative basename for an archive.
+
+ Raises an exception if the name is not an allowed archive type.
:param fetcher:
:return:
@@ -170,7 +171,7 @@ def create(path, specs, **kwargs):
'error': []
}
- # Iterate through packages and download all the safe tarballs for each of them
+ # Iterate through packages and download all safe tarballs for each
for spec in version_specs:
add_single_spec(spec, mirror_root, categories, **kwargs)
@@ -190,12 +191,15 @@ def add_single_spec(spec, mirror_root, categories, **kwargs):
fetcher = stage.fetcher
if ii == 0:
# create a subdirectory for the current package@version
- archive_path = os.path.abspath(join_path(mirror_root, mirror_archive_path(spec, fetcher)))
+ archive_path = os.path.abspath(join_path(
+ mirror_root, mirror_archive_path(spec, fetcher)))
name = spec.format("$_$@")
else:
resource = stage.resource
- archive_path = join_path(subdir, suggest_archive_basename(resource))
- name = "{resource} ({pkg}).".format(resource=resource.name, pkg=spec.format("$_$@"))
+ archive_path = join_path(
+ subdir, suggest_archive_basename(resource))
+ name = "{resource} ({pkg}).".format(
+ resource=resource.name, pkg=spec.format("$_$@"))
subdir = os.path.dirname(archive_path)
mkdirp(subdir)
@@ -217,15 +221,18 @@ def add_single_spec(spec, mirror_root, categories, **kwargs):
categories['present'].append(spec)
else:
categories['mirrored'].append(spec)
+
except Exception as e:
if spack.debug:
sys.excepthook(*sys.exc_info())
else:
- tty.warn("Error while fetching %s" % spec.format('$_$@'), e.message)
+ tty.warn("Error while fetching %s"
+ % spec.format('$_$@'), e.message)
categories['error'].append(spec)
class MirrorError(spack.error.SpackError):
"""Superclass of all mirror-creation related errors."""
+
def __init__(self, msg, long_msg=None):
super(MirrorError, self).__init__(msg, long_msg)
diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py
index d2b819e80a..debc6752b4 100644
--- a/lib/spack/spack/modules.py
+++ b/lib/spack/spack/modules.py
@@ -120,7 +120,7 @@ def dependencies(spec, request='all'):
return []
if request == 'direct':
- return [xx for _, xx in spec.dependencies.items()]
+ return spec.dependencies()
# FIXME : during module file creation nodes seem to be visited multiple
    # FIXME : times even if cover='nodes' is given. This workaround permits
@@ -188,6 +188,8 @@ def parse_config_options(module_generator):
#####
# Automatic loading loads
+ module_file_actions['hash_length'] = module_configuration.get(
+ 'hash_length', 7)
module_file_actions['autoload'] = dependencies(
module_generator.spec, module_file_actions.get('autoload', 'none'))
# Prerequisites
@@ -237,6 +239,7 @@ class EnvModule(object):
formats = {}
class __metaclass__(type):
+
def __init__(cls, name, bases, dict):
type.__init__(cls, name, bases, dict)
if cls.name != 'env_module' and cls.name in CONFIGURATION[
@@ -269,14 +272,26 @@ class EnvModule(object):
@property
def tokens(self):
+ """Tokens that can be substituted in environment variable values
+ and naming schemes
+ """
tokens = {
'name': self.spec.name,
'version': self.spec.version,
- 'compiler': self.spec.compiler
+ 'compiler': self.spec.compiler,
+ 'prefix': self.spec.package.prefix
}
return tokens
@property
+ def upper_tokens(self):
+ """Tokens that can be substituted in environment variable names"""
+ upper_tokens = {
+ 'name': self.spec.name.replace('-', '_').upper()
+ }
+ return upper_tokens
+
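An illustration of the token expansion performed in
process_environment_command below, assuming a spec named 'my-pkg'; the
templates are hypothetical configuration values:

    '{name}_ROOT'.format(**self.upper_tokens)   # -> 'MY_PKG_ROOT'
    '{prefix}/share'.format(**self.tokens)      # -> '<install prefix>/share'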
+ @property
def use_name(self):
"""
Subclasses should implement this to return the name the module command
@@ -285,11 +300,20 @@ class EnvModule(object):
naming_tokens = self.tokens
naming_scheme = self.naming_scheme
name = naming_scheme.format(**naming_tokens)
- name += '-' + self.spec.dag_hash(
- ) # Always append the hash to make the module file unique
# Not everybody is working on linux...
parts = name.split('/')
name = join_path(*parts)
+ # Add optional suffixes based on constraints
+ configuration, _ = parse_config_options(self)
+ suffixes = [name]
+ for constraint, suffix in configuration.get('suffixes', {}).items():
+ if constraint in self.spec:
+ suffixes.append(suffix)
+ # Always append the hash to make the module file unique
+ hash_length = configuration.pop('hash_length', 7)
+ if hash_length != 0:
+ suffixes.append(self.spec.dag_hash(length=hash_length))
+ name = '-'.join(suffixes)
return name
@property
@@ -331,7 +355,7 @@ class EnvModule(object):
return False
- def write(self):
+ def write(self, overwrite=False):
"""
Writes out a module file for this object.
@@ -381,6 +405,8 @@ class EnvModule(object):
for x in filter_blacklisted(
module_configuration.pop('autoload', []), self.name):
module_file_content += self.autoload(x)
+ for x in module_configuration.pop('load', []):
+ module_file_content += self.autoload(x)
for x in filter_blacklisted(
module_configuration.pop('prerequisites', []), self.name):
module_file_content += self.prerequisite(x)
@@ -390,6 +416,15 @@ class EnvModule(object):
for line in self.module_specific_content(module_configuration):
module_file_content += line
+ # Print a warning in case I am accidentally overwriting
+ # a module file that is already there (name clash)
+ if not overwrite and os.path.exists(self.file_name):
+ message = 'Module file already exists : skipping creation\n'
+ message += 'file : {0.file_name}\n'
+ message += 'spec : {0.spec}'
+ tty.warn(message.format(self))
+ return
+
# Dump to file
with open(self.file_name, 'w') as f:
f.write(module_file_content)
@@ -402,8 +437,12 @@ class EnvModule(object):
return tuple()
def autoload(self, spec):
- m = type(self)(spec)
- return self.autoload_format.format(module_file=m.use_name)
+ if not isinstance(spec, str):
+ m = type(self)(spec)
+ module_file = m.use_name
+ else:
+ module_file = spec
+ return self.autoload_format.format(module_file=module_file)
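With this change autoload() accepts either a spec, rendered through the
module's use_name, or a literal module name string such as an entry from the
new 'load' list; a hedged sketch:

    self.autoload(dep_spec)       # line built from type(self)(dep_spec).use_name
    self.autoload('gcc/5.3.0')    # line built from the literal module name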
def prerequisite(self, spec):
m = type(self)(spec)
@@ -411,11 +450,17 @@ class EnvModule(object):
def process_environment_command(self, env):
for command in env:
+ # Token expansion from configuration file
+ name = command.args.get('name', '').format(**self.upper_tokens)
+ value = str(command.args.get('value', '')).format(**self.tokens)
+ command.update_args(name=name, value=value)
+            # Format the line in the module file
try:
yield self.environment_modifications_formats[type(
command)].format(**command.args)
except KeyError:
- message = 'Cannot handle command of type {command} : skipping request' # NOQA: ignore=E501
+ message = ('Cannot handle command of type {command}: '
+ 'skipping request')
details = '{context} at {filename}:{lineno}'
tty.warn(message.format(command=type(command)))
tty.warn(details.format(**command.args))
@@ -441,20 +486,21 @@ class EnvModule(object):
class Dotkit(EnvModule):
name = 'dotkit'
- path = join_path(spack.share_path, "dotkit")
-
+ path = join_path(spack.share_path, 'dotkit')
environment_modifications_formats = {
PrependPath: 'dk_alter {name} {value}\n',
+ RemovePath: 'dk_unalter {name} {value}\n',
SetEnv: 'dk_setenv {name} {value}\n'
}
autoload_format = 'dk_op {module_file}\n'
- default_naming_format = '{name}-{version}-{compiler.name}-{compiler.version}' # NOQA: ignore=E501
+ default_naming_format = \
+ '{name}-{version}-{compiler.name}-{compiler.version}'
@property
def file_name(self):
- return join_path(Dotkit.path, self.spec.architecture,
+ return join_path(self.path, self.spec.architecture,
'%s.dk' % self.use_name)
@property
@@ -476,18 +522,18 @@ class Dotkit(EnvModule):
def prerequisite(self, spec):
tty.warn('prerequisites: not supported by dotkit module files')
- tty.warn('\tYou may want to check ~/.spack/modules.yaml')
+ tty.warn('\tYou may want to check %s/modules.yaml'
+ % spack.user_config_path)
return ''
class TclModule(EnvModule):
name = 'tcl'
path = join_path(spack.share_path, "modules")
-
environment_modifications_formats = {
- PrependPath: 'prepend-path --delim "{delim}" {name} \"{value}\"\n',
- AppendPath: 'append-path --delim "{delim}" {name} \"{value}\"\n',
- RemovePath: 'remove-path --delim "{delim}" {name} \"{value}\"\n',
+ PrependPath: 'prepend-path --delim "{separator}" {name} \"{value}\"\n',
+ AppendPath: 'append-path --delim "{separator}" {name} \"{value}\"\n',
+ RemovePath: 'remove-path --delim "{separator}" {name} \"{value}\"\n',
SetEnv: 'setenv {name} \"{value}\"\n',
UnsetEnv: 'unsetenv {name}\n'
}
@@ -499,18 +545,19 @@ class TclModule(EnvModule):
prerequisite_format = 'prereq {module_file}\n'
- default_naming_format = '{name}-{version}-{compiler.name}-{compiler.version}' # NOQA: ignore=E501
+ default_naming_format = \
+ '{name}-{version}-{compiler.name}-{compiler.version}'
@property
def file_name(self):
- return join_path(TclModule.path, self.spec.architecture, self.use_name)
+ return join_path(self.path, self.spec.architecture, self.use_name)
@property
def header(self):
timestamp = datetime.datetime.now()
# TCL Modulefile header
header = '#%Module1.0\n'
- header += '## Module file created by spack (https://github.com/LLNL/spack) on %s\n' % timestamp # NOQA: ignore=E501
+ header += '## Module file created by spack (https://github.com/LLNL/spack) on %s\n' % timestamp
header += '##\n'
header += '## %s\n' % self.spec.short_spec
header += '##\n'
@@ -540,10 +587,12 @@ class TclModule(EnvModule):
for naming_dir, conflict_dir in zip(
self.naming_scheme.split('/'), item.split('/')):
if naming_dir != conflict_dir:
- message = 'conflict scheme does not match naming scheme [{spec}]\n\n' # NOQA: ignore=E501
+ message = 'conflict scheme does not match naming '
+ message += 'scheme [{spec}]\n\n'
message += 'naming scheme : "{nformat}"\n'
message += 'conflict scheme : "{cformat}"\n\n'
- message += '** You may want to check your `modules.yaml` configuration file **\n' # NOQA: ignore=E501
+ message += '** You may want to check your '
+ message += '`modules.yaml` configuration file **\n'
tty.error(message.format(spec=self.spec,
nformat=self.naming_scheme,
cformat=item))
diff --git a/lib/spack/spack/multimethod.py b/lib/spack/spack/multimethod.py
index 170ef3cea2..d1d1f32445 100644
--- a/lib/spack/spack/multimethod.py
+++ b/lib/spack/spack/multimethod.py
@@ -43,15 +43,13 @@ avoids overly complicated rat nests of if statements. Obviously,
depending on the scenario, regular old conditionals might be clearer,
so package authors should use their judgement.
"""
-import sys
import functools
-import collections
from llnl.util.lang import *
import spack.architecture
import spack.error
-from spack.spec import parse_anonymous_spec, Spec
+from spack.spec import parse_anonymous_spec
class SpecMultiMethod(object):
@@ -89,13 +87,13 @@ class SpecMultiMethod(object):
See the docs for decorators below for more details.
"""
+
def __init__(self, default=None):
self.method_list = []
self.default = default
if default:
functools.update_wrapper(self, default)
-
def register(self, spec, method):
"""Register a version of a method for a particular sys_type."""
self.method_list.append((spec, method))
@@ -105,12 +103,10 @@ class SpecMultiMethod(object):
else:
assert(self.__name__ == method.__name__)
-
def __get__(self, obj, objtype):
"""This makes __call__ support instance methods."""
return functools.partial(self.__call__, obj)
-
def __call__(self, package_self, *args, **kwargs):
"""Find the first method with a spec that matches the
package's spec. If none is found, call the default
@@ -127,7 +123,6 @@ class SpecMultiMethod(object):
type(package_self), self.__name__, spec,
[m[0] for m in self.method_list])
-
def __str__(self):
return "SpecMultiMethod {\n\tdefault: %s,\n\tspecs: %s\n}" % (
self.default, self.method_list)
@@ -149,7 +144,7 @@ class when(object):
@when('arch=chaos_5_x86_64_ib')
def install(self, prefix):
# This will be executed instead of the default install if
- # the package's sys_type() is chaos_5_x86_64_ib.
+ # the package's platform() is chaos_5_x86_64_ib.
       @when('arch=bgqos_0')
def install(self, prefix):
@@ -195,11 +190,13 @@ class when(object):
platform-specific versions. There's not much we can do to get
around this because of the way decorators work.
"""
+
def __init__(self, spec):
pkg = get_calling_module_name()
if spec is True:
spec = pkg
- self.spec = parse_anonymous_spec(spec, pkg) if spec is not False else None
+ self.spec = (parse_anonymous_spec(spec, pkg)
+ if spec is not False else None)
def __call__(self, method):
# Get the first definition of the method in the calling scope
@@ -218,12 +215,14 @@ class when(object):
class MultiMethodError(spack.error.SpackError):
"""Superclass for multimethod dispatch errors"""
+
def __init__(self, message):
super(MultiMethodError, self).__init__(message)
class NoSuchMethodError(spack.error.SpackError):
"""Raised when we can't find a version of a multi-method."""
+
def __init__(self, cls, method_name, spec, possible_specs):
super(NoSuchMethodError, self).__init__(
"Package %s does not support %s called with %s. Options are: %s"
diff --git a/lib/spack/spack/operating_systems/__init__.py b/lib/spack/spack/operating_systems/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/spack/spack/operating_systems/__init__.py
diff --git a/lib/spack/spack/operating_systems/cnl.py b/lib/spack/spack/operating_systems/cnl.py
new file mode 100644
index 0000000000..78807865b3
--- /dev/null
+++ b/lib/spack/spack/operating_systems/cnl.py
@@ -0,0 +1,69 @@
+import re
+import os
+
+import llnl.util.tty as tty  # used by tty.die() below
+
+from spack.architecture import OperatingSystem
+from spack.util.executable import *
+import spack.spec
+from spack.util.multiproc import parmap
+import spack.compilers
+
+
+class Cnl(OperatingSystem):
+    """ Compute Node Linux (CNL) is the operating system used on the compute
+    nodes of Cray XC series supercomputers. It is a very stripped-down
+    version of GNU/Linux. Any compilers found through this operating system
+    will be used with modules. If this OS is upgraded (or downgraded), the
+    name and version below must be updated to match.
+ """
+
+ def __init__(self):
+ name = 'CNL'
+ version = '10'
+ super(Cnl, self).__init__(name, version)
+
+ def __str__(self):
+ return self.name
+
+ def find_compilers(self, *paths):
+ types = spack.compilers.all_compiler_types()
+ compiler_lists = parmap(
+ lambda cmp_cls: self.find_compiler(cmp_cls, *paths), types)
+
+ # ensure all the version calls we made are cached in the parent
+ # process, as well. This speeds up Spack a lot.
+ clist = reduce(lambda x, y: x + y, compiler_lists)
+ return clist
+
+ def find_compiler(self, cmp_cls, *paths):
+ compilers = []
+ if cmp_cls.PrgEnv:
+ if not cmp_cls.PrgEnv_compiler:
+ tty.die('Must supply PrgEnv_compiler with PrgEnv')
+
+ modulecmd = which('modulecmd')
+ modulecmd.add_default_arg('python')
+
+ # Save the environment variable to restore later
+            old_modulepath = os.environ.get('MODULEPATH', '')
+            # if given any explicit paths, search them for module files too
+            if paths:
+                module_paths = ':' + ':'.join(p for p in paths)
+                os.environ['MODULEPATH'] = old_modulepath + module_paths
+
+ output = modulecmd(
+ 'avail', cmp_cls.PrgEnv_compiler, output=str, error=str)
+ matches = re.findall(
+ r'(%s)/([\d\.]+[\d])' % cmp_cls.PrgEnv_compiler, output)
+ for name, version in matches:
+ v = version
+ comp = cmp_cls(
+ spack.spec.CompilerSpec(name + '@' + v), self,
+ ['cc', 'CC', 'ftn'], [cmp_cls.PrgEnv, name + '/' + v])
+
+ compilers.append(comp)
+
+ # Restore modulepath environment variable
+ if paths:
+ os.environ['MODULEPATH'] = old_modulepath
+
+ return compilers
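An illustration of the module scan above: with a hypothetical PrgEnv_compiler
of 'gcc' and a 'modulecmd python avail gcc' listing containing 'gcc/5.3.0',
the regex yields:

    matches = [('gcc', '5.3.0')]
    # -> cmp_cls(CompilerSpec('gcc@5.3.0'), self,
    #            ['cc', 'CC', 'ftn'], [cmp_cls.PrgEnv, 'gcc/5.3.0'])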
diff --git a/lib/spack/spack/operating_systems/linux_distro.py b/lib/spack/spack/operating_systems/linux_distro.py
new file mode 100644
index 0000000000..6d70ae80b6
--- /dev/null
+++ b/lib/spack/spack/operating_systems/linux_distro.py
@@ -0,0 +1,24 @@
+import re
+import platform as py_platform
+from spack.architecture import OperatingSystem
+
+
+class LinuxDistro(OperatingSystem):
+ """ This class will represent the autodetected operating system
+ for a Linux System. Since there are many different flavors of
+ Linux, this class will attempt to encompass them all through
+    autodetection using the python platform module and the method
+    platform.linux_distribution()
+ """
+
+ def __init__(self):
+ distname, version, _ = py_platform.linux_distribution(
+ full_distribution_name=False)
+
+ # Grabs major version from tuple on redhat; on other platforms
+ # grab the first legal identifier in the version field. On
+ # debian you get things like 'wheezy/sid'; sid means unstable.
+ # We just record 'wheezy' and don't get quite so detailed.
+ version = re.split(r'[^\w-]', version)[0]
+
+ super(LinuxDistro, self).__init__(distname, version)
diff --git a/lib/spack/spack/operating_systems/mac_os.py b/lib/spack/spack/operating_systems/mac_os.py
new file mode 100644
index 0000000000..3e5ab9b2e9
--- /dev/null
+++ b/lib/spack/spack/operating_systems/mac_os.py
@@ -0,0 +1,30 @@
+import platform as py_platform
+from spack.architecture import OperatingSystem
+
+
+class MacOs(OperatingSystem):
+    """This class represents the macOS operating system. It is
+    autodetected using python's platform.mac_ver. The macOS platform
+    is represented by the operating system name of its major version,
+    e.g. elcapitan, yosemite, etc.
+ """
+
+ def __init__(self):
+        """ Autodetects the mac version from a dictionary. Goes back as
+        far as 10.6 (snowleopard). On anything older, the name falls
+        back to the generic "macos".
+ """
+ mac_releases = {'10.6': "snowleopard",
+ "10.7": "lion",
+ "10.8": "mountainlion",
+ "10.9": "mavericks",
+ "10.10": "yosemite",
+ "10.11": "elcapitan",
+ "10.12": "sierra"}
+
+ mac_ver = py_platform.mac_ver()[0][:-2]
+ name = mac_releases.get(mac_ver, "macos")
+ super(MacOs, self).__init__(name, mac_ver)
+
+ def __str__(self):
+ return self.name
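A worked illustration of the version parsing above; the reported release is
hypothetical:

    py_platform.mac_ver()[0]    # e.g. '10.11.6'
    '10.11.6'[:-2]              # -> '10.11'; assumes a one-digit patch level
    mac_releases.get('10.11')   # -> 'elcapitan'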
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index 2e7d8a7709..ff8c8e96bc 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -34,9 +34,11 @@ rundown on spack and how it differs from homebrew, look at the
README.
"""
import os
+import sys
import re
import textwrap
import time
+import string
import llnl.util.tty as tty
import spack
@@ -50,6 +52,8 @@ import spack.mirror
import spack.repository
import spack.url
import spack.util.web
+
+from urlparse import urlparse
from StringIO import StringIO
from llnl.util.filesystem import *
from llnl.util.lang import *
@@ -58,9 +62,11 @@ from llnl.util.tty.log import log_output
from spack.stage import Stage, ResourceStage, StageComposite
from spack.util.compression import allowed_archive
from spack.util.environment import dump_environment
-from spack.util.executable import ProcessError
+from spack.util.executable import ProcessError, which
from spack.version import *
-from urlparse import urlparse
+from spack import directory_layout
+
+
"""Allowed URL schemes for spack packages."""
_ALLOWED_URL_SCHEMES = ["http", "https", "ftp", "file", "git"]
@@ -173,12 +179,10 @@ class Package(object):
Most software comes in nicely packaged tarballs, like this one:
http://www.cmake.org/files/v2.8/cmake-2.8.10.2.tar.gz
-
Taking a page from homebrew, spack deduces pretty much everything it
needs to know from the URL above. If you simply type this:
spack create http://www.cmake.org/files/v2.8/cmake-2.8.10.2.tar.gz
-
Spack will download the tarball, generate an md5 hash, figure out the
version and the name of the package from the URL, and create a new
package file for you with all the names and attributes set correctly.
@@ -305,6 +309,8 @@ class Package(object):
parallel = True
"""# jobs to use for parallel make. If set, overrides default of ncpus."""
make_jobs = None
+ """By default do not run tests within package's install()"""
+ run_tests = False
"""Most packages are NOT extendable. Set to True if you want extensions."""
extendable = False
"""List of prefix-relative file paths (or a single path). If these do
@@ -398,13 +404,24 @@ class Package(object):
spack.repo.get(self.extendee_spec)._check_extendable()
@property
+ def package_dir(self):
+ """Return the directory where the package.py file lives."""
+ return os.path.dirname(self.module.__file__)
+
+ @property
+ def global_license_dir(self):
+ """Returns the directory where global license files for all
+ packages are stored."""
+ spack_root = ancestor(__file__, 4)
+ return join_path(spack_root, 'etc', 'spack', 'licenses')
+
+ @property
def global_license_file(self):
- """Returns the path where a global license file should be stored."""
+ """Returns the path where a global license file for this
+ particular package should be stored."""
if not self.license_files:
return
- spack_root = ancestor(__file__, 4)
- global_license_dir = join_path(spack_root, 'etc', 'spack', 'licenses')
- return join_path(global_license_dir, self.name,
+ return join_path(self.global_license_dir, self.name,
os.path.basename(self.license_files[0]))
@property
@@ -545,6 +562,11 @@ class Package(object):
def fetcher(self, f):
self._fetcher = f
+ def dependencies_of_type(self, *deptypes):
+ """Get subset of the dependencies with certain types."""
+ return dict((name, conds) for name, conds in self.dependencies.items()
+ if any(d in self._deptypes[name] for d in deptypes))
+
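A hedged usage sketch of the new helper on a package instance:

    pkg.dependencies_of_type('build')          # build-only dependencies
    pkg.dependencies_of_type('link', 'run')    # deps the installed product needs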
@property
def extendee_spec(self):
"""
@@ -557,7 +579,7 @@ class Package(object):
name = next(iter(self.extendees))
# If the extendee is in the spec's deps already, return that.
- for dep in self.spec.traverse():
+ for dep in self.spec.traverse(deptypes=('link', 'run')):
if name == dep.name:
return dep
@@ -607,49 +629,12 @@ class Package(object):
exts = spack.install_layout.extension_map(self.extendee_spec)
return (self.name in exts) and (exts[self.name] == self.spec)
- def preorder_traversal(self, visited=None, **kwargs):
- """This does a preorder traversal of the package's dependence DAG."""
- virtual = kwargs.get("virtual", False)
-
- if visited is None:
- visited = set()
-
- if self.name in visited:
- return
- visited.add(self.name)
-
- if not virtual:
- yield self
-
- for name in sorted(self.dependencies.keys()):
- spec = self.dependencies[name]
-
- # currently, we do not descend into virtual dependencies, as this
- # makes doing a sensible traversal much harder. We just assume
- # that ANY of the virtual deps will work, which might not be true
- # (due to conflicts or unsatisfiable specs). For now this is ok
- # but we might want to reinvestigate if we start using a lot of
- # complicated virtual dependencies
- # TODO: reinvestigate this.
- if spec.virtual:
- if virtual:
- yield spec
- continue
-
- for pkg in spack.repo.get(name).preorder_traversal(visited,
- **kwargs):
- yield pkg
-
def provides(self, vpkg_name):
"""
True if this package provides a virtual package with the specified name
"""
return any(s.name == vpkg_name for s in self.provided)
- def virtual_dependencies(self, visited=None):
- for spec in sorted(set(self.preorder_traversal(virtual=True))):
- yield spec
-
@property
def installed(self):
return os.path.isdir(self.prefix)
@@ -665,7 +650,9 @@ class Package(object):
for spec in spack.installed_db.query():
if self.name == spec.name:
continue
- for dep in spec.traverse():
+ # XXX(deptype): Should build dependencies not count here?
+ # for dep in spec.traverse(deptype=('run')):
+ for dep in spec.traverse(deptype=spack.alldeps):
if self.spec == dep:
dependents.append(spec)
return dependents
@@ -676,11 +663,13 @@ class Package(object):
return self.spec.prefix
@property
+ # TODO: Change this to architecture
def compiler(self):
"""Get the spack.compiler.Compiler object used to build this package"""
if not self.spec.concrete:
raise ValueError("Can only get a compiler for a concrete package.")
- return spack.compilers.compiler_for_spec(self.spec.compiler)
+ return spack.compilers.compiler_for_spec(self.spec.compiler,
+ self.spec.architecture)
def url_version(self, version):
"""
@@ -715,13 +704,13 @@ class Package(object):
# Ask the user whether to skip the checksum if we're
# interactive, but just fail if non-interactive.
- checksum_msg = "Add a checksum or use --no-checksum to skip this check." # NOQA: ignore=E501
+ ck_msg = "Add a checksum or use --no-checksum to skip this check."
ignore_checksum = False
if sys.stdout.isatty():
ignore_checksum = tty.get_yes_or_no(" Fetch anyway?",
default=False)
if ignore_checksum:
- tty.msg("Fetching with no checksum.", checksum_msg)
+ tty.msg("Fetching with no checksum.", ck_msg)
if not ignore_checksum:
raise FetchError("Will not fetch %s" %
@@ -734,6 +723,8 @@ class Package(object):
if spack.do_checksum and self.version in self.versions:
self.stage.check()
+ self.stage.cache_local()
+
def do_stage(self, mirror_only=False):
"""Unpacks the fetched tarball, then changes into the expanded tarball
directory."""
@@ -850,6 +841,8 @@ class Package(object):
resource_stage_folder = '-'.join(pieces)
return resource_stage_folder
+ install_phases = set(['configure', 'build', 'install', 'provenance'])
+
def do_install(self,
keep_prefix=False,
keep_stage=False,
@@ -857,8 +850,11 @@ class Package(object):
skip_patch=False,
verbose=False,
make_jobs=None,
+ run_tests=False,
fake=False,
- explicit=False):
+ explicit=False,
+ dirty=False,
+ install_phases=install_phases):
"""Called by commands to install a package and its dependencies.
Package implementations should override install() to describe
@@ -874,10 +870,13 @@ class Package(object):
fake -- Don't really build -- install fake stub files instead.
skip_patch -- Skip patch stage of build if True.
verbose -- Display verbose build output (by default, suppresses it)
+ dirty -- Don't clean the build environment before installing.
make_jobs -- Number of make jobs to use for install. Default is ncpus
+        run_tests -- Run tests within the package's install()
"""
if not self.spec.concrete:
- raise ValueError("Can only install concrete packages.")
+ raise ValueError("Can only install concrete packages: %s."
+ % self.spec.name)
# No installation needed if package is external
if self.spec.external:
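A sketch of how a package's install() might consult the new flag; the make
targets are hypothetical:

    def install(self, spec, prefix):
        make()
        if self.run_tests:
            make('test')   # run the test suite only when requested
        make('install')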
@@ -886,7 +885,8 @@ class Package(object):
return
# Ensure package is not already installed
- if spack.install_layout.check_installed(self.spec):
+ layout = spack.install_layout
+ if 'install' in install_phases and layout.check_installed(self.spec):
tty.msg("%s is already installed in %s" % (self.name, self.prefix))
rec = spack.installed_db.get_record(self.spec)
if (not rec.explicit) and explicit:
@@ -905,7 +905,11 @@ class Package(object):
fake=fake,
skip_patch=skip_patch,
verbose=verbose,
- make_jobs=make_jobs)
+ make_jobs=make_jobs,
+ run_tests=run_tests)
+
+ # Set run_tests flag before starting build.
+ self.run_tests = run_tests
# Set parallelism before starting build.
self.make_jobs = make_jobs
@@ -925,6 +929,10 @@ class Package(object):
tty.msg("Building %s" % self.name)
self.stage.keep = keep_stage
+ self.install_phases = install_phases
+ self.build_directory = join_path(self.stage.path, 'spack-build')
+ self.source_directory = self.stage.source_path
+
with self.stage:
# Run the pre-install hook in the child process after
# the directory is created.
@@ -956,19 +964,26 @@ class Package(object):
raise e
# Ensure that something was actually installed.
- self.sanity_check_prefix()
+ if 'install' in self.install_phases:
+ self.sanity_check_prefix()
# Copy provenance into the install directory on success
- log_install_path = spack.install_layout.build_log_path(
- self.spec)
- env_install_path = spack.install_layout.build_env_path(
- self.spec)
- packages_dir = spack.install_layout.build_packages_path(
- self.spec)
-
- install(log_path, log_install_path)
- install(env_path, env_install_path)
- dump_packages(self.spec, packages_dir)
+ if 'provenance' in self.install_phases:
+ log_install_path = layout.build_log_path(self.spec)
+ env_install_path = layout.build_env_path(self.spec)
+ packages_dir = layout.build_packages_path(self.spec)
+
+ # Remove first if we're overwriting another build
+ # (can happen with spack setup)
+ try:
+ # log_install_path and env_install_path are here
+ shutil.rmtree(packages_dir)
+                    except OSError:
+                        pass
+
+ install(log_path, log_install_path)
+ install(env_path, env_install_path)
+ dump_packages(self.spec, packages_dir)
# Run post install hooks before build stage is removed.
spack.hooks.post_install(self)
@@ -986,7 +1001,19 @@ class Package(object):
try:
# Create the install prefix and fork the build process.
spack.install_layout.create_install_directory(self.spec)
- spack.build_environment.fork(self, build_process)
+ except directory_layout.InstallDirectoryAlreadyExistsError:
+ if 'install' in install_phases:
+ # Abort install if install directory exists.
+ # But do NOT remove it (you'd be overwriting someone's data)
+ tty.warn("Keeping existing install prefix in place.")
+ raise
+ else:
+ # We're not installing anyway, so don't worry if someone
+ # else has already written in the install directory
+ pass
+
+ try:
+ spack.build_environment.fork(self, build_process, dirty=dirty)
except:
# remove the install prefix if anything went wrong during install.
if not keep_prefix:
@@ -996,7 +1023,7 @@ class Package(object):
"Spack will think this package is installed. " +
"Manually remove this directory to fix:",
self.prefix,
- wrap=True)
+ wrap=False)
raise
# note: PARENT of the build process adds the new package to
@@ -1028,7 +1055,7 @@ class Package(object):
def do_install_dependencies(self, **kwargs):
# Pass along paths of dependencies here
- for dep in self.spec.dependencies.values():
+ for dep in self.spec.dependencies():
dep.package.do_install(**kwargs)
@property
@@ -1170,7 +1197,15 @@ class Package(object):
def do_uninstall(self, force=False):
if not self.installed:
- raise InstallError(str(self.spec) + " is not installed.")
+ # prefix may not exist, but DB may be inconsistent. Try to fix by
+ # removing, but omit hooks.
+ specs = spack.installed_db.query(self.spec, installed=True)
+ if specs:
+ spack.installed_db.remove(specs[0])
+ tty.msg("Removed stale DB entry for %s" % self.spec.short_spec)
+ return
+ else:
+ raise InstallError(str(self.spec) + " is not installed.")
if not force:
dependents = self.installed_dependents
@@ -1209,7 +1244,7 @@ class Package(object):
(self.name, self.extendee.name))
def do_activate(self, force=False):
- """Called on an etension to invoke the extendee's activate method.
+ """Called on an extension to invoke the extendee's activate method.
Commands should call this routine, and should not call
activate() directly.
@@ -1221,7 +1256,7 @@ class Package(object):
# Activate any package dependencies that are also extensions.
if not force:
- for spec in self.spec.traverse(root=False):
+ for spec in self.spec.traverse(root=False, deptype='run'):
if spec.package.extends(self.extendee_spec):
if not spec.package.activated:
spec.package.do_activate(force=force)
@@ -1267,11 +1302,12 @@ class Package(object):
for name, aspec in activated.items():
if aspec == self.spec:
continue
- for dep in aspec.traverse():
+ for dep in aspec.traverse(deptype='run'):
if self.spec == dep:
+ msg = ("Cannot deactivate %s because %s is activated "
+ "and depends on it.")
raise ActivationError(
- "Cannot deactivate %s because %s is activated and depends on it." # NOQA: ignore=E501
- % (self.spec.short_spec, aspec.short_spec))
+ msg % (self.spec.short_spec, aspec.short_spec))
self.extendee_spec.package.deactivate(self, **self.extendee_args)
@@ -1353,9 +1389,10 @@ class Package(object):
def rpath(self):
"""Get the rpath this package links with, as a list of paths."""
rpaths = [self.prefix.lib, self.prefix.lib64]
- rpaths.extend(d.prefix.lib for d in self.spec.traverse(root=False)
+ deps = self.spec.dependencies(deptype='link')
+ rpaths.extend(d.prefix.lib for d in deps
if os.path.isdir(d.prefix.lib))
- rpaths.extend(d.prefix.lib64 for d in self.spec.traverse(root=False)
+ rpaths.extend(d.prefix.lib64 for d in deps
if os.path.isdir(d.prefix.lib64))
return rpaths
@@ -1372,6 +1409,14 @@ def install_dependency_symlinks(pkg, spec, prefix):
flatten_dependencies(spec, prefix)
+def use_cray_compiler_names():
+ """Compiler names for builds that rely on cray compiler names."""
+ os.environ['CC'] = 'cc'
+ os.environ['CXX'] = 'CC'
+ os.environ['FC'] = 'ftn'
+ os.environ['F77'] = 'ftn'
+
+
def flatten_dependencies(spec, flat_dir):
"""Make each dependency of spec present in dir via symlink."""
for dep in spec.traverse(root=False):
@@ -1469,6 +1514,166 @@ def _hms(seconds):
return ' '.join(parts)
+class StagedPackage(Package):
+ """A Package subclass where the install() is split up into stages."""
+
+ def install_setup(self):
+ """Creates a spack_setup.py script to configure the package later."""
+ raise InstallError(
+ "Package %s provides no install_setup() method!" % self.name)
+
+ def install_configure(self):
+ """Runs the configure process."""
+ raise InstallError(
+ "Package %s provides no install_configure() method!" % self.name)
+
+ def install_build(self):
+ """Runs the build process."""
+ raise InstallError(
+ "Package %s provides no install_build() method!" % self.name)
+
+ def install_install(self):
+ """Runs the install process."""
+ raise InstallError(
+ "Package %s provides no install_install() method!" % self.name)
+
+ def install(self, spec, prefix):
+ if 'setup' in self.install_phases:
+ self.install_setup()
+
+ if 'configure' in self.install_phases:
+ self.install_configure()
+
+ if 'build' in self.install_phases:
+ self.install_build()
+
+ if 'install' in self.install_phases:
+ self.install_install()
+ else:
+ # Create a dummy file so the build doesn't fail.
+ # That way, the module file will also be created.
+ with open(os.path.join(prefix, 'dummy'), 'w'):
+ pass
+
+
+# stackoverflow.com/questions/12791997/how-do-you-do-a-simple-chmod-x-from-within-python
+def make_executable(path):
+ mode = os.stat(path).st_mode
+ mode |= (mode & 0o444) >> 2 # copy R bits to X
+ os.chmod(path, mode)
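A quick worked example of the permission-bit trick above, for a file with mode 0o644:

    # 0o644 & 0o444 == 0o444   (keep only the read bits)
    # 0o444 >> 2    == 0o111   (shift read bits into the execute slots)
    # 0o644 | 0o111 == 0o755   (mode actually passed to os.chmod)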
+
+
+class CMakePackage(StagedPackage):
+
+ def make_make(self):
+ import multiprocessing
+ # Number of jobs Spack will use to build with.
+ jobs = multiprocessing.cpu_count()
+ if not self.parallel:
+ jobs = 1
+ elif self.make_jobs:
+ jobs = self.make_jobs
+
+ make = spack.build_environment.MakeExecutable('make', jobs)
+ return make
+
+ def configure_args(self):
+ """Returns package-specific arguments to be provided to
+ the configure command.
+ """
+ return list()
+
+ def configure_env(self):
+ """Returns package-specific environment under which the
+ configure command should be run.
+ """
+ return dict()
+
+ def transitive_inc_path(self):
+ return ';'.join(
+ os.path.join(dep, 'include')
+ for dep in os.environ['SPACK_DEPENDENCIES'].split(os.pathsep)
+ )
+
+ def install_setup(self):
+ cmd = [str(which('cmake'))]
+ cmd += spack.build_environment.get_std_cmake_args(self)
+ cmd += ['-DCMAKE_INSTALL_PREFIX=%s' % os.environ['SPACK_PREFIX'],
+ '-DCMAKE_C_COMPILER=%s' % os.environ['SPACK_CC'],
+ '-DCMAKE_CXX_COMPILER=%s' % os.environ['SPACK_CXX'],
+ '-DCMAKE_Fortran_COMPILER=%s' % os.environ['SPACK_FC']]
+ cmd += self.configure_args()
+
+ env = {
+ 'PATH': os.environ['PATH'],
+ 'SPACK_TRANSITIVE_INCLUDE_PATH': self.transitive_inc_path(),
+ 'CMAKE_PREFIX_PATH': os.environ['CMAKE_PREFIX_PATH']
+ }
+
+ setup_fname = 'spconfig.py'
+ with open(setup_fname, 'w') as fout:
+ fout.write(r"""#!%s
+#
+
+import sys
+import os
+import subprocess
+
+def cmdlist(str):
+ return list(x.strip().replace("'",'') for x in str.split('\n') if x)
+env = dict(os.environ)
+""" % sys.executable)
+
+ env_vars = sorted(list(env.keys()))
+ for name in env_vars:
+ val = env[name]
+ if string.find(name, 'PATH') < 0:
+ fout.write('env[%s] = %s\n' % (repr(name), repr(val)))
+ else:
+ if name == 'SPACK_TRANSITIVE_INCLUDE_PATH':
+ sep = ';'
+ else:
+ sep = ':'
+
+ fout.write('env[%s] = "%s".join(cmdlist("""\n'
+ % (repr(name), sep))
+ for part in string.split(val, sep):
+ fout.write(' %s\n' % part)
+ fout.write('"""))\n')
+
+ fout.write("env['CMAKE_TRANSITIVE_INCLUDE_PATH'] = "
+ "env['SPACK_TRANSITIVE_INCLUDE_PATH'] # Deprecated\n")
+ fout.write('\ncmd = cmdlist("""\n')
+ fout.write('%s\n' % cmd[0])
+ for arg in cmd[1:]:
+ fout.write(' %s\n' % arg)
+ fout.write('""") + sys.argv[1:]\n')
+ fout.write('\nproc = subprocess.Popen(cmd, env=env)\n')
+ fout.write('proc.wait()\n')
+ make_executable(setup_fname)
+
+ def install_configure(self):
+ cmake = which('cmake')
+ with working_dir(self.build_directory, create=True):
+ env = os.environ
+ env.update(self.configure_env())
+ env['SPACK_TRANSITIVE_INCLUDE_PATH'] = self.transitive_inc_path()
+
+ options = self.configure_args()
+ options += spack.build_environment.get_std_cmake_args(self)
+ cmake(self.source_directory, *options)
+
+ def install_build(self):
+ make = self.make_make()
+ with working_dir(self.build_directory, create=False):
+ make()
+
+ def install_install(self):
+ make = self.make_make()
+ with working_dir(self.build_directory, create=False):
+ make('install')
+
+
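As a hedged sketch of how the staged CMakePackage above is meant to be used (the package name and CMake flag are illustrative assumptions, not part of this commit):

    class Mycode(CMakePackage):            # hypothetical package
        """Illustrative sketch only."""
        def configure_args(self):
            # Extra arguments appended to the standard CMake invocation.
            return ['-DENABLE_FOO=ON']     # assumed project option

Per the install() dispatch above, packages that skip the 'install' phase still get a dummy file written to the prefix, so the module file is created either way.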
class FetchError(spack.error.SpackError):
"""Raised when something goes wrong during fetch."""
@@ -1536,12 +1741,14 @@ class ExtensionError(PackageError):
class ExtensionConflictError(ExtensionError):
+
def __init__(self, path):
super(ExtensionConflictError, self).__init__(
"Extension blocked by file: %s" % path)
class ActivationError(ExtensionError):
+
def __init__(self, msg, long_msg=None):
super(ActivationError, self).__init__(msg, long_msg)
diff --git a/lib/spack/spack/parse.py b/lib/spack/spack/parse.py
index 8adf957e7f..1b88db2d7c 100644
--- a/lib/spack/spack/parse.py
+++ b/lib/spack/spack/parse.py
@@ -29,6 +29,7 @@ import spack.error
class Token:
"""Represents tokens; generated from input by lexer and fed to parse()."""
+
def __init__(self, type, value='', start=0, end=0):
self.type = type
self.value = value
@@ -51,11 +52,13 @@ class Token:
class Lexer(object):
"""Base class for Lexers that keep track of line numbers."""
+
def __init__(self, lexicon):
self.scanner = re.Scanner(lexicon)
def token(self, type, value=''):
- return Token(type, value, self.scanner.match.start(0), self.scanner.match.end(0))
+ return Token(type, value,
+ self.scanner.match.start(0), self.scanner.match.end(0))
def lex(self, text):
tokens, remainder = self.scanner.scan(text)
@@ -66,10 +69,11 @@ class Lexer(object):
class Parser(object):
"""Base class for simple recursive descent parsers."""
+
def __init__(self, lexer):
- self.tokens = iter([]) # iterators over tokens, handled in order. Starts empty.
- self.token = Token(None) # last accepted token starts at beginning of file
- self.next = None # next token
+ self.tokens = iter([]) # iterators over tokens, handled in order.
+ self.token = Token(None) # last accepted token
+ self.next = None # next token
self.lexer = lexer
self.text = None
@@ -82,11 +86,12 @@ class Parser(object):
def push_tokens(self, iterable):
"""Adds all tokens in some iterable to the token stream."""
- self.tokens = itertools.chain(iter(iterable), iter([self.next]), self.tokens)
+ self.tokens = itertools.chain(
+ iter(iterable), iter([self.next]), self.tokens)
self.gettok()
def accept(self, id):
- """Puts the next symbol in self.token if we like it. Then calls gettok()"""
+ """Put the next symbol in self.token if accepted, then call gettok()"""
if self.next and self.next.is_a(id):
self.token = self.next
self.gettok()
@@ -124,9 +129,9 @@ class Parser(object):
return self.do_parse()
-
class ParseError(spack.error.SpackError):
"""Raised when we don't hit an error while parsing."""
+
def __init__(self, message, string, pos):
super(ParseError, self).__init__(message)
self.string = string
@@ -135,5 +140,6 @@ class ParseError(spack.error.SpackError):
class LexError(ParseError):
"""Raised when we don't know how to lex something."""
+
def __init__(self, message, string, pos):
super(LexError, self).__init__(message, string, pos)
diff --git a/lib/spack/spack/patch.py b/lib/spack/spack/patch.py
index c2e181be2f..0bd9f5d29d 100644
--- a/lib/spack/spack/patch.py
+++ b/lib/spack/spack/patch.py
@@ -24,7 +24,6 @@
##############################################################################
import os
-import llnl.util.tty as tty
from llnl.util.filesystem import join_path
import spack
@@ -59,7 +58,6 @@ class Patch(object):
if not os.path.isfile(self.path):
raise NoSuchPatchFileError(pkg_name, self.path)
-
def apply(self, stage):
"""Fetch this patch, if necessary, and apply it to the source
code in the supplied stage.
@@ -84,9 +82,9 @@ class Patch(object):
patch_stage.destroy()
-
class NoSuchPatchFileError(spack.error.SpackError):
"""Raised when user specifies a patch file that doesn't exist."""
+
def __init__(self, package, path):
super(NoSuchPatchFileError, self).__init__(
"No such patch file for package %s: %s" % (package, path))
diff --git a/lib/spack/spack/platforms/__init__.py b/lib/spack/spack/platforms/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/spack/spack/platforms/__init__.py
diff --git a/lib/spack/spack/platforms/bgq.py b/lib/spack/spack/platforms/bgq.py
new file mode 100644
index 0000000000..91afdd04db
--- /dev/null
+++ b/lib/spack/spack/platforms/bgq.py
@@ -0,0 +1,18 @@
+import os
+from spack.architecture import Platform, Target
+
+
+class Bgq(Platform):
+ priority = 30
+ front_end = 'power7'
+ back_end = 'powerpc'
+ default = 'powerpc'
+
+ def __init__(self):
+ super(Bgq, self).__init__('bgq')
+ self.add_target(self.front_end, Target(self.front_end))
+ self.add_target(self.back_end, Target(self.back_end,))
+
+ @classmethod
+ def detect(self):
+ return os.path.exists('/bgsys')
diff --git a/lib/spack/spack/platforms/cray.py b/lib/spack/spack/platforms/cray.py
new file mode 100644
index 0000000000..0059b49ff1
--- /dev/null
+++ b/lib/spack/spack/platforms/cray.py
@@ -0,0 +1,105 @@
+import os
+import re
+import spack.config
+import llnl.util.tty as tty
+from spack.util.executable import which
+from spack.architecture import Platform, Target, NoPlatformError
+from spack.operating_systems.linux_distro import LinuxDistro
+from spack.operating_systems.cnl import Cnl
+from llnl.util.filesystem import join_path
+
+# Craype- module prefixes that are not valid CPU targets.
+NON_TARGETS = ('hugepages', 'network', 'target', 'accel', 'xtpe')
+
+
+def _target_from_clean_env(name):
+ '''Return the default back_end target as loaded in a clean login session.
+
+ A bash subshell is launched with a wiped environment and the list of loaded
+ modules is parsed for the first acceptable CrayPE target.
+ '''
+ # Based on the incantation:
+ # echo "$(env - USER=$USER /bin/bash -l -c 'module list -lt')"
+ targets = []
+ if name != 'front_end':
+ env = which('env')
+ env.add_default_arg('-')
+ # CAUTION - $USER is generally needed to initialize the environment.
+ # There may be other variables needed for general success.
+ output = env('USER=%s' % os.environ['USER'],
+ '/bin/bash', '--noprofile', '--norc', '-c',
+ '. /etc/profile; module list -lt',
+ output=str, error=str)
+ default_modules = [i for i in output.splitlines()
+ if len(i.split()) == 1]
+ tty.debug("Found default modules:",
+ *[" " + mod for mod in default_modules])
+ pattern = 'craype-(?!{0})(\S*)'.format('|'.join(NON_TARGETS))
+ for mod in default_modules:
+ if 'craype-' in mod:
+ targets.extend(re.findall(pattern, mod))
+ return targets[0] if targets else None
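A rough illustration of the lookahead filter (module names here are assumptions):

    import re
    NON_TARGETS = ('hugepages', 'network', 'target', 'accel', 'xtpe')
    pattern = r'craype-(?!{0})(\S*)'.format('|'.join(NON_TARGETS))
    mods = ['craype-network-aries', 'craype-haswell', 'craype-hugepages8M']
    # Only the real CPU target survives the negative lookahead:
    print([re.findall(pattern, m) for m in mods if 'craype-' in m])
    # -> [[], ['haswell'], []]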
+
+
+class Cray(Platform):
+ priority = 10
+
+ def __init__(self):
+ ''' Create a Cray system platform.
+
+ Target names should use craype target names but not include the
+ 'craype-' prefix. Uses the first viable target from:
+ attributes already set on this platform
+ environment variables [SPACK_FRONT_END, SPACK_BACK_END]
+ the "targets.yaml" configuration file, keys 'front_end'/'back_end'
+ a clean login environment (back_end only)
+ '''
+ super(Cray, self).__init__('cray')
+
+ # Get targets from config or make best guess from environment:
+ conf = spack.config.get_config('targets')
+ for name in ('front_end', 'back_end'):
+ _target = getattr(self, name, None)
+ if _target is None:
+ _target = os.environ.get('SPACK_' + name.upper())
+ if _target is None:
+ _target = conf.get(name)
+ if _target is None:
+ _target = _target_from_clean_env(name)
+ setattr(self, name, _target)
+
+ if _target is not None:
+ self.add_target(name, Target(_target, 'craype-' + _target))
+ self.add_target(_target, Target(_target, 'craype-' + _target))
+
+ if self.back_end is not None:
+ self.default = self.back_end
+ self.add_target(
+ 'default', Target(self.default, 'craype-' + self.default))
+ else:
+ raise NoPlatformError()
+
+ front_distro = LinuxDistro()
+ back_distro = Cnl()
+
+ self.default_os = str(back_distro)
+ self.back_os = self.default_os
+ self.front_os = str(front_distro)
+
+ self.add_operating_system(self.back_os, back_distro)
+ self.add_operating_system(self.front_os, front_distro)
+
+ @classmethod
+ def setup_platform_environment(self, pkg, env):
+ """ Change the linker to default dynamic to be more
+ similar to linux/standard linker behavior
+ """
+ env.set('CRAYPE_LINK_TYPE', 'dynamic')
+ cray_wrapper_names = join_path(spack.build_env_path, 'cray')
+ if os.path.isdir(cray_wrapper_names):
+ env.prepend_path('PATH', cray_wrapper_names)
+ env.prepend_path('SPACK_ENV_PATH', cray_wrapper_names)
+
+ @classmethod
+ def detect(self):
+ return os.environ.get('CRAYPE_VERSION') is not None
diff --git a/lib/spack/spack/platforms/darwin.py b/lib/spack/spack/platforms/darwin.py
new file mode 100644
index 0000000000..974ce3a3f9
--- /dev/null
+++ b/lib/spack/spack/platforms/darwin.py
@@ -0,0 +1,27 @@
+import subprocess
+from spack.architecture import Platform, Target
+from spack.operating_systems.mac_os import MacOs
+
+
+class Darwin(Platform):
+ priority = 89
+ front_end = 'x86_64'
+ back_end = 'x86_64'
+ default = 'x86_64'
+
+ def __init__(self):
+ super(Darwin, self).__init__('darwin')
+ self.add_target(self.default, Target(self.default))
+ mac_os = MacOs()
+
+ self.default_os = str(mac_os)
+ self.front_os = str(mac_os)
+ self.back_os = str(mac_os)
+
+ self.add_operating_system(str(mac_os), mac_os)
+
+ @classmethod
+ def detect(self):
+ platform = subprocess.Popen(['uname', '-a'], stdout=subprocess.PIPE)
+ platform, _ = platform.communicate()
+ return 'darwin' in platform.strip().lower()
diff --git a/lib/spack/spack/platforms/linux.py b/lib/spack/spack/platforms/linux.py
new file mode 100644
index 0000000000..38d2cdbfec
--- /dev/null
+++ b/lib/spack/spack/platforms/linux.py
@@ -0,0 +1,32 @@
+import subprocess
+import platform
+from spack.architecture import Platform, Target
+from spack.operating_systems.linux_distro import LinuxDistro
+
+
+class Linux(Platform):
+ priority = 90
+
+ def __init__(self):
+ super(Linux, self).__init__('linux')
+ self.add_target('x86_64', Target('x86_64'))
+ self.add_target('ppc64le', Target('ppc64le'))
+
+ self.default = platform.machine()
+ self.front_end = platform.machine()
+ self.back_end = platform.machine()
+
+ if self.default not in self.targets:
+ self.add_target(self.default, Target(self.default))
+
+ linux_dist = LinuxDistro()
+ self.default_os = str(linux_dist)
+ self.front_os = self.default_os
+ self.back_os = self.default_os
+ self.add_operating_system(str(linux_dist), linux_dist)
+
+ @classmethod
+ def detect(self):
+ platform = subprocess.Popen(['uname', '-a'], stdout=subprocess.PIPE)
+ platform, _ = platform.communicate()
+ return 'linux' in platform.strip().lower()
diff --git a/lib/spack/spack/platforms/test.py b/lib/spack/spack/platforms/test.py
new file mode 100644
index 0000000000..c918211555
--- /dev/null
+++ b/lib/spack/spack/platforms/test.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack.architecture import Platform, Target
+from spack.operating_systems.linux_distro import LinuxDistro
+from spack.operating_systems.cnl import Cnl
+
+
+class Test(Platform):
+ priority = 1000000
+ front_end = 'x86_32'
+ back_end = 'x86_64'
+ default = 'x86_64'
+
+ back_os = 'CNL10'
+ default_os = 'CNL10'
+
+ def __init__(self):
+ super(Test, self).__init__('test')
+ self.add_target(self.default, Target(self.default))
+ self.add_target(self.front_end, Target(self.front_end))
+
+ self.add_operating_system(self.default_os, Cnl())
+ linux_dist = LinuxDistro()
+ self.front_os = linux_dist.name
+ self.add_operating_system(self.front_os, linux_dist)
+
+ @classmethod
+ def detect(self):
+ return True
diff --git a/lib/spack/spack/preferred_packages.py b/lib/spack/spack/preferred_packages.py
index 4820584150..45a41c8e2b 100644
--- a/lib/spack/spack/preferred_packages.py
+++ b/lib/spack/spack/preferred_packages.py
@@ -26,8 +26,10 @@
import spack
from spack.version import *
+
class PreferredPackages(object):
- _default_order = {'compiler' : [ 'gcc', 'intel', 'clang', 'pgi', 'xlc' ] } # Arbitrary, but consistent
+ # Arbitrary, but consistent
+ _default_order = {'compiler': ['gcc', 'intel', 'clang', 'pgi', 'xlc']}
def __init__(self):
self.preferred = spack.config.get_config('packages')
@@ -35,24 +37,25 @@ class PreferredPackages(object):
# Given a package name, sort component (e.g, version, compiler, ...), and
# a second_key (used by providers), return the list
- def _order_for_package(self, pkgname, component, second_key, test_all=True):
+ def _order_for_package(self, pkgname, component, second_key,
+ test_all=True):
pkglist = [pkgname]
if test_all:
pkglist.append('all')
for pkg in pkglist:
order = self.preferred.get(pkg, {}).get(component, {})
- if type(order) is dict:
+ if isinstance(order, dict) and second_key:
order = order.get(second_key, {})
if not order:
continue
return [str(s).strip() for s in order]
return []
-
# A generic sorting function. Given a package name and sort
# component, return less-than-0, 0, or greater-than-0 if
# a is respectively less-than, equal to, or greater than b.
- def _component_compare(self, pkgname, component, a, b, reverse_natural_compare, second_key):
+ def _component_compare(self, pkgname, component, a, b,
+ reverse_natural_compare, second_key):
if a is None:
return -1
if b is None:
@@ -84,92 +87,109 @@ class PreferredPackages(object):
else:
return 0
-
# A sorting function for specs. Similar to component_compare, but
# a and b are considered to match entries in the sorting list if they
# satisfy the list component.
- def _spec_compare(self, pkgname, component, a, b, reverse_natural_compare, second_key):
- if not a or not a.concrete:
+ def _spec_compare(self, pkgname, component, a, b,
+ reverse_natural_compare, second_key):
+ if not a or (not a.concrete and not second_key):
return -1
- if not b or not b.concrete:
+ if not b or (not b.concrete and not second_key):
return 1
specs = self._spec_for_pkgname(pkgname, component, second_key)
a_index = None
b_index = None
reverse = -1 if reverse_natural_compare else 1
for i, cspec in enumerate(specs):
- if a_index == None and (cspec.satisfies(a) or a.satisfies(cspec)):
+ if a_index is None and (cspec.satisfies(a) or a.satisfies(cspec)):
a_index = i
if b_index:
break
- if b_index == None and (cspec.satisfies(b) or b.satisfies(cspec)):
+ if b_index is None and (cspec.satisfies(b) or b.satisfies(cspec)):
b_index = i
if a_index:
break
- if a_index != None and b_index == None: return -1
- elif a_index == None and b_index != None: return 1
- elif a_index != None and b_index == a_index: return -1 * cmp(a, b)
- elif a_index != None and b_index != None and a_index != b_index: return cmp(a_index, b_index)
- else: return cmp(a, b) * reverse
-
-
+ if a_index is not None and b_index is None:
+ return -1
+ elif a_index is None and b_index is not None:
+ return 1
+ elif a_index is not None and b_index == a_index:
+ return -1 * cmp(a, b)
+ elif (a_index is not None and b_index is not None and
+ a_index != b_index):
+ return cmp(a_index, b_index)
+ else:
+ return cmp(a, b) * reverse
# Given a sort order specified by the pkgname/component/second_key, return
# a list of CompilerSpecs, VersionLists, or Specs for that sorting list.
def _spec_for_pkgname(self, pkgname, component, second_key):
key = (pkgname, component, second_key)
- if not key in self._spec_for_pkgname_cache:
+ if key not in self._spec_for_pkgname_cache:
pkglist = self._order_for_package(pkgname, component, second_key)
if not pkglist:
if component in self._default_order:
pkglist = self._default_order[component]
if component == 'compiler':
- self._spec_for_pkgname_cache[key] = [spack.spec.CompilerSpec(s) for s in pkglist]
+ self._spec_for_pkgname_cache[key] = \
+ [spack.spec.CompilerSpec(s) for s in pkglist]
elif component == 'version':
- self._spec_for_pkgname_cache[key] = [VersionList(s) for s in pkglist]
+ self._spec_for_pkgname_cache[key] = \
+ [VersionList(s) for s in pkglist]
else:
- self._spec_for_pkgname_cache[key] = [spack.spec.Spec(s) for s in pkglist]
+ self._spec_for_pkgname_cache[key] = \
+ [spack.spec.Spec(s) for s in pkglist]
return self._spec_for_pkgname_cache[key]
-
def provider_compare(self, pkgname, provider_str, a, b):
- """Return less-than-0, 0, or greater than 0 if a is respecively less-than, equal-to, or
- greater-than b. A and b are possible implementations of provider_str.
- One provider is less-than another if it is preferred over the other.
- For example, provider_compare('scorep', 'mpi', 'mvapich', 'openmpi') would return -1 if
- mvapich should be preferred over openmpi for scorep."""
- return self._spec_compare(pkgname, 'providers', a, b, False, provider_str)
-
+ """Return less-than-0, 0, or greater than 0 if a is respecively
+ less-than, equal-to, or greater-than b. A and b are possible
+ implementations of provider_str. One provider is less-than another
+ if it is preferred over the other. For example,
+ provider_compare('scorep', 'mpi', 'mvapich', 'openmpi') would
+ return -1 if mvapich should be preferred over openmpi for scorep."""
+ return self._spec_compare(pkgname, 'providers', a, b, False,
+ provider_str)
def spec_has_preferred_provider(self, pkgname, provider_str):
- """Return True iff the named package has a list of preferred provider"""
- return bool(self._order_for_package(pkgname, 'providers', provider_str, False))
-
+ """Return True iff the named package has a list of preferred
+ providers"""
+ return bool(self._order_for_package(pkgname, 'providers',
+ provider_str, False))
+
+ def spec_preferred_variants(self, pkgname):
+ """Return a VariantMap of preferred variants and their values"""
+ variants = self.preferred.get(pkgname, {}).get('variants', '')
+ if not isinstance(variants, basestring):
+ variants = " ".join(variants)
+ return spack.spec.Spec("%s %s" % (pkgname, variants)).variants
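For instance (package name and variant values assumed, shown here as comments):

    # packages.yaml (hypothetical):
    #   packages:
    #     hdf5:
    #       variants: +mpi ~fortran
    # spec_preferred_variants('hdf5')
    #   -> Spec('hdf5 +mpi ~fortran').variants  (a VariantMap)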
def version_compare(self, pkgname, a, b):
"""Return less-than-0, 0, or greater than 0 if version a of pkgname is
- respecively less-than, equal-to, or greater-than version b of pkgname.
- One version is less-than another if it is preferred over the other."""
+ respectively less-than, equal-to, or greater-than version b of
+ pkgname. One version is less-than another if it is preferred over
+ the other."""
return self._spec_compare(pkgname, 'version', a, b, True, None)
-
def variant_compare(self, pkgname, a, b):
"""Return less-than-0, 0, or greater than 0 if variant a of pkgname is
- respecively less-than, equal-to, or greater-than variant b of pkgname.
- One variant is less-than another if it is preferred over the other."""
+ respectively less-than, equal-to, or greater-than variant b of
+ pkgname. One variant is less-than another if it is preferred over
+ the other."""
return self._component_compare(pkgname, 'variant', a, b, False, None)
-
def architecture_compare(self, pkgname, a, b):
- """Return less-than-0, 0, or greater than 0 if architecture a of pkgname is
- respecively less-than, equal-to, or greater-than architecture b of pkgname.
- One architecture is less-than another if it is preferred over the other."""
- return self._component_compare(pkgname, 'architecture', a, b, False, None)
-
+ """Return less-than-0, 0, or greater than 0 if architecture a of pkgname
+ is respectively less-than, equal-to, or greater-than architecture b
+ of pkgname. One architecture is less-than another if it is preferred
+ over the other."""
+ return self._component_compare(pkgname, 'architecture', a, b,
+ False, None)
def compiler_compare(self, pkgname, a, b):
"""Return less-than-0, 0, or greater than 0 if compiler a of pkgname is
- respecively less-than, equal-to, or greater-than compiler b of pkgname.
- One compiler is less-than another if it is preferred over the other."""
+ respectively less-than, equal-to, or greater-than compiler b of
+ pkgname. One compiler is less-than another if it is preferred over
+ the other."""
return self._spec_compare(pkgname, 'compiler', a, b, False, None)
diff --git a/lib/spack/spack/virtual.py b/lib/spack/spack/provider_index.py
index bb8333f023..3f9cd285e7 100644
--- a/lib/spack/spack/virtual.py
+++ b/lib/spack/spack/provider_index.py
@@ -25,8 +25,14 @@
"""
The ``virtual`` module contains utility classes for virtual dependencies.
"""
-import spack.spec
-import itertools
+from itertools import product as iproduct
+from pprint import pformat
+
+import yaml
+from yaml.error import MarkedYAMLError
+
+import spack
+
class ProviderIndex(object):
"""This is a dict of dicts used for finding providers of particular
@@ -44,13 +50,30 @@ class ProviderIndex(object):
Calling providers_for(spec) will find specs that provide a
matching implementation of MPI.
+
"""
- def __init__(self, specs, **kwargs):
- # TODO: come up with another name for this. This "restricts" values to
- # the verbatim impu specs (i.e., it doesn't pre-apply package's constraints, and
- # keeps things as broad as possible, so it's really the wrong name)
- self.restrict = kwargs.setdefault('restrict', False)
+ def __init__(self, specs=None, restrict=False):
+ """Create a new ProviderIndex.
+
+ Optional arguments:
+
+ specs
+ List (or sequence) of specs. If provided, will call
+ `update` on this ProviderIndex with each spec in the list.
+
+ restrict
+ "restricts" values to the verbatim input specs; do not
+ pre-apply package's constraints.
+
+ TODO: rename this. It is intended to keep things as broad
+ as possible without overly restricting results, so it is
+ not the best name.
+ """
+ if specs is None:
+ specs = []
+
+ self.restrict = restrict
self.providers = {}
for spec in specs:
@@ -62,9 +85,8 @@ class ProviderIndex(object):
self.update(spec)
-
def update(self, spec):
- if type(spec) != spack.spec.Spec:
+ if not isinstance(spec, spack.spec.Spec):
spec = spack.spec.Spec(spec)
if not spec.name:
@@ -75,12 +97,13 @@ class ProviderIndex(object):
pkg = spec.package
for provided_spec, provider_spec in pkg.provided.iteritems():
- provider_spec.compiler_flags = spec.compiler_flags.copy()#We want satisfaction other than flags
+ # We want satisfaction other than flags
+ provider_spec.compiler_flags = spec.compiler_flags.copy()
if provider_spec.satisfies(spec, deps=False):
provided_name = provided_spec.name
provider_map = self.providers.setdefault(provided_name, {})
- if not provided_spec in provider_map:
+ if provided_spec not in provider_map:
provider_map[provided_spec] = set()
if self.restrict:
@@ -102,7 +125,6 @@ class ProviderIndex(object):
constrained.constrain(provider_spec)
provider_map[provided_spec].add(constrained)
-
def providers_for(self, *vpkg_specs):
"""Gives specs of all packages that provide virtual packages
with the supplied specs."""
@@ -114,26 +136,25 @@ class ProviderIndex(object):
# Add all the providers that satisfy the vpkg spec.
if vspec.name in self.providers:
- for provider_spec, spec_set in self.providers[vspec.name].items():
- if provider_spec.satisfies(vspec, deps=False):
+ for p_spec, spec_set in self.providers[vspec.name].items():
+ if p_spec.satisfies(vspec, deps=False):
providers.update(spec_set)
# Return providers in order
return sorted(providers)
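A hedged usage sketch (package and virtual names are assumptions; the exact Specs returned depend on the package repository):

    # Hypothetical: assumes the 'mpich' package declares provides('mpi').
    # update() converts plain strings to Specs, so strings are accepted.
    index = ProviderIndex(specs=['mpich'])
    index.providers_for('mpi')   # -> sorted list of mpich provider Specs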
-
# TODO: this is pretty darned nasty, and inefficient, but there
# are not that many vdeps in most specs.
def _cross_provider_maps(self, lmap, rmap):
result = {}
- for lspec, rspec in itertools.product(lmap, rmap):
+ for lspec, rspec in iproduct(lmap, rmap):
try:
constrained = lspec.constrained(rspec)
except spack.spec.UnsatisfiableSpecError:
continue
# lp and rp are left and right provider specs.
- for lp_spec, rp_spec in itertools.product(lmap[lspec], rmap[rspec]):
+ for lp_spec, rp_spec in iproduct(lmap[lspec], rmap[rspec]):
if lp_spec.name == rp_spec.name:
try:
const = lp_spec.constrained(rp_spec, deps=False)
@@ -142,12 +163,10 @@ class ProviderIndex(object):
continue
return result
-
def __contains__(self, name):
"""Whether a particular vpkg name is in the index."""
return name in self.providers
-
def satisfies(self, other):
"""Check that providers of virtual specs are compatible."""
common = set(self.providers) & set(other.providers)
@@ -164,3 +183,111 @@ class ProviderIndex(object):
result[name] = crossed
return all(c in result for c in common)
+
+ def to_yaml(self, stream=None):
+ provider_list = self._transform(
+ lambda vpkg, pset: [
+ vpkg.to_node_dict(), [p.to_node_dict() for p in pset]], list)
+
+ yaml.dump({'provider_index': {'providers': provider_list}},
+ stream=stream)
+
+ @staticmethod
+ def from_yaml(stream):
+ try:
+ yfile = yaml.load(stream)
+ except MarkedYAMLError, e:
+ raise spack.spec.SpackYAMLError(
+ "error parsing YAML ProviderIndex cache:", str(e))
+
+ if not isinstance(yfile, dict):
+ raise spack.spec.SpackYAMLError(
+ "YAML ProviderIndex was not a dict.")
+
+ if 'provider_index' not in yfile:
+ raise spack.spec.SpackYAMLError(
+ "YAML ProviderIndex does not start with 'provider_index'")
+
+ index = ProviderIndex()
+ providers = yfile['provider_index']['providers']
+ index.providers = _transform(
+ providers,
+ lambda vpkg, plist: (
+ spack.spec.Spec.from_node_dict(vpkg),
+ set(spack.spec.Spec.from_node_dict(p) for p in plist)))
+ return index
+
+ def merge(self, other):
+ """Merge `other` ProviderIndex into this one."""
+ other = other.copy() # defensive copy.
+
+ for pkg in other.providers:
+ if pkg not in self.providers:
+ self.providers[pkg] = other.providers[pkg]
+ continue
+
+ spdict, opdict = self.providers[pkg], other.providers[pkg]
+ for provided_spec in opdict:
+ if provided_spec not in spdict:
+ spdict[provided_spec] = opdict[provided_spec]
+ continue
+
+ spdict[provided_spec] += opdict[provided_spec]
+
+ def remove_provider(self, pkg_name):
+ """Remove a provider from the ProviderIndex."""
+ empty_pkg_dict = []
+ for pkg, pkg_dict in self.providers.items():
+ empty_pset = []
+ for provided, pset in pkg_dict.items():
+ same_name = set(p for p in pset if p.fullname == pkg_name)
+ pset.difference_update(same_name)
+
+ if not pset:
+ empty_pset.append(provided)
+
+ for provided in empty_pset:
+ del pkg_dict[provided]
+
+ if not pkg_dict:
+ empty_pkg_dict.append(pkg)
+
+ for pkg in empty_pkg_dict:
+ del self.providers[pkg]
+
+ def copy(self):
+ """Deep copy of this ProviderIndex."""
+ clone = ProviderIndex()
+ clone.providers = self._transform(
+ lambda vpkg, pset: (vpkg, set((p.copy() for p in pset))))
+ return clone
+
+ def __eq__(self, other):
+ return self.providers == other.providers
+
+ def _transform(self, transform_fun, out_mapping_type=dict):
+ return _transform(self.providers, transform_fun, out_mapping_type)
+
+ def __str__(self):
+ return pformat(
+ _transform(self.providers,
+ lambda k, v: (k, list(v))))
+
+
+def _transform(providers, transform_fun, out_mapping_type=dict):
+ """Syntactic sugar for transforming a providers dict.
+
+ transform_fun takes a (vpkg, pset) mapping and runs it on each
+ pair in nested dicts.
+
+ """
+ def mapiter(mappings):
+ if isinstance(mappings, dict):
+ return mappings.iteritems()
+ else:
+ return iter(mappings)
+
+ return dict(
+ (name, out_mapping_type([
+ transform_fun(vpkg, pset) for vpkg, pset in mapiter(mappings)]))
+ for name, mappings in providers.items())
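A toy run of _transform with string stand-ins for the Spec objects:

    providers = {'mpi': {'mpi@3': set(['mpich@3.2'])}}   # stand-in strings
    # Turn each provider set into a sorted list, keeping both key levels.
    _transform(providers, lambda vpkg, pset: (vpkg, sorted(pset)))
    # -> {'mpi': {'mpi@3': ['mpich@3.2']}}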
diff --git a/lib/spack/spack/repository.py b/lib/spack/spack/repository.py
index 70134964ad..2d8dc39648 100644
--- a/lib/spack/spack/repository.py
+++ b/lib/spack/spack/repository.py
@@ -23,6 +23,9 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
+import stat
+import shutil
+import errno
import exceptions
import sys
import inspect
@@ -30,15 +33,18 @@ import imp
import re
import traceback
from bisect import bisect_left
-from external import yaml
+from types import ModuleType
+
+import yaml
import llnl.util.tty as tty
from llnl.util.filesystem import *
+import spack
import spack.error
import spack.config
import spack.spec
-from spack.virtual import ProviderIndex
+from spack.provider_index import ProviderIndex
from spack.util.naming import *
#
@@ -51,6 +57,7 @@ repo_namespace = 'spack.pkg'
# These names describe how repos should be laid out in the filesystem.
#
repo_config_name = 'repo.yaml' # Top-level filename for repo config.
+repo_index_name = 'index.yaml' # Top-level filename for repository index.
packages_dir_name = 'packages' # Top-level repo directory containing pkgs.
package_file_name = 'package.py' # Filename for packages in a repository.
@@ -61,6 +68,7 @@ NOT_PROVIDED = object()
def _autospec(function):
"""Decorator that automatically converts the argument of a single-arg
function to a Spec."""
+
def converter(self, spec_like, *args, **kwargs):
if not isinstance(spec_like, spack.spec.Spec):
spec_like = spack.spec.Spec(spec_like)
@@ -68,12 +76,22 @@ def _autospec(function):
return converter
-def _make_namespace_module(ns):
- module = imp.new_module(ns)
- module.__file__ = "(spack namespace)"
- module.__path__ = []
- module.__package__ = ns
- return module
+class SpackNamespace(ModuleType):
+ """ Allow lazy loading of modules."""
+
+ def __init__(self, namespace):
+ super(SpackNamespace, self).__init__(namespace)
+ self.__file__ = "(spack namespace)"
+ self.__path__ = []
+ self.__name__ = namespace
+ self.__package__ = namespace
+ self.__modules = {}
+
+ def __getattr__(self, name):
+ """Getattr lazily loads modules if they're not already loaded."""
+ submodule = self.__package__ + '.' + name
+ setattr(self, name, __import__(submodule))
+ return getattr(self, name)
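A sketch of the lazy-import behavior (module names assumed):

    ns = SpackNamespace('spack.pkg.builtin')
    # The first access of ns.mpich triggers __getattr__, which runs
    # __import__('spack.pkg.builtin.mpich') and caches the result via
    # setattr, assuming such a module is importable.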
def substitute_spack_prefix(path):
@@ -96,6 +114,7 @@ class RepoPath(object):
combined results of the Repos in its list instead of on a
single package repository.
"""
+
def __init__(self, *repo_dirs, **kwargs):
# super-namespace for all packages in the RepoPath
self.super_namespace = kwargs.get('namespace', repo_namespace)
@@ -104,7 +123,7 @@ class RepoPath(object):
self.by_namespace = NamespaceTrie()
self.by_path = {}
- self._all_package_names = []
+ self._all_package_names = None
self._provider_index = None
# If repo_dirs is empty, just use the configuration
@@ -125,7 +144,6 @@ class RepoPath(object):
"To remove the bad repository, run this command:",
" spack repo rm %s" % root)
-
def swap(self, other):
"""Convenience function to make swapping repostiories easier.
@@ -143,7 +161,6 @@ class RepoPath(object):
setattr(self, attr, getattr(other, attr))
setattr(other, attr, tmp)
-
def _add(self, repo):
"""Add a repository to the namespace and path indexes.
@@ -157,36 +174,28 @@ class RepoPath(object):
if repo.namespace in self.by_namespace:
raise DuplicateRepoError(
"Package repos '%s' and '%s' both provide namespace %s"
- % (repo.root, self.by_namespace[repo.namespace].root, repo.namespace))
+ % (repo.root, self.by_namespace[repo.namespace].root,
+ repo.namespace))
# Add repo to the pkg indexes
self.by_namespace[repo.full_namespace] = repo
self.by_path[repo.root] = repo
- # add names to the cached name list
- new_pkgs = set(repo.all_package_names())
- new_pkgs.update(set(self._all_package_names))
- self._all_package_names = sorted(new_pkgs, key=lambda n:n.lower())
-
-
def put_first(self, repo):
"""Add repo first in the search path."""
self._add(repo)
self.repos.insert(0, repo)
-
def put_last(self, repo):
"""Add repo last in the search path."""
self._add(repo)
self.repos.append(repo)
-
def remove(self, repo):
"""Remove a repo from the search path."""
if repo in self.repos:
self.repos.remove(repo)
-
def get_repo(self, namespace, default=NOT_PROVIDED):
"""Get a repository by namespace.
Arguments
@@ -206,38 +215,45 @@ class RepoPath(object):
return default
return self.by_namespace[fullspace]
-
def first_repo(self):
"""Get the first repo in precedence order."""
return self.repos[0] if self.repos else None
-
def all_package_names(self):
"""Return all unique package names in all repositories."""
+ if self._all_package_names is None:
+ all_pkgs = set()
+ for repo in self.repos:
+ for name in repo.all_package_names():
+ all_pkgs.add(name)
+ self._all_package_names = sorted(all_pkgs, key=lambda n: n.lower())
return self._all_package_names
-
def all_packages(self):
for name in self.all_package_names():
yield self.get(name)
+ @property
+ def provider_index(self):
+ """Merged ProviderIndex from all Repos in the RepoPath."""
+ if self._provider_index is None:
+ self._provider_index = ProviderIndex()
+ for repo in reversed(self.repos):
+ self._provider_index.merge(repo.provider_index)
+
+ return self._provider_index
@_autospec
def providers_for(self, vpkg_spec):
- if self._provider_index is None:
- self._provider_index = ProviderIndex(self.all_package_names())
-
- providers = self._provider_index.providers_for(vpkg_spec)
+ providers = self.provider_index.providers_for(vpkg_spec)
if not providers:
raise UnknownPackageError(vpkg_spec.name)
return providers
-
@_autospec
def extensions_for(self, extendee_spec):
return [p for p in self.all_packages() if p.extends(extendee_spec)]
-
def find_module(self, fullname, path=None):
"""Implements precedence for overlaid namespaces.
@@ -264,7 +280,6 @@ class RepoPath(object):
return None
-
def load_module(self, fullname):
"""Handles loading container namespaces when necessary.
@@ -273,18 +288,14 @@ class RepoPath(object):
if fullname in sys.modules:
return sys.modules[fullname]
- # partition fullname into prefix and module name.
- namespace, dot, module_name = fullname.rpartition('.')
-
if not self.by_namespace.is_prefix(fullname):
raise ImportError("No such Spack repo: %s" % fullname)
- module = _make_namespace_module(namespace)
+ module = SpackNamespace(fullname)
module.__loader__ = self
sys.modules[fullname] = module
return module
-
@_autospec
def repo_for_pkg(self, spec):
"""Given a spec, get the repository for its package."""
@@ -306,7 +317,6 @@ class RepoPath(object):
# that can operate on packages that don't exist yet.
return self.first_repo()
-
@_autospec
def get(self, spec, new=False):
"""Find a repo that contains the supplied spec's package.
@@ -315,12 +325,10 @@ class RepoPath(object):
"""
return self.repo_for_pkg(spec).get(spec)
-
def get_pkg_class(self, pkg_name):
"""Find a class for the spec's package and return the class object."""
return self.repo_for_pkg(pkg_name).get_pkg_class(pkg_name)
-
@_autospec
def dump_provenance(self, spec, path):
"""Dump provenance information for a spec to a particular path.
@@ -330,24 +338,19 @@ class RepoPath(object):
"""
return self.repo_for_pkg(spec).dump_provenance(spec, path)
-
def dirname_for_package_name(self, pkg_name):
return self.repo_for_pkg(pkg_name).dirname_for_package_name(pkg_name)
-
def filename_for_package_name(self, pkg_name):
return self.repo_for_pkg(pkg_name).filename_for_package_name(pkg_name)
-
def exists(self, pkg_name):
return any(repo.exists(pkg_name) for repo in self.repos)
-
def __contains__(self, pkg_name):
return self.exists(pkg_name)
-
class Repo(object):
"""Class representing a package repository in the filesystem.
@@ -360,6 +363,7 @@ class Repo(object):
A Python namespace where the repository's packages should live.
"""
+
def __init__(self, root, namespace=repo_namespace):
"""Instantiate a package repository from a filesystem path.
@@ -381,12 +385,14 @@ class Repo(object):
# check and raise BadRepoError on fail.
def check(condition, msg):
- if not condition: raise BadRepoError(msg)
+ if not condition:
+ raise BadRepoError(msg)
# Validate repository layout.
- self.config_file = join_path(self.root, repo_config_name)
+ self.config_file = join_path(self.root, repo_config_name)
check(os.path.isfile(self.config_file),
"No %s found in '%s'" % (repo_config_name, root))
+
self.packages_path = join_path(self.root, packages_dir_name)
check(os.path.isdir(self.packages_path),
"No directory '%s' found in '%s'" % (repo_config_name, root))
@@ -398,12 +404,14 @@ class Repo(object):
self.namespace = config['namespace']
check(re.match(r'[a-zA-Z][a-zA-Z0-9_.]+', self.namespace),
- ("Invalid namespace '%s' in repo '%s'. " % (self.namespace, self.root)) +
+ ("Invalid namespace '%s' in repo '%s'. "
+ % (self.namespace, self.root)) +
"Namespaces must be valid python identifiers separated by '.'")
# Set up 'full_namespace' to include the super-namespace
if self.super_namespace:
- self.full_namespace = "%s.%s" % (self.super_namespace, self.namespace)
+ self.full_namespace = "%s.%s" % (
+ self.super_namespace, self.namespace)
else:
self.full_namespace = self.namespace
@@ -414,12 +422,21 @@ class Repo(object):
self._modules = {}
self._classes = {}
self._instances = {}
+
+ # list of packages that are newer than the index.
+ self._needs_update = []
+
+ # Index of virtual dependencies
self._provider_index = None
+
+ # Cached list of package names.
self._all_package_names = None
# make sure the namespace for packages in this repo exists.
self._create_namespace()
+ # Unique filename for cache of virtual dependency providers
+ self._cache_file = 'providers/%s-index.yaml' % self.namespace
def _create_namespace(self):
"""Create this repo's namespace module and insert it into sys.modules.
@@ -429,10 +446,11 @@ class Repo(object):
"""
parent = None
- for l in range(1, len(self._names)+1):
+ for l in range(1, len(self._names) + 1):
ns = '.'.join(self._names[:l])
- if not ns in sys.modules:
- module = _make_namespace_module(ns)
+
+ if ns not in sys.modules:
+ module = SpackNamespace(ns)
module.__loader__ = self
sys.modules[ns] = module
@@ -442,14 +460,14 @@ class Repo(object):
# This ensures that we can do things like:
# import spack.pkg.builtin.mpich as mpich
if parent:
- modname = self._names[l-1]
- if not hasattr(parent, modname):
- setattr(parent, modname, module)
+ modname = self._names[l - 1]
+ setattr(parent, modname, module)
else:
- # no need to set up a module, but keep track of the parent.
+ # no need to set up a module
module = sys.modules[ns]
- parent = module
+ # but keep track of the parent in this loop
+ parent = module
def real_name(self, import_name):
"""Allow users to import Spack packages using Python identifiers.
@@ -476,13 +494,11 @@ class Repo(object):
return name
return None
-
def is_prefix(self, fullname):
"""True if fullname is a prefix of this Repo's namespace."""
parts = fullname.split('.')
return self._names[:len(parts)] == parts
-
def find_module(self, fullname, path=None):
"""Python find_module import hook.
@@ -498,7 +514,6 @@ class Repo(object):
return None
-
def load_module(self, fullname):
"""Python importer load hook.
@@ -510,7 +525,7 @@ class Repo(object):
namespace, dot, module_name = fullname.rpartition('.')
if self.is_prefix(fullname):
- module = _make_namespace_module(fullname)
+ module = SpackNamespace(fullname)
elif namespace == self.full_namespace:
real_name = self.real_name(module_name)
@@ -523,8 +538,12 @@ class Repo(object):
module.__loader__ = self
sys.modules[fullname] = module
- return module
+ if namespace != fullname:
+ parent = sys.modules[namespace]
+ if not hasattr(parent, module_name):
+ setattr(parent, module_name, module)
+ return module
def _read_config(self):
"""Check for a YAML config file in this db's root directory."""
@@ -533,40 +552,39 @@ class Repo(object):
yaml_data = yaml.load(reponame_file)
if (not yaml_data or 'repo' not in yaml_data or
- not isinstance(yaml_data['repo'], dict)):
- tty.die("Invalid %s in repository %s"
- % (repo_config_name, self.root))
+ not isinstance(yaml_data['repo'], dict)):
+ tty.die("Invalid %s in repository %s" % (
+ repo_config_name, self.root))
return yaml_data['repo']
- except exceptions.IOError, e:
+ except exceptions.IOError:
tty.die("Error reading %s when opening %s"
% (self.config_file, self.root))
-
@_autospec
def get(self, spec, new=False):
if spec.virtual:
raise UnknownPackageError(spec.name)
if spec.namespace and spec.namespace != self.namespace:
- raise UnknownPackageError("Repository %s does not contain package %s"
- % (self.namespace, spec.fullname))
+ raise UnknownPackageError(
+ "Repository %s does not contain package %s"
+ % (self.namespace, spec.fullname))
key = hash(spec)
if new or key not in self._instances:
package_class = self.get_pkg_class(spec.name)
try:
- copy = spec.copy() # defensive copy. Package owns its spec.
+ copy = spec.copy() # defensive copy. Package owns its spec.
self._instances[key] = package_class(copy)
- except Exception, e:
+ except Exception:
if spack.debug:
sys.excepthook(*sys.exc_info())
raise FailedConstructorError(spec.fullname, *sys.exc_info())
return self._instances[key]
-
@_autospec
def dump_provenance(self, spec, path):
"""Dump provenance information for a spec to a particular path.
@@ -579,8 +597,9 @@ class Repo(object):
raise UnknownPackageError(spec.name)
if spec.namespace and spec.namespace != self.namespace:
- raise UnknownPackageError("Repository %s does not contain package %s."
- % (self.namespace, spec.fullname))
+ raise UnknownPackageError(
+ "Repository %s does not contain package %s."
+ % (self.namespace, spec.fullname))
# Install any patch files needed by packages.
mkdirp(path)
@@ -595,34 +614,61 @@ class Repo(object):
# Install the package.py file itself.
install(self.filename_for_package_name(spec), path)
-
def purge(self):
"""Clear entire package instance cache."""
self._instances.clear()
+ def _update_provider_index(self):
+ # Check modification dates of all packages
+ self._fast_package_check()
- @_autospec
- def providers_for(self, vpkg_spec):
+ def read():
+ with open(self.index_file) as f:
+ self._provider_index = ProviderIndex.from_yaml(f)
+
+ # Read the old ProviderIndex, or make a new one.
+ key = self._cache_file
+ index_existed = spack.user_cache.init_entry(key)
+ if index_existed and not self._needs_update:
+ with spack.user_cache.read_transaction(key) as f:
+ self._provider_index = ProviderIndex.from_yaml(f)
+ else:
+ with spack.user_cache.write_transaction(key) as (old, new):
+ if old:
+ self._provider_index = ProviderIndex.from_yaml(old)
+ else:
+ self._provider_index = ProviderIndex()
+
+ for pkg_name in self._needs_update:
+ namespaced_name = '%s.%s' % (self.namespace, pkg_name)
+ self._provider_index.remove_provider(namespaced_name)
+ self._provider_index.update(namespaced_name)
+
+ self._provider_index.to_yaml(new)
+
+ @property
+ def provider_index(self):
+ """A provider index with names *specific* to this repo."""
if self._provider_index is None:
- self._provider_index = ProviderIndex(self.all_package_names())
+ self._update_provider_index()
+ return self._provider_index
- providers = self._provider_index.providers_for(vpkg_spec)
+ @_autospec
+ def providers_for(self, vpkg_spec):
+ providers = self.provider_index.providers_for(vpkg_spec)
if not providers:
raise UnknownPackageError(vpkg_spec.name)
return providers
-
@_autospec
def extensions_for(self, extendee_spec):
return [p for p in self.all_packages() if p.extends(extendee_spec)]
-
def _check_namespace(self, spec):
"""Check that the spec's namespace is the same as this repository's."""
if spec.namespace and spec.namespace != self.namespace:
raise UnknownNamespaceError(spec.namespace)
-
@_autospec
def dirname_for_package_name(self, spec):
"""Get the directory name for a particular package. This is the
@@ -630,7 +676,6 @@ class Repo(object):
self._check_namespace(spec)
return join_path(self.packages_path, spec.name)
-
@_autospec
def filename_for_package_name(self, spec):
"""Get the filename for the module we should load for a particular
@@ -645,48 +690,95 @@ class Repo(object):
pkg_dir = self.dirname_for_package_name(spec.name)
return join_path(pkg_dir, package_file_name)
+ def _fast_package_check(self):
+ """List packages in the repo and check whether index is up to date.
- def all_package_names(self):
- """Returns a sorted list of all package names in the Repo."""
+ Both of these operations require checking all `package.py`
+ files so we do them at the same time. We list the repo
+ directory and look at package.py files, and we compare the
+ index modification date with the most recently modified package
+ file, storing the result.
+
+ The implementation here should try to minimize filesystem
+ calls. At the moment, it is O(number of packages) and makes
+ about one stat call per package. This is reasonably fast, and
+ avoids actually importing packages in Spack, which is slow.
+
+ """
if self._all_package_names is None:
self._all_package_names = []
+ # Get index modification time.
+ index_mtime = spack.user_cache.mtime(self._cache_file)
+
for pkg_name in os.listdir(self.packages_path):
# Skip non-directories in the package root.
pkg_dir = join_path(self.packages_path, pkg_name)
- if not os.path.isdir(pkg_dir):
- continue
-
- # Skip directories without a package.py in them.
- pkg_file = join_path(self.packages_path, pkg_name, package_file_name)
- if not os.path.isfile(pkg_file):
- continue
# Warn about invalid names that look like packages.
if not valid_module_name(pkg_name):
- tty.warn("Skipping package at %s. '%s' is not a valid Spack module name."
- % (pkg_dir, pkg_name))
+ msg = ("Skipping package at %s. "
+ "'%s' is not a valid Spack module name.")
+ tty.warn(msg % (pkg_dir, pkg_name))
+ continue
+
+ # construct the file name from the directory
+ pkg_file = join_path(
+ self.packages_path, pkg_name, package_file_name)
+
+ # Use stat here to avoid lots of calls to the filesystem.
+ try:
+ sinfo = os.stat(pkg_file)
+ except OSError as e:
+ if e.errno == errno.ENOENT:
+ # No package.py file here.
+ continue
+ elif e.errno == errno.EACCES:
+ tty.warn("Can't read package file %s." % pkg_file)
+ continue
+ raise e
+
+ # if it's not a file, skip it.
+ if stat.S_ISDIR(sinfo.st_mode):
continue
# All checks passed. Add it to the list.
self._all_package_names.append(pkg_name)
+
+ # record the package if it is newer than the index.
+ if sinfo.st_mtime > index_mtime:
+ self._needs_update.append(pkg_name)
+
self._all_package_names.sort()
return self._all_package_names
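Distilled to its core, the staleness test described above amounts to (a sketch, not the literal code):

    import os

    def index_is_stale(package_files, index_mtime):
        # Stale iff any package.py changed after the index was written.
        return any(os.stat(f).st_mtime > index_mtime
                   for f in package_files)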
+ def all_package_names(self):
+ """Returns a sorted list of all package names in the Repo."""
+ self._fast_package_check()
+ return self._all_package_names
def all_packages(self):
+ """Iterator over all packages in the repository.
+
+ Use this with care, because loading packages is slow.
+
+ """
for name in self.all_package_names():
yield self.get(name)
-
def exists(self, pkg_name):
"""Whether a package with the supplied name exists."""
- # This does a binary search in the sorted list.
- idx = bisect_left(self.all_package_names(), pkg_name)
- return (idx < len(self._all_package_names) and
- self._all_package_names[idx] == pkg_name)
+ if self._all_package_names:
+ # This does a binary search in the sorted list.
+ idx = bisect_left(self.all_package_names(), pkg_name)
+ return (idx < len(self._all_package_names) and
+ self._all_package_names[idx] == pkg_name)
+ # If we haven't generated the full package list, don't.
+ # Just check whether the file exists.
+ filename = self.filename_for_package_name(pkg_name)
+ return os.path.exists(filename)
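The binary-search membership test in isolation, on a toy sorted list:

    from bisect import bisect_left
    names = ['hdf5', 'mpich', 'zlib']     # assumed sorted package names
    idx = bisect_left(names, 'mpich')
    print(idx < len(names) and names[idx] == 'mpich')   # True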
def _get_pkg_module(self, pkg_name):
"""Create a module for a particular package.
@@ -719,7 +811,6 @@ class Repo(object):
return self._modules[pkg_name]
-
def get_pkg_class(self, pkg_name):
"""Get the class for the package out of its module.
@@ -727,6 +818,11 @@ class Repo(object):
package. Then extracts the package class from the module
according to Spack's naming convention.
"""
+ namespace, _, pkg_name = pkg_name.rpartition('.')
+ if namespace and (namespace != self.namespace):
+ raise InvalidNamespaceError('Invalid namespace for %s repo: %s'
+ % (self.namespace, namespace))
+
class_name = mod_to_class(pkg_name)
module = self._get_pkg_module(pkg_name)
@@ -736,15 +832,12 @@ class Repo(object):
return cls
-
def __str__(self):
return "[Repo '%s' at '%s']" % (self.namespace, self.root)
-
def __repr__(self):
return self.__str__()
-
def __contains__(self, pkg_name):
return self.exists(pkg_name)
@@ -753,30 +846,37 @@ def create_repo(root, namespace=None):
"""Create a new repository in root with the specified namespace.
If the namespace is not provided, use basename of root.
- Return the canonicalized path and the namespace of the created repository.
+ Return the canonicalized path and namespace of the created repository.
"""
root = canonicalize_path(root)
if not namespace:
namespace = os.path.basename(root)
if not re.match(r'\w[\.\w-]*', namespace):
- raise InvalidNamespaceError("'%s' is not a valid namespace." % namespace)
+ raise InvalidNamespaceError(
+ "'%s' is not a valid namespace." % namespace)
existed = False
if os.path.exists(root):
if os.path.isfile(root):
- raise BadRepoError('File %s already exists and is not a directory' % root)
+ raise BadRepoError('File %s already exists and is not a directory'
+ % root)
elif os.path.isdir(root):
if not os.access(root, os.R_OK | os.W_OK):
- raise BadRepoError('Cannot create new repo in %s: cannot access directory.' % root)
+ raise BadRepoError(
+ 'Cannot create new repo in %s: cannot access directory.'
+ % root)
if os.listdir(root):
- raise BadRepoError('Cannot create new repo in %s: directory is not empty.' % root)
+ raise BadRepoError(
+ 'Cannot create new repo in %s: directory is not empty.'
+ % root)
existed = True
full_path = os.path.realpath(root)
parent = os.path.dirname(full_path)
if not os.access(parent, os.R_OK | os.W_OK):
- raise BadRepoError("Cannot create repository in %s: can't access parent!" % root)
+ raise BadRepoError(
+ "Cannot create repository in %s: can't access parent!" % root)
try:
config_path = os.path.join(root, repo_config_name)
@@ -827,6 +927,7 @@ class PackageLoadError(spack.error.SpackError):
class UnknownPackageError(PackageLoadError):
"""Raised when we encounter a package spack doesn't have."""
+
def __init__(self, name, repo=None):
msg = None
if repo:
@@ -839,6 +940,7 @@ class UnknownPackageError(PackageLoadError):
class UnknownNamespaceError(PackageLoadError):
"""Raised when we encounter an unknown namespace"""
+
def __init__(self, namespace):
super(UnknownNamespaceError, self).__init__(
"Unknown namespace: %s" % namespace)
@@ -846,6 +948,7 @@ class UnknownNamespaceError(PackageLoadError):
class FailedConstructorError(PackageLoadError):
"""Raised when a package's class constructor fails."""
+
def __init__(self, name, exc_type, exc_obj, exc_tb):
super(FailedConstructorError, self).__init__(
"Class constructor failed for package '%s'." % name,
diff --git a/lib/spack/spack/resource.py b/lib/spack/spack/resource.py
index 24b675f8da..1d4d448298 100644
--- a/lib/spack/spack/resource.py
+++ b/lib/spack/spack/resource.py
@@ -31,9 +31,11 @@ package to enable optional features.
class Resource(object):
+ """Represents an optional resource to be fetched by a package.
+
+ Aggregates a name, a fetcher, a destination and a placement.
"""
- Represents an optional resource. Aggregates a name, a fetcher, a destination and a placement
- """
+
def __init__(self, name, fetcher, destination, placement):
self.name = name
self.fetcher = fetcher
diff --git a/lib/spack/spack/schema/__init__.py b/lib/spack/spack/schema/__init__.py
new file mode 100644
index 0000000000..de45ea921f
--- /dev/null
+++ b/lib/spack/spack/schema/__init__.py
@@ -0,0 +1,33 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""This module contains jsonschema files for all of Spack's YAML formats.
+"""
+from llnl.util.lang import list_modules
+
+# Automatically bring in all sub-modules
+__all__ = []
+for mod in list_modules(__path__[0]):
+ __import__('%s.%s' % (__name__, mod))
+ __all__.append(mod)
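(A minimal consumption sketch, not part of the commit: it assumes Spack's
lib directory is on sys.path and that the third-party 'jsonschema' package
is installed; the mirror document is invented.)

    import jsonschema            # external validator, assumed available
    import spack.schema.mirrors  # made importable by the loop above

    doc = {'mirrors': {'local': 'file:///tmp/mirror'}}  # invented payload
    jsonschema.validate(doc, spack.schema.mirrors.schema)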
diff --git a/lib/spack/spack/schema/compilers.py b/lib/spack/spack/schema/compilers.py
new file mode 100644
index 0000000000..2ffac03a66
--- /dev/null
+++ b/lib/spack/spack/schema/compilers.py
@@ -0,0 +1,80 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""Schema for compiler configuration files."""
+
+
+schema = {
+ '$schema': 'http://json-schema.org/schema#',
+ 'title': 'Spack compiler configuration file schema',
+ 'type': 'object',
+ 'additionalProperties': False,
+ 'patternProperties': {
+ 'compilers:?': { # optional colon for overriding site config.
+ 'type': 'array',
+ 'items': {
+ 'compiler': {
+ 'type': 'object',
+ 'additionalProperties': False,
+ 'required': [
+ 'paths', 'spec', 'modules', 'operating_system'],
+ 'properties': {
+ 'paths': {
+ 'type': 'object',
+ 'required': ['cc', 'cxx', 'f77', 'fc'],
+ 'additionalProperties': False,
+ 'properties': {
+ 'cc': {'anyOf': [{'type': 'string'},
+ {'type': 'null'}]},
+ 'cxx': {'anyOf': [{'type': 'string'},
+ {'type': 'null'}]},
+ 'f77': {'anyOf': [{'type': 'string'},
+ {'type': 'null'}]},
+ 'fc': {'anyOf': [{'type': 'string'},
+ {'type': 'null'}]},
+ 'cflags': {'anyOf': [{'type': 'string'},
+ {'type': 'null'}]},
+ 'cxxflags': {'anyOf': [{'type': 'string'},
+ {'type': 'null'}]},
+ 'fflags': {'anyOf': [{'type': 'string'},
+ {'type': 'null'}]},
+ 'cppflags': {'anyOf': [{'type': 'string'},
+ {'type': 'null'}]},
+ 'ldflags': {'anyOf': [{'type': 'string'},
+ {'type': 'null'}]},
+ 'ldlibs': {'anyOf': [{'type': 'string'},
+ {'type': 'null'}]}}},
+ 'spec': {'type': 'string'},
+ 'operating_system': {'type': 'string'},
+ 'alias': {'anyOf': [{'type': 'string'},
+ {'type': 'null'}]},
+ 'modules': {'anyOf': [{'type': 'string'},
+ {'type': 'null'},
+ {'type': 'array'}]}
+ },
+ },
+ },
+ },
+ },
+}
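(Sketch, not part of the commit: one compilers.yaml payload shaped the way
this schema expects; the compiler spec, OS name, and paths are invented.)

    compilers_doc = {
        'compilers': [
            {'compiler': {
                'spec': 'gcc@4.9.3',            # invented compiler spec
                'operating_system': 'centos7',  # invented OS name
                'modules': None,
                'paths': {'cc': '/usr/bin/gcc',
                          'cxx': '/usr/bin/g++',
                          'f77': None,          # null entries are allowed
                          'fc': None}}},
        ],
    }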
diff --git a/lib/spack/spack/schema/mirrors.py b/lib/spack/spack/schema/mirrors.py
new file mode 100644
index 0000000000..ff599b9c7d
--- /dev/null
+++ b/lib/spack/spack/schema/mirrors.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""Schema for mirror configuration files."""
+
+
+schema = {
+ '$schema': 'http://json-schema.org/schema#',
+ 'title': 'Spack mirror configuration file schema',
+ 'type': 'object',
+ 'additionalProperties': False,
+ 'patternProperties': {
+ r'mirrors:?': {
+ 'type': 'object',
+ 'default': {},
+ 'additionalProperties': False,
+ 'patternProperties': {
+ r'\w[\w-]*': {
+ 'type': 'string'},
+ },
+ },
+ },
+}
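(Sketch: the whole document this schema describes is a flat name-to-URL
map; the mirror name and URL below are invented.)

    mirrors_doc = {'mirrors': {'local-cache': 'file:///opt/spack-mirror'}}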
diff --git a/lib/spack/spack/schema/modules.py b/lib/spack/spack/schema/modules.py
new file mode 100644
index 0000000000..f8066919f1
--- /dev/null
+++ b/lib/spack/spack/schema/modules.py
@@ -0,0 +1,158 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""Schema for mirror configuration files."""
+
+
+schema = {
+ '$schema': 'http://json-schema.org/schema#',
+ 'title': 'Spack module file configuration file schema',
+ 'type': 'object',
+ 'additionalProperties': False,
+ 'definitions': {
+ 'array_of_strings': {
+ 'type': 'array',
+ 'default': [],
+ 'items': {
+ 'type': 'string'
+ }
+ },
+ 'dictionary_of_strings': {
+ 'type': 'object',
+ 'patternProperties': {
+ r'\w[\w-]*': { # key
+ 'type': 'string'
+ }
+ }
+ },
+ 'dependency_selection': {
+ 'type': 'string',
+ 'enum': ['none', 'direct', 'all']
+ },
+ 'module_file_configuration': {
+ 'type': 'object',
+ 'default': {},
+ 'additionalProperties': False,
+ 'properties': {
+ 'filter': {
+ 'type': 'object',
+ 'default': {},
+ 'additionalProperties': False,
+ 'properties': {
+ 'environment_blacklist': {
+ 'type': 'array',
+ 'default': [],
+ 'items': {
+ 'type': 'string'
+ }
+ }
+ }
+ },
+ 'autoload': {
+ '$ref': '#/definitions/dependency_selection'},
+ 'prerequisites': {
+ '$ref': '#/definitions/dependency_selection'},
+ 'conflict': {
+ '$ref': '#/definitions/array_of_strings'},
+ 'load': {
+ '$ref': '#/definitions/array_of_strings'},
+ 'suffixes': {
+ '$ref': '#/definitions/dictionary_of_strings'},
+ 'environment': {
+ 'type': 'object',
+ 'default': {},
+ 'additionalProperties': False,
+ 'properties': {
+ 'set': {
+ '$ref': '#/definitions/dictionary_of_strings'},
+ 'unset': {
+ '$ref': '#/definitions/array_of_strings'},
+ 'prepend_path': {
+ '$ref': '#/definitions/dictionary_of_strings'},
+ 'append_path': {
+ '$ref': '#/definitions/dictionary_of_strings'}
+ }
+ }
+ }
+ },
+ 'module_type_configuration': {
+ 'type': 'object',
+ 'default': {},
+ 'anyOf': [
+ {'properties': {
+ 'hash_length': {
+ 'type': 'integer',
+ 'minimum': 0,
+ 'default': 7
+ },
+ 'whitelist': {
+ '$ref': '#/definitions/array_of_strings'},
+ 'blacklist': {
+ '$ref': '#/definitions/array_of_strings'},
+ 'naming_scheme': {
+ 'type': 'string' # Can we be more specific here?
+ }
+ }},
+ {'patternProperties': {
+ r'\w[\w-]*': {
+ '$ref': '#/definitions/module_file_configuration'
+ }
+ }}
+ ]
+ }
+ },
+ 'patternProperties': {
+ r'modules:?': {
+ 'type': 'object',
+ 'default': {},
+ 'additionalProperties': False,
+ 'properties': {
+ 'prefix_inspections': {
+ 'type': 'object',
+ 'patternProperties': {
+ # prefix-relative path to be inspected for existence
+ r'\w[\w-]*': {
+ '$ref': '#/definitions/array_of_strings'}}},
+ 'enable': {
+ 'type': 'array',
+ 'default': [],
+ 'items': {
+ 'type': 'string',
+ 'enum': ['tcl', 'dotkit']}},
+ 'tcl': {
+ 'allOf': [
+ # Base configuration
+ {'$ref': '#/definitions/module_type_configuration'},
+ {} # Specific tcl extensions
+ ]},
+ 'dotkit': {
+ 'allOf': [
+ # Base configuration
+ {'$ref': '#/definitions/module_type_configuration'},
+ {} # Specific dotkit extensions
+ ]},
+ }
+ },
+ },
+}
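(Sketch, with invented names: a modules.yaml payload exercising the
'enable' list and the first anyOf branch of module_type_configuration.)

    modules_doc = {
        'modules': {
            'enable': ['tcl'],         # schema only admits 'tcl'/'dotkit'
            'tcl': {
                'hash_length': 7,      # the schema default
                'whitelist': ['gcc'],  # invented package name
            },
        },
    }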
diff --git a/lib/spack/spack/schema/packages.py b/lib/spack/spack/schema/packages.py
new file mode 100644
index 0000000000..e19f3f533b
--- /dev/null
+++ b/lib/spack/spack/schema/packages.py
@@ -0,0 +1,86 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""Schema for packages.yaml configuration files."""
+
+
+schema = {
+ '$schema': 'http://json-schema.org/schema#',
+ 'title': 'Spack package configuration file schema',
+ 'type': 'object',
+ 'additionalProperties': False,
+ 'patternProperties': {
+ r'packages:?': {
+ 'type': 'object',
+ 'default': {},
+ 'additionalProperties': False,
+ 'patternProperties': {
+ r'\w[\w-]*': { # package name
+ 'type': 'object',
+ 'default': {},
+ 'additionalProperties': False,
+ 'properties': {
+ 'version': {
+ 'type': 'array',
+ 'default': [],
+ # version strings
+ 'items': {'anyOf': [{'type': 'string'},
+ {'type': 'number'}]}},
+ 'compiler': {
+ 'type': 'array',
+ 'default': [],
+ 'items': {'type': 'string'}}, # compiler specs
+ 'buildable': {
+ 'type': 'boolean',
+ 'default': True,
+ },
+ 'modules': {
+ 'type': 'object',
+ 'default': {},
+ },
+ 'providers': {
+ 'type': 'object',
+ 'default': {},
+ 'additionalProperties': False,
+ 'patternProperties': {
+ r'\w[\w-]*': {
+ 'type': 'array',
+ 'default': [],
+ 'items': {'type': 'string'}}}},
+ 'paths': {
+ 'type': 'object',
+ 'default': {},
+ },
+ 'variants': {
+ 'oneOf': [
+ {'type': 'string'},
+ {'type': 'array',
+ 'items': {'type': 'string'}}],
+ },
+ },
+ },
+ },
+ },
+ },
+}
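(Sketch, with invented package names: a packages.yaml payload touching the
'version', 'buildable', and 'providers' properties above.)

    packages_doc = {
        'packages': {
            'mpi': {'providers': {'mpi': ['mpich', 'openmpi']}},
            'mpich': {'version': ['3.2'], 'buildable': True},
        },
    }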
diff --git a/lib/spack/spack/schema/repos.py b/lib/spack/spack/schema/repos.py
new file mode 100644
index 0000000000..9f01942422
--- /dev/null
+++ b/lib/spack/spack/schema/repos.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""Schema for repository configuration files."""
+
+
+schema = {
+ '$schema': 'http://json-schema.org/schema#',
+ 'title': 'Spack repository configuration file schema',
+ 'type': 'object',
+ 'additionalProperties': False,
+ 'patternProperties': {
+ r'repos:?': {
+ 'type': 'array',
+ 'default': [],
+ 'items': {
+ 'type': 'string'},
+ },
+ },
+}
diff --git a/lib/spack/spack/schema/targets.py b/lib/spack/spack/schema/targets.py
new file mode 100644
index 0000000000..312474cab4
--- /dev/null
+++ b/lib/spack/spack/schema/targets.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""Schema for target configuration files."""
+
+
+schema = {
+ '$schema': 'http://json-schema.org/schema#',
+ 'title': 'Spack target configuration file schema',
+ 'type': 'object',
+ 'additionalProperties': False,
+ 'patternProperties': {
+ r'targets:?': {
+ 'type': 'object',
+ 'default': {},
+ 'additionalProperties': False,
+ 'patternProperties': {
+ r'\w[\w-]*': { # target name
+ 'type': 'string',
+ },
+ },
+ },
+ },
+}
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index 16b61236a9..316b7d6a8c 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -1,4 +1,4 @@
-#
+##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
@@ -18,10 +18,10 @@
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-#
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
"""
Spack allows very fine-grained control over how packages are installed and
over how they are built and configured. To make this easy, it has its own
@@ -95,27 +95,33 @@ thing. Spack uses ~variant in directory names and in the canonical form of
specs to avoid ambiguity. Both are provided because ~ can cause shell
expansion when it is the first character in an id typed on the command line.
"""
-import sys
-import hashlib
import base64
+import hashlib
+import imp
+import inspect
+import sys
from StringIO import StringIO
from operator import attrgetter
+
import yaml
from yaml.error import MarkedYAMLError
import llnl.util.tty as tty
+from llnl.util.filesystem import join_path
from llnl.util.lang import *
from llnl.util.tty.color import *
import spack
-import spack.parse
-import spack.error
+import spack.architecture
import spack.compilers as compilers
-
-from spack.version import *
-from spack.util.string import *
+import spack.error
+import spack.parse
+from spack.build_environment import get_path_from_module, load_module
+from spack.util.naming import mod_to_class
from spack.util.prefix import Prefix
-from spack.virtual import ProviderIndex
+from spack.util.string import *
+from spack.version import *
+from spack.provider_index import ProviderIndex
+
# Valid pattern for an identifier in Spack
identifier_re = r'\w[\w-]*'
@@ -147,18 +153,13 @@ _separators = '[%s]' % ''.join(color_formats.keys())
every time we call str()"""
_any_version = VersionList([':'])
-
-def index_specs(specs):
- """Take a list of specs and return a dict of lists. Dict is
- keyed by spec name and lists include all specs with the
- same name.
- """
- spec_dict = {}
- for spec in specs:
- if spec.name not in spec_dict:
- spec_dict[spec.name] = []
- spec_dict[spec.name].append(spec)
- return spec_dict
+# Special types of dependencies.
+alldeps = ('build', 'link', 'run')
+nolink = ('build', 'run')
+special_types = {
+ 'alldeps': alldeps,
+ 'nolink': nolink,
+}
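(Sketch: how these names expand once the '_deptype_norm' helper added later
in this diff resolves them; the assertions restate the table above.)

    assert special_types['alldeps'] == ('build', 'link', 'run')
    assert special_types['nolink'] == ('build', 'run')
    # Plain deptype names fall through as a one-element tuple:
    assert special_types.get('link', ('link',)) == ('link',)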
def colorize_spec(spec):
@@ -183,7 +184,6 @@ def colorize_spec(spec):
@key_ordering
class CompilerSpec(object):
-
"""The CompilerSpec field represents the compiler or range of compiler
versions that a package should be built with. CompilerSpecs have a
name and a version list. """
@@ -287,6 +287,33 @@ class CompilerSpec(object):
@key_ordering
+class DependencySpec(object):
+ """Dependencies can be one (or more) of several types:
+
+ - build: needs to be in the PATH at build time.
+ - link: is linked to and added to compiler flags.
+ - run: needs to be in the PATH for the package to run.
+
+ Fields:
+ - spec: the spack.spec.Spec description of a dependency.
+ - deptypes: strings representing the types of this dependency.
+ """
+
+ def __init__(self, spec, deptypes):
+ self.spec = spec
+ self.deptypes = deptypes
+
+ def _cmp_key(self):
+ return self.spec
+
+ def copy(self):
+ return DependencySpec(self.spec.copy(), self.deptypes)
+
+ def __str__(self):
+ return str(self.spec)
+
+
+@key_ordering
class VariantSpec(object):
"""Variants are named, build-time options for a package. Names depend
@@ -305,11 +332,10 @@ class VariantSpec(object):
return VariantSpec(self.name, self.value)
def __str__(self):
- if self.value in [True, False]:
- out = '+' if self.value else '~'
- return out + self.name
+ if type(self.value) == bool:
+ return '{0}{1}'.format('+' if self.value else '~', self.name)
else:
- return ' ' + self.name + "=" + self.value
+ return ' {0}={1}'.format(self.name, self.value)
class VariantMap(HashableMap):
@@ -418,16 +444,15 @@ class FlagMap(HashableMap):
return clone
def _cmp_key(self):
- return ''.join(str(key) + ' '.join(str(v) for v in value)
- for key, value in sorted(self.items()))
+ return tuple((k, tuple(v)) for k, v in sorted(self.iteritems()))
def __str__(self):
sorted_keys = filter(
lambda flag: self[flag] != [], sorted(self.keys()))
cond_symbol = ' ' if len(sorted_keys) > 0 else ''
- return cond_symbol + ' '.join(str(key) + '=\"' + ' '.join(str(f)
- for f in self[key]) + '\"'
- for key in sorted_keys)
+ return cond_symbol + ' '.join(
+ str(key) + '=\"' + ' '.join(
+ str(f) for f in self[key]) + '\"' for key in sorted_keys)
class DependencyMap(HashableMap):
@@ -436,11 +461,11 @@ class DependencyMap(HashableMap):
The DependencyMap is keyed by name. """
@property
def concrete(self):
- return all(d.concrete for d in self.values())
+ return all(d.spec.concrete for d in self.values())
def __str__(self):
return ''.join(
- ["^" + str(self[name]) for name in sorted(self.keys())])
+ ["^" + self[name].format() for name in sorted(self.keys())])
@key_ordering
@@ -468,13 +493,13 @@ class Spec(object):
# writes directly into this Spec object.
other = spec_list[0]
self.name = other.name
- self.dependents = other.dependents
self.versions = other.versions
self.architecture = other.architecture
self.compiler = other.compiler
self.compiler_flags = other.compiler_flags
self.compiler_flags.spec = self
- self.dependencies = other.dependencies
+ self._dependencies = other._dependencies
+ self._dependents = other._dependents
self.variants = other.variants
self.variants.spec = self
self.namespace = other.namespace
@@ -488,14 +513,58 @@ class Spec(object):
self._concrete = kwargs.get('concrete', False)
# Allow a spec to be constructed with an external path.
- self.external = kwargs.get('external', None)
+ self.external = kwargs.get('external', None)
+ self.external_module = kwargs.get('external_module', None)
# This allows users to construct a spec DAG with literals.
# Note that given two specs a and b, Spec(a) copies a, but
# Spec(a, b) will copy a but just add b as a dep.
for dep in dep_like:
spec = dep if isinstance(dep, Spec) else Spec(dep)
- self._add_dependency(spec)
+ # XXX(deptype): default deptypes
+ self._add_dependency(spec, ('build', 'link'))
+
+ def get_dependency(self, name):
+ dep = self._dependencies.get(name)
+ if dep is not None:
+ return dep
+ raise InvalidDependencyException(
+ self.name + " does not depend on " + comma_or(name))
+
+ def _deptype_norm(self, deptype):
+ if deptype is None:
+ return alldeps
+ # Normalize deptype to a tuple so membership tests work uniformly.
+ if isinstance(deptype, str):
+ # Support special deptypes.
+ return special_types.get(deptype, (deptype,))
+ return deptype
+
+ def _find_deps(self, where, deptype):
+ deptype = self._deptype_norm(deptype)
+
+ return [dep.spec
+ for dep in where.values()
+ if deptype and any(d in deptype for d in dep.deptypes)]
+
+ def dependencies(self, deptype=None):
+ return self._find_deps(self._dependencies, deptype)
+
+ def dependents(self, deptype=None):
+ return self._find_deps(self._dependents, deptype)
+
+ def _find_deps_dict(self, where, deptype):
+ deptype = self._deptype_norm(deptype)
+
+ return dict((dep.spec.name, dep)
+ for dep in where.values()
+ if deptype and any(d in deptype for d in dep.deptypes))
+
+ def dependencies_dict(self, deptype=None):
+ return self._find_deps_dict(self._dependencies, deptype)
+
+ def dependents_dict(self, deptype=None):
+ return self._find_deps_dict(self._dependents, deptype)
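(Sketch of the new query API on a hand-built DAG; the package names are
invented and '_add_dependency', the parser's private helper, is used only
for brevity.)

    root = Spec('mypkg')
    root._add_dependency(Spec('cmake'), ('build',))
    root._add_dependency(Spec('mylib'), ('build', 'link'))

    [d.name for d in root.dependencies('link')]           # ['mylib']
    sorted(d.name for d in root.dependencies('alldeps'))  # both deps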
#
# Private routines here are called by the parser when building a spec.
@@ -520,8 +589,33 @@ class Spec(object):
Known flags currently include "arch"
"""
valid_flags = FlagMap.valid_compiler_flags()
- if name == 'arch':
- self._set_architecture(value)
+ if name == 'arch' or name == 'architecture':
+ parts = value.split('-')
+ if len(parts) == 3:
+ platform, op_sys, target = parts
+ else:
+ platform, op_sys, target = None, None, value
+
+ assert(self.architecture.platform is None)
+ assert(self.architecture.platform_os is None)
+ assert(self.architecture.target is None)
+ assert(self.architecture.os_string is None)
+ assert(self.architecture.target_string is None)
+ self._set_platform(platform)
+ self._set_os(op_sys)
+ self._set_target(target)
+ elif name == 'platform':
+ self._set_platform(value)
+ elif name == 'os' or name == 'operating_system':
+ if self.architecture.platform:
+ self._set_os(value)
+ else:
+ self.architecture.os_string = value
+ elif name == 'target':
+ if self.architecture.platform:
+ self._set_target(value)
+ else:
+ self.architecture.target_string = value
elif name in valid_flags:
assert(self.compiler_flags is not None)
self.compiler_flags[name] = value.split()
@@ -535,28 +629,69 @@ class Spec(object):
"Spec for '%s' cannot have two compilers." % self.name)
self.compiler = compiler
- def _set_architecture(self, architecture):
- """Called by the parser to set the architecture."""
- if self.architecture:
- raise DuplicateArchitectureError(
- "Spec for '%s' cannot have two architectures." % self.name)
- self.architecture = architecture
-
- def _add_dependency(self, spec):
+ def _set_platform(self, value):
+ """Called by the parser to set the architecture platform"""
+ if isinstance(value, basestring):
+ mod_path = spack.platform_path
+ mod_string = 'spack.platforms.'
+ names = list_modules(mod_path)
+ if value in names:
+ # Create a platform object from the name
+ mod_name = mod_string + value
+ path = join_path(mod_path, value) + '.py'
+ mod = imp.load_source(mod_name, path)
+ class_name = mod_to_class(value)
+ if not hasattr(mod, class_name):
+ tty.die(
+ 'No class %s defined in %s' % (class_name, mod_name))
+ cls = getattr(mod, class_name)
+ if not inspect.isclass(cls):
+ tty.die('%s.%s is not a class' % (mod_name, class_name))
+ platform = cls()
+ else:
+ tty.die("No platform class %s defined." % value)
+ else:
+ # The value is a platform
+ platform = value
+
+ self.architecture.platform = platform
+
+ # Set os and target if we previously got strings for them
+ if self.architecture.os_string:
+ self._set_os(self.architecture.os_string)
+ self.architecture.os_string = None
+ if self.architecture.target_string:
+ self._set_target(self.architecture.target_string)
+ self.architecture.target_string = None
+
+ def _set_os(self, value):
+ """Called by the parser to set the architecture operating system"""
+ arch = self.architecture
+ if arch.platform:
+ arch.platform_os = arch.platform.operating_system(value)
+
+ def _set_target(self, value):
+ """Called by the parser to set the architecture target"""
+ arch = self.architecture
+ if arch.platform:
+ arch.target = arch.platform.target(value)
+
+ def _add_dependency(self, spec, deptypes):
"""Called by the parser to add another spec as a dependency."""
- if spec.name in self.dependencies:
+ if spec.name in self._dependencies:
raise DuplicateDependencyError(
"Cannot depend on '%s' twice" % spec)
- self.dependencies[spec.name] = spec
- spec.dependents[self.name] = self
+ self._dependencies[spec.name] = DependencySpec(spec, deptypes)
+ spec._dependents[self.name] = DependencySpec(self, deptypes)
#
# Public interface
#
@property
def fullname(self):
- return (('%s.%s' % (self.namespace, self.name)) if self.namespace else
- (self.name if self.name else ''))
+ return (
+ ('%s.%s' % (self.namespace, self.name)) if self.namespace else
+ (self.name if self.name else ''))
@property
def root(self):
@@ -565,15 +700,15 @@ class Spec(object):
installed). This will throw an assertion error if that is not
the case.
"""
- if not self.dependents:
+ if not self._dependents:
return self
# If the spec has multiple dependents, ensure that they all
# lead to the same place. Spack shouldn't deal with any DAGs
# with multiple roots, so something's wrong if we find one.
- depiter = iter(self.dependents.values())
- first_root = next(depiter).root
- assert(all(first_root is d.root for d in depiter))
+ depiter = iter(self._dependents.values())
+ first_root = next(depiter).spec.root
+ assert(all(first_root is d.spec.root for d in depiter))
return first_root
@property
@@ -585,7 +720,7 @@ class Spec(object):
"""Internal package call gets only the class object for a package.
Use this to just get package metadata.
"""
- return spack.repo.get_pkg_class(self.name)
+ return spack.repo.get_pkg_class(self.fullname)
@property
def virtual(self):
@@ -617,13 +752,24 @@ class Spec(object):
self.versions.concrete and
self.variants.concrete and
self.architecture and
- self.compiler and
- self.compiler.concrete and
+ self.architecture.concrete and
+ self.compiler and self.compiler.concrete and
self.compiler_flags.concrete and
- self.dependencies.concrete)
+ self._dependencies.concrete)
return self._concrete
- def traverse(self, visited=None, d=0, **kwargs):
+ def traverse(self, visited=None, deptype=None, **kwargs):
+ traversal = self.traverse_with_deptype(visited=visited,
+ deptype=deptype,
+ **kwargs)
+ if kwargs.get('depth', False):
+ return [(s[0], s[1].spec) for s in traversal]
+ else:
+ return [s.spec for s in traversal]
+
+ def traverse_with_deptype(self, visited=None, d=0, deptype=None,
+ deptype_query=None, _self_deptype=None,
+ **kwargs):
"""Generic traversal of the DAG represented by this spec.
This will yield each node in the spec. Options:
@@ -658,7 +804,7 @@ class Spec(object):
in the traversal.
root [=True]
- If false, this won't yield the root node, just its descendents.
+ If False, this won't yield the root node, just its descendants.
direction [=children|parents]
If 'children', does a traversal of this spec's children. If
@@ -675,6 +821,12 @@ class Spec(object):
direction = kwargs.get('direction', 'children')
order = kwargs.get('order', 'pre')
+ if deptype is None:
+ deptype = alldeps
+
+ if deptype_query is None:
+ deptype_query = ('link', 'run')
+
# Make sure kwargs have legal values; raise ValueError if not.
def validate(name, val, allowed_values):
if val not in allowed_values:
@@ -692,30 +844,37 @@ class Spec(object):
if key in visited and cover == 'nodes':
return
- # Determine whether and what to yield for this node.
+ def return_val(res):
+ return (d, res) if depth else res
+
yield_me = yield_root or d > 0
- result = (d, self) if depth else self
# Preorder traversal yields before successors
if yield_me and order == 'pre':
- yield result
+ yield return_val(DependencySpec(self, _self_deptype))
+
+ deps = self.dependencies_dict(deptype)
# Edge traversal yields but skips children of visited nodes
if not (key in visited and cover == 'edges'):
# This code determines direction and yields the children/parents
- successors = self.dependencies
+ successors = deps
if direction == 'parents':
- successors = self.dependents
+ successors = self.dependents_dict()
visited.add(key)
for name in sorted(successors):
child = successors[name]
- for elt in child.traverse(visited, d + 1, **kwargs):
+ children = child.spec.traverse_with_deptype(
+ visited, d=d + 1, deptype=deptype,
+ deptype_query=deptype_query,
+ _self_deptype=child.deptypes, **kwargs)
+ for elt in children:
yield elt
# Postorder traversal yields after successors
if yield_me and order == 'post':
- yield result
+ yield return_val(DependencySpec(self, _self_deptype))
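(Sketch of typical calls to the reworked traversal; 'spec' stands for any
built Spec and the deptype values come from the special_types table.)

    for node in spec.traverse(order='post', deptype=('link', 'run')):
        print(node.name)                 # dependencies before dependents

    for depth, node in spec.traverse(depth=True):
        print('  ' * depth + node.name)  # indent each node by DAG depth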
@property
def short_spec(self):
@@ -740,6 +899,7 @@ class Spec(object):
if self._hash:
return self._hash[:length]
else:
+ # XXX(deptype): ignore 'build' dependencies here
yaml_text = yaml.dump(
self.to_node_dict(), default_flow_style=True, width=sys.maxint)
sha = hashlib.sha1(yaml_text)
@@ -749,32 +909,42 @@ class Spec(object):
return b32_hash
def to_node_dict(self):
+ d = {}
+
params = dict((name, v.value) for name, v in self.variants.items())
params.update(dict((name, value)
- for name, value in self.compiler_flags.items()))
- d = {
- 'parameters': params,
- 'arch': self.architecture,
- 'dependencies': dict((d, self.dependencies[d].dag_hash())
- for d in sorted(self.dependencies)),
- }
-
- # Older concrete specs do not have a namespace. Omit for
- # consistent hashing.
- if not self.concrete or self.namespace:
+ for name, value in self.compiler_flags.items()))
+
+ if params:
+ d['parameters'] = params
+
+ if self.dependencies():
+ deps = self.dependencies_dict(deptype=('link', 'run'))
+ d['dependencies'] = dict(
+ (name, {
+ 'hash': dspec.spec.dag_hash(),
+ 'type': [str(s) for s in dspec.deptypes]})
+ for name, dspec in deps.items())
+
+ if self.namespace:
d['namespace'] = self.namespace
+ if self.architecture:
+ # TODO: Fix the target.to_dict to account for the tuple
+ # Want it to be a dict of dicts
+ d['arch'] = self.architecture.to_dict()
+
if self.compiler:
d.update(self.compiler.to_dict())
- else:
- d['compiler'] = None
- d.update(self.versions.to_dict())
+
+ if self.versions:
+ d.update(self.versions.to_dict())
return {self.name: d}
def to_yaml(self, stream=None):
node_list = []
- for s in self.traverse(order='pre'):
+ for s in self.traverse(order='pre', deptype=('link', 'run')):
node = s.to_node_dict()
node[s.name]['hash'] = s.dag_hash()
node_list.append(node)
@@ -788,16 +958,18 @@ class Spec(object):
spec = Spec(name)
spec.namespace = node.get('namespace', None)
- spec.versions = VersionList.from_dict(node)
- spec.architecture = node['arch']
+ spec._hash = node.get('hash', None)
- if 'hash' in node:
- spec._hash = node['hash']
+ if 'version' in node or 'versions' in node:
+ spec.versions = VersionList.from_dict(node)
- if node['compiler'] is None:
- spec.compiler = None
- else:
+ if 'arch' in node:
+ spec.architecture = spack.architecture.arch_from_dict(node['arch'])
+
+ if 'compiler' in node:
spec.compiler = CompilerSpec.from_dict(node)
+ else:
+ spec.compiler = None
if 'parameters' in node:
for name, value in node['parameters'].items():
@@ -805,18 +977,41 @@ class Spec(object):
spec.compiler_flags[name] = value
else:
spec.variants[name] = VariantSpec(name, value)
+
elif 'variants' in node:
for name, value in node['variants'].items():
spec.variants[name] = VariantSpec(name, value)
for name in FlagMap.valid_compiler_flags():
spec.compiler_flags[name] = []
- else:
- raise SpackRecordError(
- "Did not find a valid format for variants in YAML file")
+
+ # Don't read dependencies here; from_node_dict() is used by
+ # from_yaml() to read the root *and* each dependency spec.
return spec
@staticmethod
+ def read_yaml_dep_specs(dependency_dict):
+ """Read the DependencySpec portion of a YAML-formatted Spec.
+
+ This needs to be backward-compatible with older spack spec
+ formats so that reindex will work on old specs/databases.
+ """
+ for dep_name, elt in dependency_dict.items():
+ if isinstance(elt, basestring):
+ # original format, elt is just the dependency hash.
+ dag_hash, deptypes = elt, ['build', 'link']
+ elif isinstance(elt, tuple):
+ # first deptypes format: a (hash, deptypes) tuple; not future-proof.
+ dag_hash, deptypes = elt
+ elif isinstance(elt, dict):
+ # new format: elements of dependency spec are keyed.
+ dag_hash, deptypes = elt['hash'], elt['type']
+ else:
+ raise SpecError("Couldn't parse dependency types in spec.")
+
+ yield dep_name, dag_hash, list(deptypes)
+
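(Sketch of the three on-disk layouts the method above accepts; the hash is
an invented 32-character stand-in.)

    h = 'es6uye3tn5hnezh47ryhrjvhhwjqfjhd'
    old = {'mpich': h}                              # bare hash
    mid = {'mpich': (h, ['build', 'link'])}         # interim tuple form
    new = {'mpich': {'hash': h, 'type': ['link']}}  # keyed dict (current)

    for fmt in (old, mid, new):
        print(list(Spec.read_yaml_dep_specs(fmt)))  # (name, hash, types)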
+ @staticmethod
def from_yaml(stream):
"""Construct a spec from YAML.
@@ -827,25 +1022,34 @@ class Spec(object):
represent more than the DAG does.
"""
- deps = {}
- spec = None
-
try:
yfile = yaml.load(stream)
- except MarkedYAMLError, e:
+ except MarkedYAMLError as e:
raise SpackYAMLError("error parsing YAML spec:", str(e))
- for node in yfile['spec']:
- name = next(iter(node))
- dep = Spec.from_node_dict(node)
- if not spec:
- spec = dep
- deps[dep.name] = dep
+ nodes = yfile['spec']
+
+ # Read nodes out of list. Root spec is the first element;
+ # dependencies are the following elements.
+ dep_list = [Spec.from_node_dict(node) for node in nodes]
+ if not dep_list:
+ raise SpecError("YAML spec contains no nodes.")
+ deps = dict((spec.name, spec) for spec in dep_list)
+ spec = dep_list[0]
- for node in yfile['spec']:
+ for node in nodes:
+ # get dependency dict from the node.
name = next(iter(node))
- for dep_name in node[name]['dependencies']:
- deps[name].dependencies[dep_name] = deps[dep_name]
+
+ if 'dependencies' not in node[name]:
+ continue
+
+ yaml_deps = node[name]['dependencies']
+ for dname, dhash, dtypes in Spec.read_yaml_dep_specs(yaml_deps):
+ # Fill in dependencies by looking them up by name in deps dict
+ deps[name]._dependencies[dname] = DependencySpec(
+ deps[dname], set(dtypes))
+
return spec
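(Sketch of the round trip to_yaml/from_yaml now support, assuming 'spec'
is any concrete Spec; to_yaml returns the YAML text when no stream is
given.)

    text = spec.to_yaml()            # root node first, then link/run deps
    again = Spec.from_yaml(text)
    assert again.dag_hash() == spec.dag_hash()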
def _concretize_helper(self, presets=None, visited=None):
@@ -865,13 +1069,12 @@ class Spec(object):
changed = False
# Concretize deps first -- this is a bottom-up process.
- for name in sorted(self.dependencies.keys()):
- changed |= self.dependencies[
- name]._concretize_helper(presets, visited)
+ for name in sorted(self._dependencies.keys()):
+ changed |= self._dependencies[
+ name].spec._concretize_helper(presets, visited)
if self.name in presets:
changed |= self.constrain(presets[self.name])
-
else:
# Concretize virtual dependencies last. Because they're added
# to presets below, their constraints will all be merged, but we'll
@@ -892,13 +1095,16 @@ class Spec(object):
def _replace_with(self, concrete):
"""Replace this virtual spec with a concrete spec."""
assert(self.virtual)
- for name, dependent in self.dependents.items():
+ for name, dep_spec in self._dependents.items():
+ dependent = dep_spec.spec
+ deptypes = dep_spec.deptypes
+
# remove self from all dependents.
- del dependent.dependencies[self.name]
+ del dependent._dependencies[self.name]
# add the replacement, unless it is already a dep of dependent.
- if concrete.name not in dependent.dependencies:
- dependent._add_dependency(concrete)
+ if concrete.name not in dependent._dependencies:
+ dependent._add_dependency(concrete, deptypes)
def _replace_node(self, replacement):
"""Replace this spec with another.
@@ -909,13 +1115,15 @@ class Spec(object):
to be normalized.
"""
- for name, dependent in self.dependents.items():
- del dependent.dependencies[self.name]
- dependent._add_dependency(replacement)
+ for name, dep_spec in self._dependents.items():
+ dependent = dep_spec.spec
+ deptypes = dep_spec.deptypes
+ del dependent._dependencies[self.name]
+ dependent._add_dependency(replacement, deptypes)
- for name, dep in self.dependencies.items():
- del dep.dependents[self.name]
- del self.dependencies[dep.name]
+ for name, dep_spec in self._dependencies.items():
+ del dep_spec.spec._dependents[self.name]
+ del self._dependencies[name]
def _expand_virtual_packages(self):
"""Find virtual packages in this spec, replace them with providers,
@@ -935,12 +1143,14 @@ class Spec(object):
a problem.
"""
# Make an index of stuff this spec already provides
+ # XXX(deptype): 'link' and 'run'?
self_index = ProviderIndex(self.traverse(), restrict=True)
-
changed = False
done = False
+
while not done:
done = True
+ # XXX(deptype): 'link' and 'run'?
for spec in list(self.traverse()):
replacement = None
if spec.virtual:
@@ -979,11 +1189,12 @@ class Spec(object):
continue
# If replacement is external then trim the dependencies
- if replacement.external:
- if (spec.dependencies):
+ if replacement.external or replacement.external_module:
+ if (spec._dependencies):
changed = True
- spec.dependencies = DependencyMap()
- replacement.dependencies = DependencyMap()
+ spec._dependencies = DependencyMap()
+ replacement._dependencies = DependencyMap()
+ replacement.architecture = self.architecture
# TODO: could this and the stuff in _dup be cleaned up?
def feq(cfield, sfield):
@@ -994,9 +1205,11 @@ class Spec(object):
feq(replacement.versions, spec.versions) and
feq(replacement.compiler, spec.compiler) and
feq(replacement.architecture, spec.architecture) and
- feq(replacement.dependencies, spec.dependencies) and
+ feq(replacement._dependencies, spec._dependencies) and
feq(replacement.variants, spec.variants) and
- feq(replacement.external, spec.external)):
+ feq(replacement.external, spec.external) and
+ feq(replacement.external_module,
+ spec.external_module)):
continue
# Refine this spec to the candidate. This uses
# replace_with AND dup so that it can work in
@@ -1041,7 +1254,7 @@ class Spec(object):
changed = any(changes)
force = True
- for s in self.traverse():
+ for s in self.traverse(deptype_query=alldeps):
# After concretizing, assign namespaces to anything left.
# Note that this doesn't count as a "change". The repository
# configuration is constant throughout a spack run, and
@@ -1053,6 +1266,15 @@ class Spec(object):
if s.namespace is None:
s.namespace = spack.repo.repo_for_pkg(s.name).namespace
+ for s in self.traverse(root=False):
+ if s.external_module:
+ compiler = spack.compilers.compiler_for_spec(
+ s.compiler, s.architecture)
+ for mod in compiler.modules:
+ load_module(mod)
+
+ s.external = get_path_from_module(s.external_module)
+
# Mark everything in the spec as concrete, as well.
self._mark_concrete()
@@ -1062,7 +1284,7 @@ class Spec(object):
Only for internal use -- client code should use "concretize"
unless there is a need to force a spec to be concrete.
"""
- for s in self.traverse():
+ for s in self.traverse(deptype_query=alldeps):
s._normal = True
s._concrete = True
@@ -1075,6 +1297,13 @@ class Spec(object):
return clone
def flat_dependencies(self, **kwargs):
+ flat_deps = DependencyMap()
+ flat_deps_deptypes = self.flat_dependencies_with_deptype(**kwargs)
+ for name, depspec in flat_deps_deptypes.items():
+ flat_deps[name] = depspec.spec
+ return flat_deps
+
+ def flat_dependencies_with_deptype(self, **kwargs):
"""Return a DependencyMap containing all of this spec's
dependencies with their constraints merged.
@@ -1085,27 +1314,35 @@ class Spec(object):
returns them.
"""
copy = kwargs.get('copy', True)
+ deptype_query = kwargs.get('deptype_query')
flat_deps = DependencyMap()
try:
- for spec in self.traverse(root=False):
+ deptree = self.traverse_with_deptype(root=False,
+ deptype_query=deptype_query)
+ for depspec in deptree:
+ spec = depspec.spec
+ deptypes = depspec.deptypes
+
if spec.name not in flat_deps:
if copy:
- flat_deps[spec.name] = spec.copy(deps=False)
+ dep_spec = DependencySpec(spec.copy(deps=False),
+ deptypes)
else:
- flat_deps[spec.name] = spec
+ dep_spec = DependencySpec(spec, deptypes)
+ flat_deps[spec.name] = dep_spec
else:
- flat_deps[spec.name].constrain(spec)
+ flat_deps[spec.name].spec.constrain(spec)
if not copy:
- for dep in flat_deps.values():
- dep.dependencies.clear()
- dep.dependents.clear()
- self.dependencies.clear()
+ for depspec in flat_deps.values():
+ depspec.spec._dependencies.clear()
+ depspec.spec._dependents.clear()
+ self._dependencies.clear()
return flat_deps
- except UnsatisfiableSpecError, e:
+ except UnsatisfiableSpecError as e:
# Here, the DAG contains two instances of the same package
# with inconsistent constraints. Users cannot produce
# inconsistent specs like this on the command line: the
@@ -1116,17 +1353,11 @@ class Spec(object):
"""Return DependencyMap that points to all the dependencies in this
spec."""
dm = DependencyMap()
+ # XXX(deptype): use a deptype kwarg.
for spec in self.traverse():
dm[spec.name] = spec
return dm
- def flatten(self):
- """Pull all dependencies up to the root (this spec).
- Merge constraints for dependencies with the same name, and if they
- conflict, throw an exception. """
- for dep in self.flat_dependencies(copy=False):
- self._add_dependency(dep)
-
def _evaluate_dependency_conditions(self, name):
"""Evaluate all the conditions on a dependency with this name.
@@ -1146,7 +1377,7 @@ class Spec(object):
dep = Spec(name)
try:
dep.constrain(dep_spec)
- except UnsatisfiableSpecError, e:
+ except UnsatisfiableSpecError as e:
e.message = ("Conflicting conditional dependencies on"
"package %s for spec %s" % (self.name, self))
raise e
@@ -1183,7 +1414,8 @@ class Spec(object):
elif required:
raise UnsatisfiableProviderSpecError(required[0], vdep)
- def _merge_dependency(self, dep, visited, spec_deps, provider_index):
+ def _merge_dependency(self, dep, deptypes, visited, spec_deps,
+ provider_index):
"""Merge the dependency into this spec.
This is the core of normalize(). There are some basic steps:
@@ -1210,7 +1442,9 @@ class Spec(object):
dep = provider
else:
index = ProviderIndex([dep], restrict=True)
- for vspec in (v for v in spec_deps.values() if v.virtual):
+ for vspec in (v.spec
+ for v in spec_deps.values()
+ if v.spec.virtual):
if index.providers_for(vspec):
vspec._replace_with(dep)
del spec_deps[vspec.name]
@@ -1223,25 +1457,25 @@ class Spec(object):
# If the spec isn't already in the set of dependencies, clone
# it from the package description.
if dep.name not in spec_deps:
- spec_deps[dep.name] = dep.copy()
+ spec_deps[dep.name] = DependencySpec(dep.copy(), deptypes)
changed = True
# Constrain package information with spec info
try:
- changed |= spec_deps[dep.name].constrain(dep)
+ changed |= spec_deps[dep.name].spec.constrain(dep)
- except UnsatisfiableSpecError, e:
+ except UnsatisfiableSpecError as e:
e.message = "Invalid spec: '%s'. "
e.message += "Package %s requires %s %s, but spec asked for %s"
- e.message %= (spec_deps[dep.name], dep.name, e.constraint_type,
- e.required, e.provided)
+ e.message %= (spec_deps[dep.name].spec, dep.name,
+ e.constraint_type, e.required, e.provided)
raise e
# Add merged spec to my deps and recurse
dependency = spec_deps[dep.name]
- if dep.name not in self.dependencies:
- self._add_dependency(dependency)
+ if dep.name not in self._dependencies:
+ self._add_dependency(dependency.spec, dependency.deptypes)
- changed |= dependency._normalize_helper(
+ changed |= dependency.spec._normalize_helper(
visited, spec_deps, provider_index)
return changed
@@ -1253,7 +1487,7 @@ class Spec(object):
# if we descend into a virtual spec, there's nothing more
# to normalize. Concretize will finish resolving it later.
- if self.virtual or self.external:
+ if self.virtual or self.external or self.external_module:
return False
# Combine constraints from package deps with constraints from
@@ -1267,10 +1501,11 @@ class Spec(object):
for dep_name in pkg.dependencies:
# Do we depend on dep_name? If so pkg_dep is not None.
pkg_dep = self._evaluate_dependency_conditions(dep_name)
+ deptypes = pkg._deptypes[dep_name]
# If pkg_dep is a dependency, merge it.
if pkg_dep:
changed |= self._merge_dependency(
- pkg_dep, visited, spec_deps, provider_index)
+ pkg_dep, deptypes, visited, spec_deps, provider_index)
any_change |= changed
return any_change
@@ -1300,13 +1535,14 @@ class Spec(object):
# Ensure first that all packages & compilers in the DAG exist.
self.validate_names()
-
# Get all the dependencies into one DependencyMap
- spec_deps = self.flat_dependencies(copy=False)
+ spec_deps = self.flat_dependencies_with_deptype(
+ copy=False, deptype_query=alldeps)
# Initialize index of virtual dependency providers if
# concretize didn't pass us one already
- provider_index = ProviderIndex(spec_deps.values(), restrict=True)
+ provider_index = ProviderIndex(
+ [s.spec for s in spec_deps.values()], restrict=True)
# traverse the package DAG and fill out dependencies according
# to package files & their 'when' specs
@@ -1338,7 +1574,7 @@ class Spec(object):
UnsupportedCompilerError.
"""
for spec in self.traverse():
- # Don't get a package for a virtual name.
+ # raise an UnknownPackageError if the spec's package isn't real.
if (not spec.virtual) and spec.name:
spack.repo.get(spec.fullname)
@@ -1365,8 +1601,8 @@ class Spec(object):
raise UnsatisfiableSpecNameError(self.name, other.name)
if (other.namespace is not None and
- self.namespace is not None and
- other.namespace != self.namespace):
+ self.namespace is not None and
+ other.namespace != self.namespace):
raise UnsatisfiableSpecNameError(self.fullname, other.fullname)
if not self.versions.overlaps(other.versions):
@@ -1378,10 +1614,18 @@ class Spec(object):
raise UnsatisfiableVariantSpecError(self.variants[v],
other.variants[v])
- if self.architecture is not None and other.architecture is not None:
- if self.architecture != other.architecture:
- raise UnsatisfiableArchitectureSpecError(self.architecture,
- other.architecture)
+ # TODO: Check out the logic here
+ sarch, oarch = self.architecture, other.architecture
+ if sarch is not None and oarch is not None:
+ if sarch.platform is not None and oarch.platform is not None:
+ if sarch.platform != oarch.platform:
+ raise UnsatisfiableArchitectureSpecError(sarch, oarch)
+ if sarch.platform_os is not None and oarch.platform_os is not None:
+ if sarch.platform_os != oarch.platform_os:
+ raise UnsatisfiableArchitectureSpecError(sarch, oarch)
+ if sarch.target is not None and oarch.target is not None:
+ if sarch.target != oarch.target:
+ raise UnsatisfiableArchitectureSpecError(sarch, oarch)
changed = False
if self.compiler is not None and other.compiler is not None:
@@ -1395,9 +1639,18 @@ class Spec(object):
changed |= self.compiler_flags.constrain(other.compiler_flags)
- old = self.architecture
- self.architecture = self.architecture or other.architecture
- changed |= (self.architecture != old)
+ old = str(self.architecture)
+ sarch, oarch = self.architecture, other.architecture
+ if sarch is None or oarch is None:
+ self.architecture = sarch or oarch
+ else:
+ if sarch.platform is None or oarch.platform is None:
+ sarch.platform = sarch.platform or oarch.platform
+ if sarch.platform_os is None or oarch.platform_os is None:
+ sarch.platform_os = sarch.platform_os or oarch.platform_os
+ if sarch.target is None or oarch.target is None:
+ sarch.target = sarch.target or oarch.target
+ changed |= (str(self.architecture) != old)
if deps:
changed |= self._constrain_dependencies(other)
@@ -1408,7 +1661,7 @@ class Spec(object):
"""Apply constraints of other spec's dependencies to this spec."""
other = self._autospec(other)
- if not self.dependencies or not other.dependencies:
+ if not self._dependencies or not other._dependencies:
return False
# TODO: might want more detail than this, e.g. specific deps
@@ -1424,13 +1677,17 @@ class Spec(object):
# Update with additional constraints from other spec
for name in other.dep_difference(self):
- self._add_dependency(other[name].copy())
+ dep_spec_copy = other.get_dependency(name)
+ dep_copy = dep_spec_copy.spec
+ deptypes = dep_spec_copy.deptypes
+ self._add_dependency(dep_copy.copy(), deptypes)
changed = True
return changed
def common_dependencies(self, other):
"""Return names of dependencies that self an other have in common."""
+ # XXX(deptype): handle deptypes via deptype kwarg.
common = set(
s.name for s in self.traverse(root=False))
common.intersection_update(
@@ -1499,8 +1756,8 @@ class Spec(object):
# namespaces either match, or other doesn't require one.
if (other.namespace is not None and
- self.namespace is not None and
- self.namespace != other.namespace):
+ self.namespace is not None and
+ self.namespace != other.namespace):
return False
if self.versions and other.versions:
if not self.versions.satisfies(other.versions, strict=strict):
@@ -1523,10 +1780,25 @@ class Spec(object):
# Architecture satisfaction is now component-wise (platform, os, target).
# If not strict, None means unconstrained.
- if self.architecture and other.architecture:
- if self.architecture != other.architecture:
+ sarch, oarch = self.architecture, other.architecture
+ if sarch and oarch:
+ if ((sarch.platform and
+ oarch.platform and
+ sarch.platform != oarch.platform) or
+
+ (sarch.platform_os and
+ oarch.platform_os and
+ sarch.platform_os != oarch.platform_os) or
+
+ (sarch.target and
+ oarch.target and
+ sarch.target != oarch.target)):
return False
- elif strict and (other.architecture and not self.architecture):
+
+ elif strict and oarch and (not sarch or
+ (oarch.platform and not sarch.platform) or
+ (oarch.platform_os and not sarch.platform_os) or
+ (oarch.target and not sarch.target)):
return False
if not self.compiler_flags.satisfies(
@@ -1550,13 +1822,14 @@ class Spec(object):
other = self._autospec(other)
if strict:
- if other.dependencies and not self.dependencies:
+ if other._dependencies and not self._dependencies:
return False
- if not all(dep in self.dependencies for dep in other.dependencies):
+ if not all(dep in self._dependencies
+ for dep in other._dependencies):
return False
- elif not self.dependencies or not other.dependencies:
+ elif not self._dependencies or not other._dependencies:
# if either spec doesn't restrict dependencies then both are
# compatible.
return True
@@ -1579,7 +1852,7 @@ class Spec(object):
# compatible with mpich2)
for spec in self.virtual_dependencies():
if (spec.name in other_index and
- not other_index.providers_for(spec)):
+ not other_index.providers_for(spec)):
return False
for spec in other.virtual_dependencies():
@@ -1601,7 +1874,7 @@ class Spec(object):
Options:
dependencies[=True]
- Whether deps should be copied too. Set to false to copy a
+ Whether deps should be copied too. Set to False to copy a
spec but not its dependencies.
"""
# We don't count dependencies as changes here
@@ -1614,7 +1887,9 @@ class Spec(object):
self.variants != other.variants and
self._normal != other._normal and
self.concrete != other.concrete and
- self.external != other.external)
+ self.external != other.external and
+ self.external_module != other.external_module and
+ self.compiler_flags != other.compiler_flags)
# Local node attributes get copied first.
self.name = other.name
@@ -1622,32 +1897,49 @@ class Spec(object):
self.architecture = other.architecture
self.compiler = other.compiler.copy() if other.compiler else None
if kwargs.get('cleardeps', True):
- self.dependents = DependencyMap()
- self.dependencies = DependencyMap()
+ self._dependents = DependencyMap()
+ self._dependencies = DependencyMap()
self.compiler_flags = other.compiler_flags.copy()
self.variants = other.variants.copy()
self.variants.spec = self
self.external = other.external
+ self.external_module = other.external_module
self.namespace = other.namespace
self._hash = other._hash
# If we copy dependencies, preserve DAG structure in the new spec
if kwargs.get('deps', True):
# This copies the deps from other using _dup(deps=False)
- new_nodes = other.flat_dependencies()
+ # XXX(deptype): We can keep different instances of specs here iff
+ # it is only a 'build' dependency (from its parent).
+ # All other instances must be shared (due to symbol
+ # and PATH contention). These should probably search
+ # for any existing installation which can satisfy the
+ # build and latch onto that because if 3 things need
+ # the same build dependency and it is *not*
+ # available, we only want to build it once.
+ new_nodes = other.flat_dependencies(deptype_query=alldeps)
new_nodes[self.name] = self
- # Hook everything up properly here by traversing.
- for spec in other.traverse(cover='nodes'):
- parent = new_nodes[spec.name]
- for child in spec.dependencies:
- if child not in parent.dependencies:
- parent._add_dependency(new_nodes[child])
+ stack = [other]
+ while stack:
+ cur_spec = stack.pop(0)
+ new_spec = new_nodes[cur_spec.name]
+
+ for depspec in cur_spec._dependencies.values():
+ stack.append(depspec.spec)
+
+ # XXX(deptype): add any new deptypes that may have appeared
+ # here.
+ if depspec.spec.name not in new_spec._dependencies:
+ new_spec._add_dependency(
+ new_nodes[depspec.spec.name], depspec.deptypes)
# Since we preserved structure, we can copy _normal safely.
self._normal = other._normal
self._concrete = other._concrete
self.external = other.external
+ self.external_module = other.external_module
return changed
def copy(self, **kwargs):
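For orientation, the `_dup` hunk above replaces the old name-keyed traversal with an explicit breadth-first rewiring that carries deptypes along each edge. A minimal sketch of that logic, using hypothetical `Node`/`DependencySpec` stand-ins rather than Spack's real classes:

```
from collections import namedtuple

DependencySpec = namedtuple('DependencySpec', ['spec', 'deptypes'])

class Node(object):
    def __init__(self, name):
        self.name = name
        self._dependencies = {}  # name -> DependencySpec

def rewire_copy(root, new_nodes):
    """Make the fresh nodes in new_nodes mirror root's DAG edges."""
    stack = [root]
    while stack:
        cur = stack.pop(0)
        new = new_nodes[cur.name]
        for dep in cur._dependencies.values():
            stack.append(dep.spec)
            # Key edges by name so the copy shares one instance per package.
            if dep.spec.name not in new._dependencies:
                new._dependencies[dep.spec.name] = DependencySpec(
                    new_nodes[dep.spec.name], dep.deptypes)
    return new_nodes[root.name]
```

As in the hunk, edges are keyed by package name, so the copied DAG collapses to one node per name regardless of how many paths reach it.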
@@ -1684,7 +1976,7 @@ class Spec(object):
raise KeyError("No spec with name %s in %s" % (name, self))
def __contains__(self, spec):
- """True if this spec satisfis the provided spec, or if any dependency
+ """True if this spec satisfies the provided spec, or if any dependency
does. If the spec has no name, then we parse this one first.
"""
spec = self._autospec(spec)
@@ -1708,13 +2000,13 @@ class Spec(object):
if self.ne_node(other):
return False
- if len(self.dependencies) != len(other.dependencies):
+ if len(self._dependencies) != len(other._dependencies):
return False
- ssorted = [self.dependencies[name]
- for name in sorted(self.dependencies)]
- osorted = [other.dependencies[name]
- for name in sorted(other.dependencies)]
+ ssorted = [self._dependencies[name].spec
+ for name in sorted(self._dependencies)]
+ osorted = [other._dependencies[name].spec
+ for name in sorted(other._dependencies)]
for s, o in zip(ssorted, osorted):
visited_s = id(s) in vs
@@ -1767,9 +2059,10 @@ class Spec(object):
1. A tuple describing this node in the DAG.
2. The hash of each of this node's dependencies' cmp_keys.
"""
+ dep_dict = self.dependencies_dict(deptype=('link', 'run'))
return self._cmp_node() + (
- tuple(hash(self.dependencies[name])
- for name in sorted(self.dependencies)),)
+ tuple(hash(dep_dict[name])
+ for name in sorted(dep_dict)),)
def colorized(self):
return colorize_spec(self)
@@ -1862,7 +2155,7 @@ class Spec(object):
if self.variants:
write(fmt % str(self.variants), c)
elif c == '=':
- if self.architecture:
+ if self.architecture and str(self.architecture):
write(fmt % (' arch' + c + str(self.architecture)), c)
elif c == '#':
out.write('-' + fmt % (self.dag_hash(7)))
@@ -1920,8 +2213,8 @@ class Spec(object):
if self.variants:
write(fmt % str(self.variants), '+')
elif named_str == 'ARCHITECTURE':
- if self.architecture:
- write(fmt % str(self.architecture), '=')
+ if self.architecture and str(self.architecture):
+ write(fmt % str(self.architecture), ' arch=')
elif named_str == 'SHA1':
if self.dependencies:
out.write(fmt % str(self.dag_hash(7)))
@@ -1946,6 +2239,39 @@ class Spec(object):
def dep_string(self):
return ''.join("^" + dep.format() for dep in self.sorted_deps())
+ def __cmp__(self, other):
+        # Package name sort order is not configurable; it is always alphabetical
+ if self.name != other.name:
+ return cmp(self.name, other.name)
+
+ # Package version is second in compare order
+ pkgname = self.name
+ if self.versions != other.versions:
+ return spack.pkgsort.version_compare(
+ pkgname, self.versions, other.versions)
+
+ # Compiler is third
+ if self.compiler != other.compiler:
+ return spack.pkgsort.compiler_compare(
+ pkgname, self.compiler, other.compiler)
+
+ # Variants
+ if self.variants != other.variants:
+ return spack.pkgsort.variant_compare(
+ pkgname, self.variants, other.variants)
+
+ # Target
+ if self.architecture != other.architecture:
+ return spack.pkgsort.architecture_compare(
+ pkgname, self.architecture, other.architecture)
+
+ # Dependency is not configurable
+ if self._dependencies != other._dependencies:
+ return -1 if self._dependencies < other._dependencies else 1
+
+ # Equal specs
+ return 0
+
def __str__(self):
return self.format() + self.dep_string()
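Since this codebase targets Python 2, defining `__cmp__` is enough to make `sorted()` and the rich comparison operators honor the tiered order above (name, then version, compiler, variants, architecture, dependencies). A toy illustration of the same pattern, without Spack's `pkgsort` hooks:

```
class Item(object):
    def __init__(self, name, version):
        self.name, self.version = name, version

    def __cmp__(self, other):
        # Name is the primary key; version breaks ties.
        if self.name != other.name:
            return cmp(self.name, other.name)
        return cmp(self.version, other.version)

items = [Item('zlib', 2), Item('mpich', 3), Item('zlib', 1)]
assert [(i.name, i.version) for i in sorted(items)] == \
    [('mpich', 3), ('zlib', 1), ('zlib', 2)]
```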
@@ -1959,12 +2285,14 @@ class Spec(object):
indent = kwargs.pop('indent', 0)
fmt = kwargs.pop('format', '$_$@$%@+$+$=')
prefix = kwargs.pop('prefix', None)
+ deptypes = kwargs.pop('deptypes', ('build', 'link'))
check_kwargs(kwargs, self.tree)
out = ""
cur_id = 0
ids = {}
- for d, node in self.traverse(order='pre', cover=cover, depth=True):
+ for d, node in self.traverse(
+ order='pre', cover=cover, depth=True, deptypes=deptypes):
if prefix is not None:
out += prefix(node)
out += " " * indent
@@ -2018,6 +2346,7 @@ class SpecLexer(spack.parse.Lexer):
# Lexer is always the same for every parser.
_lexer = SpecLexer()
+
class SpecParser(spack.parse.Parser):
def __init__(self):
@@ -2026,7 +2355,6 @@ class SpecParser(spack.parse.Parser):
def do_parse(self):
specs = []
-
try:
while self.next:
# TODO: clean this parsing up a bit
@@ -2055,10 +2383,13 @@ class SpecParser(spack.parse.Parser):
specs.append(self.spec(None))
self.previous = None
if self.accept(HASH):
- specs[-1]._add_dependency(self.spec_by_hash())
+ dep = self.spec_by_hash()
else:
self.expect(ID)
- specs[-1]._add_dependency(self.spec(self.token.value))
+ dep = self.spec(self.token.value)
+ # XXX(deptype): default deptypes
+ def_deptypes = ('build', 'link')
+ specs[-1]._add_dependency(dep, def_deptypes)
else:
# Attempt to construct an anonymous spec, but check that
@@ -2067,9 +2398,15 @@ class SpecParser(spack.parse.Parser):
# errors now?
specs.append(self.spec(None, True))
- except spack.parse.ParseError, e:
+ except spack.parse.ParseError as e:
raise SpecParseError(e)
+ # If the spec has an os or a target and no platform, give it
+ # the default platform
+ for spec in specs:
+ for s in spec.traverse():
+ if s.architecture.os_string or s.architecture.target_string:
+ s._set_platform(spack.architecture.platform())
return specs
def parse_compiler(self, text):
@@ -2111,12 +2448,13 @@ class SpecParser(spack.parse.Parser):
spec.name = spec_name
spec.versions = VersionList()
spec.variants = VariantMap(spec)
- spec.architecture = None
+ spec.architecture = spack.architecture.Arch()
spec.compiler = None
spec.external = None
+ spec.external_module = None
spec.compiler_flags = FlagMap(spec)
- spec.dependents = DependencyMap()
- spec.dependencies = DependencyMap()
+ spec._dependents = DependencyMap()
+ spec._dependencies = DependencyMap()
spec.namespace = spec_namespace
spec._hash = None
@@ -2189,12 +2527,6 @@ class SpecParser(spack.parse.Parser):
self.check_identifier()
return self.token.value
- def architecture(self):
- # TODO: Make this work properly as a subcase of variant (includes
- # adding names to grammar)
- self.expect(ID)
- return self.token.value
-
def version(self):
start = None
end = None
@@ -2500,15 +2832,9 @@ class SpackYAMLError(spack.error.SpackError):
super(SpackYAMLError, self).__init__(msg, str(yaml_error))
-class SpackRecordError(spack.error.SpackError):
-
- def __init__(self, msg):
- super(SpackRecordError, self).__init__(msg)
-
-
class AmbiguousHashError(SpecError):
def __init__(self, msg, *specs):
super(AmbiguousHashError, self).__init__(msg)
for spec in specs:
- print ' ', spec.format('$.$@$%@+$+$=$#')
+ print(' ', spec.format('$.$@$%@+$+$=$#'))
diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py
index a76ec168ad..7676cb9ab6 100644
--- a/lib/spack/spack/stage.py
+++ b/lib/spack/spack/stage.py
@@ -37,6 +37,7 @@ import spack
import spack.config
import spack.fetch_strategy as fs
import spack.error
+from spack.version import *
STAGE_PREFIX = 'spack-stage-'
@@ -51,10 +52,12 @@ class Stage(object):
lifecycle looks like this:
```
- with Stage() as stage: # Context manager creates and destroys the stage directory
+ with Stage() as stage: # Context manager creates and destroys the
+ # stage directory
stage.fetch() # Fetch a source archive into the stage.
stage.expand_archive() # Expand the source archive.
- <install> # Build and install the archive. (handled by user of Stage)
+ <install> # Build and install the archive. (handled by
+ # user of Stage)
```
When used as a context manager, the stage is automatically
@@ -71,7 +74,8 @@ class Stage(object):
stage.create() # Explicitly create the stage directory.
stage.fetch() # Fetch a source archive into the stage.
stage.expand_archive() # Expand the source archive.
- <install> # Build and install the archive. (handled by user of Stage)
+ <install> # Build and install the archive. (handled by
+ # user of Stage)
finally:
stage.destroy() # Explicitly destroy the stage directory.
```
@@ -120,13 +124,17 @@ class Stage(object):
elif isinstance(url_or_fetch_strategy, fs.FetchStrategy):
self.fetcher = url_or_fetch_strategy
else:
- raise ValueError("Can't construct Stage without url or fetch strategy")
+ raise ValueError(
+ "Can't construct Stage without url or fetch strategy")
self.fetcher.set_stage(self)
- self.default_fetcher = self.fetcher # self.fetcher can change with mirrors.
- self.skip_checksum_for_mirror = True # used for mirrored archives of repositories.
+ # self.fetcher can change with mirrors.
+ self.default_fetcher = self.fetcher
+ # used for mirrored archives of repositories.
+ self.skip_checksum_for_mirror = True
- # TODO : this uses a protected member of tempfile, but seemed the only way to get a temporary name
- # TODO : besides, the temporary link name won't be the same as the temporary stage area in tmp_root
+        # TODO : this uses a protected member of tempfile, but seemed the only
+        # TODO : way to get a temporary name. Besides, the temporary link name
+        # TODO : won't be the same as the temporary stage area in tmp_root.
self.name = name
if name is None:
self.name = STAGE_PREFIX + next(tempfile._get_candidate_names())
@@ -143,7 +151,6 @@ class Stage(object):
# Flag to decide whether to delete the stage folder on exit or not
self.keep = keep
-
def __enter__(self):
"""
Entering a stage context will create the stage directory
@@ -154,7 +161,6 @@ class Stage(object):
self.create()
return self
-
def __exit__(self, exc_type, exc_val, exc_tb):
"""
Exiting from a stage context will delete the stage directory unless:
@@ -173,12 +179,10 @@ class Stage(object):
if exc_type is None and not self.keep:
self.destroy()
-
def _need_to_create_path(self):
"""Makes sure nothing weird has happened since the last time we
looked at path. Returns True if path already exists and is ok.
- Returns False if path needs to be created.
- """
+ Returns False if path needs to be created."""
# Path doesn't exist yet. Will need to create it.
if not os.path.exists(self.path):
return True
@@ -196,7 +200,8 @@ class Stage(object):
if spack.use_tmp_stage:
# If we're using a tmp dir, it's a link, and it points at the
# right spot, then keep it.
- if (real_path.startswith(real_tmp) and os.path.exists(real_path)):
+ if (real_path.startswith(real_tmp) and
+ os.path.exists(real_path)):
return False
else:
# otherwise, just unlink it and start over.
@@ -204,7 +209,8 @@ class Stage(object):
return True
else:
- # If we're not tmp mode, then it's a link and we want a directory.
+ # If we're not tmp mode, then it's a link and we want a
+ # directory.
os.unlink(self.path)
return True
@@ -215,10 +221,12 @@ class Stage(object):
"""Possible archive file paths."""
paths = []
if isinstance(self.fetcher, fs.URLFetchStrategy):
- paths.append(os.path.join(self.path, os.path.basename(self.fetcher.url)))
+ paths.append(os.path.join(
+ self.path, os.path.basename(self.fetcher.url)))
if self.mirror_path:
- paths.append(os.path.join(self.path, os.path.basename(self.mirror_path)))
+ paths.append(os.path.join(
+ self.path, os.path.basename(self.mirror_path)))
return paths
@@ -227,10 +235,12 @@ class Stage(object):
"""Path to the source archive within this stage directory."""
paths = []
if isinstance(self.fetcher, fs.URLFetchStrategy):
- paths.append(os.path.join(self.path, os.path.basename(self.fetcher.url)))
+ paths.append(os.path.join(
+ self.path, os.path.basename(self.fetcher.url)))
if self.mirror_path:
- paths.append(os.path.join(self.path, os.path.basename(self.mirror_path)))
+ paths.append(os.path.join(
+ self.path, os.path.basename(self.mirror_path)))
for path in paths:
if os.path.exists(path):
@@ -262,7 +272,8 @@ class Stage(object):
return None
def chdir(self):
- """Changes directory to the stage path. Or dies if it is not set up."""
+ """Changes directory to the stage path. Or dies if it is not set
+ up."""
if os.path.isdir(self.path):
os.chdir(self.path)
else:
@@ -304,6 +315,26 @@ class Stage(object):
# Add URL strategies for all the mirrors with the digest
for url in urls:
fetchers.insert(0, fs.URLFetchStrategy(url, digest))
+ fetchers.insert(0, spack.fetch_cache.fetcher(self.mirror_path,
+ digest))
+
+ # Look for the archive in list_url
+ package_name = os.path.dirname(self.mirror_path)
+ pkg = spack.repo.get(package_name)
+ if pkg.list_url is not None and pkg.url is not None:
+ try:
+ archive_version = spack.url.parse_version(
+ self.default_fetcher.url)
+ versions = pkg.fetch_remote_versions()
+ try:
+ url_from_list = versions[Version(archive_version)]
+ fetchers.append(fs.URLFetchStrategy(
+ url_from_list, digest))
+ except KeyError:
+                    tty.msg("Cannot find version %s in url_list" %
+ archive_version)
+ except:
+ tty.msg("Could not determine url from list_url.")
for fetcher in fetchers:
try:
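Taken together, the additions above give `fetch()` a clear priority order: the local fetch cache first, then mirror URLs, then the default fetcher, with a URL recovered from the package's `list_url` appended as a last resort. A schematic of that fallback loop (the function names here are hypothetical, not Spack API):

```
def fetch_with_fallbacks(cache_fetcher, mirror_fetchers, default_fetcher,
                         list_url_fetcher=None):
    fetchers = [default_fetcher]
    fetchers[:0] = mirror_fetchers       # mirrors outrank the default URL
    fetchers.insert(0, cache_fetcher)    # the local cache is tried first
    if list_url_fetcher is not None:
        fetchers.append(list_url_fetcher)  # last resort
    for fetcher in fetchers:
        try:
            return fetcher()             # first success wins
        except Exception:
            continue
    raise RuntimeError("all fetchers failed")
```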
@@ -323,7 +354,8 @@ class Stage(object):
def check(self):
"""Check the downloaded archive against a checksum digest.
No-op if this stage checks code out of a repository."""
- if self.fetcher is not self.default_fetcher and self.skip_checksum_for_mirror:
+ if self.fetcher is not self.default_fetcher and \
+ self.skip_checksum_for_mirror:
tty.warn("Fetching from mirror without a checksum!",
"This package is normally checked out from a version "
"control system, but it has been archived on a spack "
@@ -333,11 +365,13 @@ class Stage(object):
else:
self.fetcher.check()
+ def cache_local(self):
+ spack.fetch_cache.store(self.fetcher, self.mirror_path)
+
def expand_archive(self):
"""Changes to the stage directory and attempt to expand the downloaded
- archive. Fail if the stage is not set up or if the archive is not yet
- downloaded.
- """
+ archive. Fail if the stage is not set up or if the archive is not yet
+ downloaded."""
archive_dir = self.source_path
if not archive_dir:
self.fetcher.expand()
@@ -379,8 +413,8 @@ class Stage(object):
# Create the top-level stage directory
mkdirp(spack.stage_path)
remove_dead_links(spack.stage_path)
- # If a tmp_root exists then create a directory there and then link it in the stage area,
- # otherwise create the stage directory in self.path
+ # If a tmp_root exists then create a directory there and then link it
+ # in the stage area, otherwise create the stage directory in self.path
if self._need_to_create_path():
if self.tmp_root:
tmp_dir = tempfile.mkdtemp('', STAGE_PREFIX, self.tmp_root)
@@ -402,6 +436,7 @@ class Stage(object):
class ResourceStage(Stage):
+
def __init__(self, url_or_fetch_strategy, root, resource, **kwargs):
super(ResourceStage, self).__init__(url_or_fetch_strategy, **kwargs)
self.root_stage = root
@@ -411,12 +446,15 @@ class ResourceStage(Stage):
super(ResourceStage, self).expand_archive()
root_stage = self.root_stage
resource = self.resource
- placement = os.path.basename(self.source_path) if resource.placement is None else resource.placement
+ placement = os.path.basename(self.source_path) \
+ if resource.placement is None \
+ else resource.placement
if not isinstance(placement, dict):
placement = {'': placement}
# Make the paths in the dictionary absolute and link
for key, value in placement.iteritems():
- target_path = join_path(root_stage.source_path, resource.destination)
+ target_path = join_path(
+ root_stage.source_path, resource.destination)
destination_path = join_path(target_path, value)
source_path = join_path(self.source_path, key)
@@ -430,21 +468,23 @@ class ResourceStage(Stage):
if not os.path.exists(destination_path):
# Create a symlink
- tty.info('Moving resource stage\n\tsource : {stage}\n\tdestination : {destination}'.format(
- stage=source_path, destination=destination_path
- ))
+ tty.info('Moving resource stage\n\tsource : '
+ '{stage}\n\tdestination : {destination}'.format(
+ stage=source_path, destination=destination_path
+ ))
shutil.move(source_path, destination_path)
-@pattern.composite(method_list=['fetch', 'create', 'check', 'expand_archive', 'restage', 'destroy'])
+@pattern.composite(method_list=['fetch', 'create', 'check', 'expand_archive',
+ 'restage', 'destroy', 'cache_local'])
class StageComposite:
- """
- Composite for Stage type objects. The first item in this composite is considered to be the root package, and
- operations that return a value are forwarded to it.
- """
+ """Composite for Stage type objects. The first item in this composite is
+ considered to be the root package, and operations that return a value are
+ forwarded to it."""
#
# __enter__ and __exit__ delegate to all stages in the composite.
#
+
def __enter__(self):
for item in self:
item.__enter__()
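The `@pattern.composite` decorator (its implementation is not shown in this diff) is assumed to forward each listed method to every stage in the composite, with the first item treated as the root whose return value wins, per the docstring above. Roughly:

```
FORWARDED = ('fetch', 'create', 'check', 'expand_archive',
             'restage', 'destroy', 'cache_local')

class SketchComposite(list):
    """Illustrative only; not Spack's spack.util.pattern.composite."""

    def __getattr__(self, name):
        if name not in FORWARDED:
            raise AttributeError(name)

        def forward(*args, **kwargs):
            # Call the method on every child; return the root's result.
            results = [getattr(item, name)(*args, **kwargs) for item in self]
            return results[0] if results else None
        return forward
```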
@@ -489,8 +529,11 @@ class DIYStage(object):
raise ChdirError("Setup failed: no such directory: " + self.path)
# DIY stages do nothing as context managers.
- def __enter__(self): pass
- def __exit__(self, exc_type, exc_val, exc_tb): pass
+ def __enter__(self):
+ pass
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ pass
def chdir_to_source(self):
self.chdir()
@@ -511,6 +554,9 @@ class DIYStage(object):
# No need to destroy DIY stage.
pass
+ def cache_local(self):
+ tty.msg("Sources for DIY stages are not cached")
+
def _get_mirrors():
"""Get mirrors from spack configuration."""
diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py
index 891dc873fd..db683917b5 100644
--- a/lib/spack/spack/test/__init__.py
+++ b/lib/spack/spack/test/__init__.py
@@ -23,6 +23,7 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
+import os
import llnl.util.tty as tty
import nose
@@ -31,15 +32,55 @@ from llnl.util.filesystem import join_path
from llnl.util.tty.colify import colify
from spack.test.tally_plugin import Tally
"""Names of tests to be included in Spack's test suite"""
-test_names = ['versions', 'url_parse', 'url_substitution', 'packages', 'stage',
- 'spec_syntax', 'spec_semantics', 'spec_dag', 'concretize',
- 'multimethod', 'install', 'package_sanity', 'config',
- 'directory_layout', 'pattern', 'python_version', 'git_fetch',
- 'svn_fetch', 'hg_fetch', 'mirror', 'modules', 'url_extrapolate',
- 'cc', 'link_tree', 'spec_yaml', 'optional_deps',
- 'make_executable', 'configure_guess', 'lock', 'database',
- 'namespace_trie', 'yaml', 'sbang', 'environment', 'cmd.find',
- 'cmd.uninstall', 'cmd.test_install']
+
+# All the tests Spack knows about.
+# Keep these one per line so that it's easy to see changes in diffs.
+test_names = [
+ 'architecture',
+ 'build_system_guess',
+ 'cc',
+ 'cmd.find',
+ 'cmd.module',
+ 'cmd.test_install',
+ 'cmd.uninstall',
+ 'concretize',
+ 'concretize_preferences',
+ 'config',
+ 'database',
+ 'directory_layout',
+ 'environment',
+ 'file_cache',
+ 'git_fetch',
+ 'hg_fetch',
+ 'install',
+ 'link_tree',
+ 'lock',
+ 'make_executable',
+ 'mirror',
+ 'modules',
+ 'multimethod',
+ 'namespace_trie',
+ 'optional_deps',
+ 'package_sanity',
+ 'packages',
+ 'pattern',
+ 'python_version',
+ 'sbang',
+ 'spec_dag',
+ 'spec_semantics',
+ 'spec_syntax',
+ 'spec_yaml',
+ 'stage',
+ 'svn_fetch',
+ 'url_extrapolate',
+ 'url_parse',
+ 'url_substitution',
+ 'versions',
+ 'provider_index',
+ 'yaml',
+ # This test needs to be last until global compiler cache is fixed.
+ 'cmd.test_compiler_cmd',
+]
def list_tests():
@@ -50,6 +91,10 @@ def list_tests():
def run(names, outputDir, verbose=False):
"""Run tests with the supplied names. Names should be a list. If
it's empty, run ALL of Spack's tests."""
+    # Print output to stdout if verbose is set.
+ if verbose:
+ os.environ['NOSE_NOCAPTURE'] = '1'
+
if not names:
names = test_names
else:
diff --git a/lib/spack/spack/test/architecture.py b/lib/spack/spack/test/architecture.py
new file mode 100644
index 0000000000..22ddd4c97e
--- /dev/null
+++ b/lib/spack/spack/test/architecture.py
@@ -0,0 +1,163 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+""" Test checks if the architecture class is created correctly and also that
+ the functions are looking for the correct architecture name
+"""
+import itertools
+import os
+import platform as py_platform
+import spack
+import spack.architecture
+from spack.spec import *
+from spack.platforms.cray import Cray
+from spack.platforms.linux import Linux
+from spack.platforms.bgq import Bgq
+from spack.platforms.darwin import Darwin
+
+from spack.test.mock_packages_test import *
+
+
+class ArchitectureTest(MockPackagesTest):
+
+ def setUp(self):
+ super(ArchitectureTest, self).setUp()
+ self.platform = spack.architecture.platform()
+
+ def tearDown(self):
+ super(ArchitectureTest, self).tearDown()
+
+ def test_dict_functions_for_architecture(self):
+ arch = spack.architecture.Arch()
+ arch.platform = spack.architecture.platform()
+ arch.platform_os = arch.platform.operating_system('default_os')
+ arch.target = arch.platform.target('default_target')
+
+ d = arch.to_dict()
+
+ new_arch = spack.architecture.arch_from_dict(d)
+
+ self.assertEqual(arch, new_arch)
+
+ self.assertTrue(isinstance(arch, spack.architecture.Arch))
+ self.assertTrue(isinstance(arch.platform, spack.architecture.Platform))
+ self.assertTrue(isinstance(arch.platform_os,
+ spack.architecture.OperatingSystem))
+ self.assertTrue(isinstance(arch.target,
+ spack.architecture.Target))
+ self.assertTrue(isinstance(new_arch, spack.architecture.Arch))
+ self.assertTrue(isinstance(new_arch.platform,
+ spack.architecture.Platform))
+ self.assertTrue(isinstance(new_arch.platform_os,
+ spack.architecture.OperatingSystem))
+ self.assertTrue(isinstance(new_arch.target,
+ spack.architecture.Target))
+
+ def test_platform(self):
+ output_platform_class = spack.architecture.platform()
+ if os.path.exists('/opt/cray/craype'):
+ my_platform_class = Cray()
+ elif os.path.exists('/bgsys'):
+ my_platform_class = Bgq()
+ elif 'Linux' in py_platform.system():
+ my_platform_class = Linux()
+ elif 'Darwin' in py_platform.system():
+ my_platform_class = Darwin()
+
+ self.assertEqual(str(output_platform_class), str(my_platform_class))
+
+ def test_boolness(self):
+ # Make sure architecture reports that it's False when nothing's set.
+ arch = spack.architecture.Arch()
+ self.assertFalse(arch)
+
+ # Dummy architecture parts
+ plat = spack.architecture.platform()
+ plat_os = plat.operating_system('default_os')
+ plat_target = plat.target('default_target')
+
+ # Make sure architecture reports that it's True when anything is set.
+ arch = spack.architecture.Arch()
+ arch.platform = plat
+ self.assertTrue(arch)
+
+ arch = spack.architecture.Arch()
+ arch.platform_os = plat_os
+ self.assertTrue(arch)
+
+ arch = spack.architecture.Arch()
+ arch.target = plat_target
+ self.assertTrue(arch)
+
+ def test_user_front_end_input(self):
+        """Test that when the user inputs just 'frontend', both the frontend
+        target and the frontend operating system match.
+        """
+ frontend_os = self.platform.operating_system("frontend")
+ frontend_target = self.platform.target("frontend")
+ frontend_spec = Spec("libelf os=frontend target=frontend")
+ frontend_spec.concretize()
+ self.assertEqual(frontend_os, frontend_spec.architecture.platform_os)
+ self.assertEqual(frontend_target, frontend_spec.architecture.target)
+
+ def test_user_back_end_input(self):
+        """Test that when the user inputs 'backend', both the backend target
+        and the backend operating system match.
+        """
+ backend_os = self.platform.operating_system("backend")
+ backend_target = self.platform.target("backend")
+ backend_spec = Spec("libelf os=backend target=backend")
+ backend_spec.concretize()
+ self.assertEqual(backend_os, backend_spec.architecture.platform_os)
+ self.assertEqual(backend_target, backend_spec.architecture.target)
+
+ def test_user_defaults(self):
+ default_os = self.platform.operating_system("default_os")
+ default_target = self.platform.target("default_target")
+
+ default_spec = Spec("libelf") # default is no args
+ default_spec.concretize()
+ self.assertEqual(default_os, default_spec.architecture.platform_os)
+ self.assertEqual(default_target, default_spec.architecture.target)
+
+ def test_user_input_combination(self):
+ os_list = self.platform.operating_sys.keys()
+ target_list = self.platform.targets.keys()
+ additional = ["fe", "be", "frontend", "backend"]
+
+ os_list.extend(additional)
+ target_list.extend(additional)
+
+ combinations = itertools.product(os_list, target_list)
+ results = []
+ for arch in combinations:
+ o, t = arch
+ spec = Spec("libelf os=%s target=%s" % (o, t))
+ spec.concretize()
+ results.append(spec.architecture.platform_os ==
+ self.platform.operating_system(o))
+ results.append(spec.architecture.target == self.platform.target(t))
+ res = all(results)
+
+ self.assertTrue(res)
diff --git a/lib/spack/spack/test/configure_guess.py b/lib/spack/spack/test/build_system_guess.py
index bad3673e7a..e728a47cf4 100644
--- a/lib/spack/spack/test/configure_guess.py
+++ b/lib/spack/spack/test/build_system_guess.py
@@ -28,14 +28,14 @@ import tempfile
import unittest
from llnl.util.filesystem import *
-from spack.cmd.create import ConfigureGuesser
+from spack.cmd.create import BuildSystemGuesser
from spack.stage import Stage
from spack.test.mock_packages_test import *
from spack.util.executable import which
class InstallTest(unittest.TestCase):
- """Tests the configure guesser in spack create"""
+ """Tests the build system guesser in spack create"""
def setUp(self):
self.tar = which('tar')
@@ -44,12 +44,10 @@ class InstallTest(unittest.TestCase):
os.chdir(self.tmpdir)
self.stage = None
-
def tearDown(self):
shutil.rmtree(self.tmpdir, ignore_errors=True)
os.chdir(self.orig_dir)
-
def check_archive(self, filename, system):
mkdirp('archive')
touch(join_path('archive', filename))
@@ -60,24 +58,24 @@ class InstallTest(unittest.TestCase):
with Stage(url) as stage:
stage.fetch()
- guesser = ConfigureGuesser()
- guesser(stage)
+ guesser = BuildSystemGuesser()
+ guesser(stage, url)
self.assertEqual(system, guesser.build_system)
-
- def test_python(self):
- self.check_archive('setup.py', 'python')
-
-
def test_autotools(self):
self.check_archive('configure', 'autotools')
-
def test_cmake(self):
self.check_archive('CMakeLists.txt', 'cmake')
+ def test_scons(self):
+ self.check_archive('SConstruct', 'scons')
- def test_unknown(self):
- self.check_archive('foobar', 'unknown')
+ def test_python(self):
+ self.check_archive('setup.py', 'python')
+ def test_R(self):
+ self.check_archive('NAMESPACE', 'R')
+ def test_unknown(self):
+ self.check_archive('foobar', 'unknown')
diff --git a/lib/spack/spack/test/cc.py b/lib/spack/spack/test/cc.py
index ea2b164462..f3e4bb31d2 100644
--- a/lib/spack/spack/test/cc.py
+++ b/lib/spack/spack/test/cc.py
@@ -45,7 +45,8 @@ test_command = [
'-llib1', '-llib2',
'arg4',
'-Wl,--end-group',
- '-Xlinker', '-rpath', '-Xlinker', '/third/rpath', '-Xlinker', '-rpath', '-Xlinker', '/fourth/rpath',
+ '-Xlinker', '-rpath', '-Xlinker', '/third/rpath', '-Xlinker',
+ '-rpath', '-Xlinker', '/fourth/rpath',
'-llib3', '-llib4',
'arg5', 'arg6']
@@ -67,7 +68,7 @@ class CompilerTest(unittest.TestCase):
os.environ['SPACK_FC'] = self.realcc
os.environ['SPACK_PREFIX'] = self.prefix
- os.environ['SPACK_ENV_PATH']="test"
+ os.environ['SPACK_ENV_PATH'] = "test"
os.environ['SPACK_DEBUG_LOG_DIR'] = "."
os.environ['SPACK_COMPILER_SPEC'] = "gcc@4.4.7"
os.environ['SPACK_SHORT_SPEC'] = "foo@1.2"
@@ -97,16 +98,13 @@ class CompilerTest(unittest.TestCase):
if 'SPACK_DEPENDENCIES' in os.environ:
del os.environ['SPACK_DEPENDENCIES']
-
def tearDown(self):
shutil.rmtree(self.tmp_deps, True)
-
def check_cc(self, command, args, expected):
os.environ['SPACK_TEST_COMMAND'] = command
self.assertEqual(self.cc(*args, output=str).strip(), expected)
-
def check_cxx(self, command, args, expected):
os.environ['SPACK_TEST_COMMAND'] = command
self.assertEqual(self.cxx(*args, output=str).strip(), expected)
@@ -115,46 +113,46 @@ class CompilerTest(unittest.TestCase):
os.environ['SPACK_TEST_COMMAND'] = command
self.assertEqual(self.fc(*args, output=str).strip(), expected)
-
def check_ld(self, command, args, expected):
os.environ['SPACK_TEST_COMMAND'] = command
self.assertEqual(self.ld(*args, output=str).strip(), expected)
-
def check_cpp(self, command, args, expected):
os.environ['SPACK_TEST_COMMAND'] = command
self.assertEqual(self.cpp(*args, output=str).strip(), expected)
-
def test_vcheck_mode(self):
self.check_cc('dump-mode', ['-I/include', '--version'], "vcheck")
self.check_cc('dump-mode', ['-I/include', '-V'], "vcheck")
self.check_cc('dump-mode', ['-I/include', '-v'], "vcheck")
self.check_cc('dump-mode', ['-I/include', '-dumpversion'], "vcheck")
self.check_cc('dump-mode', ['-I/include', '--version', '-c'], "vcheck")
- self.check_cc('dump-mode', ['-I/include', '-V', '-o', 'output'], "vcheck")
-
+ self.check_cc('dump-mode', ['-I/include',
+ '-V', '-o', 'output'], "vcheck")
def test_cpp_mode(self):
self.check_cc('dump-mode', ['-E'], "cpp")
self.check_cpp('dump-mode', [], "cpp")
-
def test_as_mode(self):
self.check_cc('dump-mode', ['-S'], "as")
-
def test_ccld_mode(self):
self.check_cc('dump-mode', [], "ccld")
self.check_cc('dump-mode', ['foo.c', '-o', 'foo'], "ccld")
- self.check_cc('dump-mode', ['foo.c', '-o', 'foo', '-Wl,-rpath,foo'], "ccld")
- self.check_cc('dump-mode', ['foo.o', 'bar.o', 'baz.o', '-o', 'foo', '-Wl,-rpath,foo'], "ccld")
-
+ self.check_cc('dump-mode', ['foo.c', '-o',
+ 'foo', '-Wl,-rpath,foo'], "ccld")
+ self.check_cc(
+ 'dump-mode',
+ ['foo.o', 'bar.o', 'baz.o', '-o', 'foo', '-Wl,-rpath,foo'],
+ "ccld")
def test_ld_mode(self):
self.check_ld('dump-mode', [], "ld")
- self.check_ld('dump-mode', ['foo.o', 'bar.o', 'baz.o', '-o', 'foo', '-Wl,-rpath,foo'], "ld")
-
+ self.check_ld(
+ 'dump-mode',
+ ['foo.o', 'bar.o', 'baz.o', '-o', 'foo', '-Wl,-rpath,foo'],
+ "ld")
def test_flags(self):
os.environ['SPACK_LDFLAGS'] = '-L foo'
@@ -176,10 +174,11 @@ class CompilerTest(unittest.TestCase):
# Test cppflags added properly in cpp mode
self.check_cpp('dump-args', test_command,
"cpp " +
- '-g -O1 ' +
- ' '.join(test_command))
+ '-g -O1 ' +
+ ' '.join(test_command))
- # Test ldflags, cppflags, and language specific flags are added in proper order
+ # Test ldflags, cppflags, and language specific flags are added in
+ # proper order
self.check_cc('dump-args', test_command,
self.realcc + ' ' +
'-Wl,-rpath,' + self.prefix + '/lib ' +
@@ -191,14 +190,14 @@ class CompilerTest(unittest.TestCase):
'-lfoo')
self.check_cxx('dump-args', test_command,
- self.realcc + ' ' +
- '-Wl,-rpath,' + self.prefix + '/lib ' +
- '-Wl,-rpath,' + self.prefix + '/lib64 ' +
- '-g -O1 ' +
- '-Werror ' +
- '-L foo ' +
- ' '.join(test_command) + ' ' +
- '-lfoo')
+ self.realcc + ' ' +
+ '-Wl,-rpath,' + self.prefix + '/lib ' +
+ '-Wl,-rpath,' + self.prefix + '/lib64 ' +
+ '-g -O1 ' +
+ '-Werror ' +
+ '-L foo ' +
+ ' '.join(test_command) + ' ' +
+ '-lfoo')
self.check_fc('dump-args', test_command,
self.realcc + ' ' +
@@ -210,9 +209,8 @@ class CompilerTest(unittest.TestCase):
' '.join(test_command) + ' ' +
'-lfoo')
- os.environ['SPACK_LDFLAGS']=''
- os.environ['SPACK_LDLIBS']=''
-
+ os.environ['SPACK_LDFLAGS'] = ''
+ os.environ['SPACK_LDLIBS'] = ''
def test_dep_rpath(self):
"""Ensure RPATHs for root package are added."""
@@ -222,7 +220,6 @@ class CompilerTest(unittest.TestCase):
'-Wl,-rpath,' + self.prefix + '/lib64 ' +
' '.join(test_command))
-
def test_dep_include(self):
"""Ensure a single dependency include directory is added."""
os.environ['SPACK_DEPENDENCIES'] = self.dep4
@@ -233,7 +230,6 @@ class CompilerTest(unittest.TestCase):
'-I' + self.dep4 + '/include ' +
' '.join(test_command))
-
def test_dep_lib(self):
"""Ensure a single dependency RPATH is added."""
os.environ['SPACK_DEPENDENCIES'] = self.dep2
@@ -245,7 +241,6 @@ class CompilerTest(unittest.TestCase):
'-Wl,-rpath,' + self.dep2 + '/lib64 ' +
' '.join(test_command))
-
def test_all_deps(self):
"""Ensure includes and RPATHs for all deps are added. """
os.environ['SPACK_DEPENDENCIES'] = ':'.join([
@@ -274,7 +269,6 @@ class CompilerTest(unittest.TestCase):
' '.join(test_command))
-
def test_ld_deps(self):
"""Ensure no (extra) -I args or -Wl, are passed in ld mode."""
os.environ['SPACK_DEPENDENCIES'] = ':'.join([
diff --git a/lib/spack/spack/test/cmd/find.py b/lib/spack/spack/test/cmd/find.py
index 371e9650e0..fa82db7733 100644
--- a/lib/spack/spack/test/cmd/find.py
+++ b/lib/spack/spack/test/cmd/find.py
@@ -27,11 +27,7 @@
import spack.cmd.find
import unittest
-
-class Bunch(object):
-
- def __init__(self, **kwargs):
- self.__dict__.update(kwargs)
+from spack.util.pattern import Bunch
class FindTest(unittest.TestCase):
diff --git a/lib/spack/spack/test/cmd/module.py b/lib/spack/spack/test/cmd/module.py
new file mode 100644
index 0000000000..3a0ce32e6c
--- /dev/null
+++ b/lib/spack/spack/test/cmd/module.py
@@ -0,0 +1,91 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import argparse
+import os.path
+
+import spack.cmd.module as module
+import spack.modules as modules
+import spack.test.mock_database
+
+
+class TestModule(spack.test.mock_database.MockDatabase):
+
+ def _get_module_files(self, args):
+ return [modules.module_types[args.module_type](spec).file_name
+ for spec in args.specs]
+
+ def test_module_common_operations(self):
+ parser = argparse.ArgumentParser()
+ module.setup_parser(parser)
+
+        # Try to remove a non-existent module [tcl]
+ args = parser.parse_args(['rm', 'doesnotexist'])
+ self.assertRaises(SystemExit, module.module, parser, args)
+
+ # Remove existing modules [tcl]
+ args = parser.parse_args(['rm', '-y', 'mpileaks'])
+ module_files = self._get_module_files(args)
+ for item in module_files:
+ self.assertTrue(os.path.exists(item))
+ module.module(parser, args)
+ for item in module_files:
+ self.assertFalse(os.path.exists(item))
+
+ # Add them back [tcl]
+ args = parser.parse_args(['refresh', '-y', 'mpileaks'])
+ module.module(parser, args)
+ for item in module_files:
+ self.assertTrue(os.path.exists(item))
+
+ # TODO : test the --delete-tree option
+ # TODO : this requires having a separate directory for test modules
+
+ # Try to find a module with multiple matches
+ args = parser.parse_args(['find', 'mpileaks'])
+ self.assertRaises(SystemExit, module.module, parser, args)
+
+ # Try to find a module with no matches
+ args = parser.parse_args(['find', 'doesnotexist'])
+ self.assertRaises(SystemExit, module.module, parser, args)
+
+ # Try to find a module
+ args = parser.parse_args(['find', 'libelf'])
+ module.module(parser, args)
+
+ # Remove existing modules [dotkit]
+ args = parser.parse_args(['rm', '-y', '-m', 'dotkit', 'mpileaks'])
+ module_files = self._get_module_files(args)
+ for item in module_files:
+ self.assertTrue(os.path.exists(item))
+ module.module(parser, args)
+ for item in module_files:
+ self.assertFalse(os.path.exists(item))
+
+ # Add them back [dotkit]
+ args = parser.parse_args(['refresh', '-y', '-m', 'dotkit', 'mpileaks'])
+ module.module(parser, args)
+ for item in module_files:
+ self.assertTrue(os.path.exists(item))
+ # TODO : add tests for loads and find to check the prompt format
diff --git a/lib/spack/spack/test/cmd/test_compiler_cmd.py b/lib/spack/spack/test/cmd/test_compiler_cmd.py
new file mode 100644
index 0000000000..fa806ee6f4
--- /dev/null
+++ b/lib/spack/spack/test/cmd/test_compiler_cmd.py
@@ -0,0 +1,82 @@
+import os
+import shutil
+from tempfile import mkdtemp
+
+from llnl.util.filesystem import set_executable, mkdirp
+
+import spack.spec
+import spack.cmd.compiler
+import spack.compilers
+from spack.version import Version
+from spack.test.mock_packages_test import *
+
+test_version = '4.5-spacktest'
+
+
+class MockArgs(object):
+
+ def __init__(self, add_paths=[], scope=None, compiler_spec=None, all=None):
+ self.add_paths = add_paths
+ self.scope = scope
+ self.compiler_spec = compiler_spec
+ self.all = all
+
+
+def make_mock_compiler():
+ """Make a directory containing a fake, but detectable compiler."""
+ mock_compiler_dir = mkdtemp()
+ bin_dir = os.path.join(mock_compiler_dir, 'bin')
+ mkdirp(bin_dir)
+
+ gcc_path = os.path.join(bin_dir, 'gcc')
+ gxx_path = os.path.join(bin_dir, 'g++')
+ gfortran_path = os.path.join(bin_dir, 'gfortran')
+
+ with open(gcc_path, 'w') as f:
+ f.write("""\
+#!/bin/sh
+
+for arg in "$@"; do
+ if [ "$arg" = -dumpversion ]; then
+ echo '%s'
+ fi
+done
+""" % test_version)
+
+ # Create some mock compilers in the temporary directory
+ set_executable(gcc_path)
+ shutil.copy(gcc_path, gxx_path)
+ shutil.copy(gcc_path, gfortran_path)
+
+ return mock_compiler_dir
+
+
+class CompilerCmdTest(MockPackagesTest):
+ """ Test compiler commands for add and remove """
+
+ def test_compiler_remove(self):
+ args = MockArgs(all=True, compiler_spec='gcc@4.5.0')
+ spack.cmd.compiler.compiler_remove(args)
+ compilers = spack.compilers.all_compilers()
+ self.assertTrue(spack.spec.CompilerSpec("gcc@4.5.0") not in compilers)
+
+ def test_compiler_add(self):
+ # compilers available by default.
+ old_compilers = set(spack.compilers.all_compilers())
+
+ # add our new compiler and find again.
+ compiler_dir = make_mock_compiler()
+
+ try:
+ args = MockArgs(add_paths=[compiler_dir])
+ spack.cmd.compiler.compiler_find(args)
+
+ # ensure new compiler is in there
+ new_compilers = set(spack.compilers.all_compilers())
+ new_compiler = new_compilers - old_compilers
+ self.assertTrue(new_compiler)
+ self.assertTrue(new_compiler.pop().version ==
+ Version(test_version))
+
+ finally:
+ shutil.rmtree(compiler_dir, ignore_errors=True)
diff --git a/lib/spack/spack/test/cmd/test_install.py b/lib/spack/spack/test/cmd/test_install.py
index d17e013ed2..39287d5d6d 100644
--- a/lib/spack/spack/test/cmd/test_install.py
+++ b/lib/spack/spack/test/cmd/test_install.py
@@ -22,18 +22,24 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+import StringIO
import collections
-from contextlib import contextmanager
+import os
+import unittest
+import contextlib
-import StringIO
+import spack
+import spack.cmd
FILE_REGISTRY = collections.defaultdict(StringIO.StringIO)
+
# Monkey-patch open to write module files to a StringIO instance
-@contextmanager
+@contextlib.contextmanager
def mock_open(filename, mode):
if not mode == 'wb':
- raise RuntimeError('test.test_install : unexpected opening mode for monkey-patched open')
+ message = 'test.test_install : unexpected opening mode for mock_open'
+ raise RuntimeError(message)
FILE_REGISTRY[filename] = StringIO.StringIO()
@@ -44,31 +50,45 @@ def mock_open(filename, mode):
FILE_REGISTRY[filename] = handle.getvalue()
handle.close()
-import os
-import itertools
-import unittest
-
-import spack
-import spack.cmd
-
-# The use of __import__ is necessary to maintain a name with hyphen (which cannot be an identifier in python)
+# The use of __import__ is necessary to import a module whose name contains
+# a hyphen (hyphens cannot appear in Python identifiers)
test_install = __import__("spack.cmd.test-install", fromlist=['test_install'])
class MockSpec(object):
+
def __init__(self, name, version, hashStr=None):
- self.dependencies = {}
+ self._dependencies = {}
self.name = name
self.version = version
self.hash = hashStr if hashStr else hash((name, version))
+ def _deptype_norm(self, deptype):
+ if deptype is None:
+ return spack.alldeps
+ # Force deptype to be a tuple so that we can do set intersections.
+ if isinstance(deptype, str):
+ return (deptype,)
+ return deptype
+
+ def _find_deps(self, where, deptype):
+ deptype = self._deptype_norm(deptype)
+
+ return [dep.spec
+ for dep in where.values()
+ if deptype and any(d in deptype for d in dep.deptypes)]
+
+ def dependencies(self, deptype=None):
+ return self._find_deps(self._dependencies, deptype)
+
+ def dependents(self, deptype=None):
+ return self._find_deps(self._dependents, deptype)
+
def traverse(self, order=None):
- for _, spec in self.dependencies.items():
- yield spec
+ for _, spec in self._dependencies.items():
+ yield spec.spec
yield self
- #allDeps = itertools.chain.from_iterable(i.traverse() for i in self.dependencies.itervalues())
- #return set(itertools.chain([self], allDeps))
def dag_hash(self):
return self.hash
@@ -79,6 +99,7 @@ class MockSpec(object):
class MockPackage(object):
+
def __init__(self, spec, buildLogPath):
self.name = spec.name
self.spec = spec
@@ -90,6 +111,7 @@ class MockPackage(object):
class MockPackageDb(object):
+
def __init__(self, init=None):
self.specToPkg = {}
if init:
@@ -104,12 +126,13 @@ def mock_fetch_log(path):
specX = MockSpec('X', "1.2.0")
specY = MockSpec('Y', "2.3.8")
-specX.dependencies['Y'] = specY
+specX._dependencies['Y'] = spack.DependencySpec(specY, spack.alldeps)
pkgX = MockPackage(specX, 'logX')
pkgY = MockPackage(specY, 'logY')
class MockArgs(object):
+
def __init__(self, package):
self.package = package
self.jobs = None
@@ -145,7 +168,7 @@ class TestInstallTest(unittest.TestCase):
test_install.open = mock_open
# Clean FILE_REGISTRY
- FILE_REGISTRY = collections.defaultdict(StringIO.StringIO)
+ FILE_REGISTRY.clear()
pkgX.installed = False
pkgY.installed = False
@@ -171,7 +194,7 @@ class TestInstallTest(unittest.TestCase):
spack.repo = self.saved_db
def test_installing_both(self):
- test_install.test_install(None, MockArgs('X') )
+ test_install.test_install(None, MockArgs('X'))
self.assertEqual(len(FILE_REGISTRY), 1)
for _, content in FILE_REGISTRY.items():
self.assertTrue('tests="2"' in content)
@@ -187,4 +210,5 @@ class TestInstallTest(unittest.TestCase):
self.assertTrue('tests="2"' in content)
self.assertTrue('failures="0"' in content)
self.assertTrue('errors="0"' in content)
- self.assertEqual(sum('skipped' in line for line in content.split('\n')), 2)
+ self.assertEqual(
+ sum('skipped' in line for line in content.split('\n')), 2)
diff --git a/lib/spack/spack/test/cmd/uninstall.py b/lib/spack/spack/test/cmd/uninstall.py
index 9fffaace40..4ccb9ddbf4 100644
--- a/lib/spack/spack/test/cmd/uninstall.py
+++ b/lib/spack/spack/test/cmd/uninstall.py
@@ -28,6 +28,7 @@ from spack.cmd.uninstall import uninstall
class MockArgs(object):
+
def __init__(self, packages, all=False, force=False, dependents=False):
self.packages = packages
self.all = all
@@ -37,6 +38,7 @@ class MockArgs(object):
class TestUninstall(spack.test.mock_database.MockDatabase):
+
def test_uninstall(self):
parser = None
# Multiple matches
diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py
index 963481054e..8ecbddbda2 100644
--- a/lib/spack/spack/test/concretize.py
+++ b/lib/spack/spack/test/concretize.py
@@ -23,11 +23,13 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import spack
+import spack.architecture
from spack.spec import Spec, CompilerSpec
from spack.version import ver
from spack.concretize import find_spec
from spack.test.mock_packages_test import *
+
class ConcretizeTest(MockPackagesTest):
def check_spec(self, abstract, concrete):
@@ -58,7 +60,6 @@ class ConcretizeTest(MockPackagesTest):
if abstract.architecture and abstract.architecture.concrete:
self.assertEqual(abstract.architecture, concrete.architecture)
-
def check_concretize(self, abstract_spec):
abstract = Spec(abstract_spec)
concrete = abstract.concretized()
@@ -69,29 +70,24 @@ class ConcretizeTest(MockPackagesTest):
return concrete
-
def test_concretize_no_deps(self):
self.check_concretize('libelf')
self.check_concretize('libelf@0.8.13')
-
def test_concretize_dag(self):
self.check_concretize('callpath')
self.check_concretize('mpileaks')
self.check_concretize('libelf')
-
def test_concretize_variant(self):
self.check_concretize('mpich+debug')
self.check_concretize('mpich~debug')
self.check_concretize('mpich debug=2')
self.check_concretize('mpich')
-
def test_conretize_compiler_flags(self):
self.check_concretize('mpich cppflags="-O3"')
-
def test_concretize_preferred_version(self):
spec = self.check_concretize('python')
self.assertEqual(spec.versions, ver('2.7.11'))
@@ -99,7 +95,6 @@ class ConcretizeTest(MockPackagesTest):
spec = self.check_concretize('python@3.5.1')
self.assertEqual(spec.versions, ver('3.5.1'))
-
def test_concretize_with_virtual(self):
self.check_concretize('mpileaks ^mpi')
self.check_concretize('mpileaks ^mpi@:1.1')
@@ -110,7 +105,6 @@ class ConcretizeTest(MockPackagesTest):
self.check_concretize('mpileaks ^mpi@:1')
self.check_concretize('mpileaks ^mpi@1.2:2')
-
def test_concretize_with_restricted_virtual(self):
self.check_concretize('mpileaks ^mpich2')
@@ -141,97 +135,98 @@ class ConcretizeTest(MockPackagesTest):
concrete = self.check_concretize('mpileaks ^mpich2@1.3.1:1.4')
self.assertTrue(concrete['mpich2'].satisfies('mpich2@1.3.1:1.4'))
-
def test_concretize_with_provides_when(self):
"""Make sure insufficient versions of MPI are not in providers list when
we ask for some advanced version.
"""
- self.assertTrue(not any(spec.satisfies('mpich2@:1.0')
- for spec in spack.repo.providers_for('mpi@2.1')))
-
- self.assertTrue(not any(spec.satisfies('mpich2@:1.1')
- for spec in spack.repo.providers_for('mpi@2.2')))
+ self.assertTrue(
+ not any(spec.satisfies('mpich2@:1.0')
+ for spec in spack.repo.providers_for('mpi@2.1')))
- self.assertTrue(not any(spec.satisfies('mpich2@:1.1')
- for spec in spack.repo.providers_for('mpi@2.2')))
+ self.assertTrue(
+ not any(spec.satisfies('mpich2@:1.1')
+ for spec in spack.repo.providers_for('mpi@2.2')))
- self.assertTrue(not any(spec.satisfies('mpich@:1')
- for spec in spack.repo.providers_for('mpi@2')))
+ self.assertTrue(
+ not any(spec.satisfies('mpich@:1')
+ for spec in spack.repo.providers_for('mpi@2')))
- self.assertTrue(not any(spec.satisfies('mpich@:1')
- for spec in spack.repo.providers_for('mpi@3')))
-
- self.assertTrue(not any(spec.satisfies('mpich2')
- for spec in spack.repo.providers_for('mpi@3')))
+ self.assertTrue(
+ not any(spec.satisfies('mpich@:1')
+ for spec in spack.repo.providers_for('mpi@3')))
+ self.assertTrue(
+ not any(spec.satisfies('mpich2')
+ for spec in spack.repo.providers_for('mpi@3')))
def test_concretize_two_virtuals(self):
"""Test a package with multiple virtual dependencies."""
- s = Spec('hypre').concretize()
-
+ Spec('hypre').concretize()
def test_concretize_two_virtuals_with_one_bound(self):
"""Test a package with multiple virtual dependencies and one preset."""
- s = Spec('hypre ^openblas').concretize()
-
+ Spec('hypre ^openblas').concretize()
def test_concretize_two_virtuals_with_two_bound(self):
- """Test a package with multiple virtual dependencies and two of them preset."""
- s = Spec('hypre ^openblas ^netlib-lapack').concretize()
-
+ """Test a package with multiple virtual deps and two of them preset."""
+ Spec('hypre ^openblas ^netlib-lapack').concretize()
def test_concretize_two_virtuals_with_dual_provider(self):
"""Test a package with multiple virtual dependencies and force a provider
that provides both."""
- s = Spec('hypre ^openblas-with-lapack').concretize()
-
+ Spec('hypre ^openblas-with-lapack').concretize()
def test_concretize_two_virtuals_with_dual_provider_and_a_conflict(self):
- """Test a package with multiple virtual dependencies and force a provider
- that provides both, and another conflicting package that provides one."""
+ """Test a package with multiple virtual dependencies and force a
+ provider that provides both, and another conflicting package that
+ provides one.
+ """
s = Spec('hypre ^openblas-with-lapack ^netlib-lapack')
self.assertRaises(spack.spec.MultipleProviderError, s.concretize)
-
def test_virtual_is_fully_expanded_for_callpath(self):
# force dependence on fake "zmpi" by asking for MPI 10.0
spec = Spec('callpath ^mpi@10.0')
- self.assertTrue('mpi' in spec.dependencies)
+ self.assertTrue('mpi' in spec._dependencies)
self.assertFalse('fake' in spec)
spec.concretize()
- self.assertTrue('zmpi' in spec.dependencies)
- self.assertTrue(all(not 'mpi' in d.dependencies for d in spec.traverse()))
+ self.assertTrue('zmpi' in spec._dependencies)
+ self.assertTrue(all('mpi' not in d._dependencies
+ for d in spec.traverse()))
self.assertTrue('zmpi' in spec)
self.assertTrue('mpi' in spec)
- self.assertTrue('fake' in spec.dependencies['zmpi'])
-
+ self.assertTrue('fake' in spec._dependencies['zmpi'].spec)
def test_virtual_is_fully_expanded_for_mpileaks(self):
spec = Spec('mpileaks ^mpi@10.0')
- self.assertTrue('mpi' in spec.dependencies)
+ self.assertTrue('mpi' in spec._dependencies)
self.assertFalse('fake' in spec)
spec.concretize()
- self.assertTrue('zmpi' in spec.dependencies)
- self.assertTrue('callpath' in spec.dependencies)
- self.assertTrue('zmpi' in spec.dependencies['callpath'].dependencies)
- self.assertTrue('fake' in spec.dependencies['callpath'].dependencies['zmpi'].dependencies)
-
- self.assertTrue(all(not 'mpi' in d.dependencies for d in spec.traverse()))
+ self.assertTrue('zmpi' in spec._dependencies)
+ self.assertTrue('callpath' in spec._dependencies)
+ self.assertTrue(
+ 'zmpi' in spec._dependencies['callpath']
+ .spec._dependencies)
+ self.assertTrue(
+ 'fake' in spec._dependencies['callpath']
+ .spec._dependencies['zmpi']
+ .spec._dependencies)
+
+ self.assertTrue(
+ all('mpi' not in d._dependencies for d in spec.traverse()))
self.assertTrue('zmpi' in spec)
self.assertTrue('mpi' in spec)
-
def test_my_dep_depends_on_provider_of_my_virtual_dep(self):
spec = Spec('indirect_mpich')
spec.normalize()
spec.concretize()
-
def test_compiler_inheritance(self):
spec = Spec('mpileaks')
spec.normalize()
@@ -243,15 +238,28 @@ class ConcretizeTest(MockPackagesTest):
self.assertTrue(spec['libdwarf'].compiler.satisfies('clang'))
self.assertTrue(spec['libelf'].compiler.satisfies('clang'))
-
def test_external_package(self):
spec = Spec('externaltool%gcc')
spec.concretize()
- self.assertEqual(spec['externaltool'].external, '/path/to/external_tool')
+ self.assertEqual(
+ spec['externaltool'].external, '/path/to/external_tool')
self.assertFalse('externalprereq' in spec)
self.assertTrue(spec['externaltool'].compiler.satisfies('gcc'))
+ def test_external_package_module(self):
+ # No tcl modules on darwin/linux machines
+        # TODO: find an improved way to check for this.
+ platform = spack.architecture.platform().name
+ if (platform == 'darwin' or platform == 'linux'):
+ return
+
+ spec = Spec('externalmodule')
+ spec.concretize()
+ self.assertEqual(
+ spec['externalmodule'].external_module, 'external-module')
+ self.assertFalse('externalprereq' in spec)
+ self.assertTrue(spec['externalmodule'].compiler.satisfies('gcc'))
def test_nobuild_package(self):
got_error = False
@@ -262,16 +270,16 @@ class ConcretizeTest(MockPackagesTest):
got_error = True
self.assertTrue(got_error)
-
def test_external_and_virtual(self):
spec = Spec('externaltest')
spec.concretize()
- self.assertEqual(spec['externaltool'].external, '/path/to/external_tool')
- self.assertEqual(spec['stuff'].external, '/path/to/external_virtual_gcc')
+ self.assertEqual(
+ spec['externaltool'].external, '/path/to/external_tool')
+ self.assertEqual(
+ spec['stuff'].external, '/path/to/external_virtual_gcc')
self.assertTrue(spec['externaltool'].compiler.satisfies('gcc'))
self.assertTrue(spec['stuff'].compiler.satisfies('gcc'))
-
def test_find_spec_parents(self):
"""Tests the spec finding logic used by concretization. """
s = Spec('a +foo',
@@ -282,7 +290,6 @@ class ConcretizeTest(MockPackagesTest):
self.assertEqual('a', find_spec(s['b'], lambda s: '+foo' in s).name)
-
def test_find_spec_children(self):
s = Spec('a',
Spec('b +foo',
@@ -297,7 +304,6 @@ class ConcretizeTest(MockPackagesTest):
Spec('e +foo'))
self.assertEqual('c', find_spec(s['b'], lambda s: '+foo' in s).name)
-
def test_find_spec_sibling(self):
s = Spec('a',
Spec('b +foo',
@@ -315,7 +321,6 @@ class ConcretizeTest(MockPackagesTest):
Spec('f +foo')))
self.assertEqual('f', find_spec(s['b'], lambda s: '+foo' in s).name)
-
def test_find_spec_self(self):
s = Spec('a',
Spec('b +foo',
@@ -324,7 +329,6 @@ class ConcretizeTest(MockPackagesTest):
Spec('e'))
self.assertEqual('b', find_spec(s['b'], lambda s: '+foo' in s).name)
-
def test_find_spec_none(self):
s = Spec('a',
Spec('b',
@@ -333,7 +337,6 @@ class ConcretizeTest(MockPackagesTest):
Spec('e'))
self.assertEqual(None, find_spec(s['b'], lambda s: '+foo' in s))
-
def test_compiler_child(self):
s = Spec('mpileaks%clang ^dyninst%gcc')
s.concretize()
diff --git a/lib/spack/spack/test/concretize_preferences.py b/lib/spack/spack/test/concretize_preferences.py
new file mode 100644
index 0000000000..2c8bedc33f
--- /dev/null
+++ b/lib/spack/spack/test/concretize_preferences.py
@@ -0,0 +1,106 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import spack
+import spack.architecture
+from spack.test.mock_packages_test import *
+from tempfile import mkdtemp
+
+
+class ConcretizePreferencesTest(MockPackagesTest):
+ """Test concretization preferences are being applied correctly.
+ """
+
+ def setUp(self):
+ """Create config section to store concretization preferences
+ """
+ super(ConcretizePreferencesTest, self).setUp()
+ self.tmp_dir = mkdtemp('.tmp', 'spack-config-test-')
+ spack.config.ConfigScope('concretize',
+ os.path.join(self.tmp_dir, 'concretize'))
+
+ def tearDown(self):
+ super(ConcretizePreferencesTest, self).tearDown()
+ shutil.rmtree(self.tmp_dir, True)
+ spack.pkgsort = spack.PreferredPackages()
+
+ def concretize(self, abstract_spec):
+ return Spec(abstract_spec).concretized()
+
+ def update_packages(self, pkgname, section, value):
+ """Update config and reread package list"""
+ conf = {pkgname: {section: value}}
+ spack.config.update_config('packages', conf, 'concretize')
+ spack.pkgsort = spack.PreferredPackages()
+
+ def assert_variant_values(self, spec, **variants):
+ concrete = self.concretize(spec)
+ for variant, value in variants.items():
+ self.assertEqual(concrete.variants[variant].value, value)
+
+ def test_preferred_variants(self):
+ """Test preferred variants are applied correctly
+ """
+ self.update_packages('mpileaks', 'variants',
+ '~debug~opt+shared+static')
+ self.assert_variant_values('mpileaks', debug=False, opt=False,
+ shared=True, static=True)
+
+ self.update_packages('mpileaks', 'variants',
+ ['+debug', '+opt', '~shared', '-static'])
+ self.assert_variant_values('mpileaks', debug=True, opt=True,
+ shared=False, static=False)
+
+ def test_preferred_compilers(self):
+ """Test preferred compilers are applied correctly
+ """
+ self.update_packages('mpileaks', 'compiler', ['clang@3.3'])
+ spec = self.concretize('mpileaks')
+ self.assertEqual(spec.compiler, spack.spec.CompilerSpec('clang@3.3'))
+
+ self.update_packages('mpileaks', 'compiler', ['gcc@4.5.0'])
+ spec = self.concretize('mpileaks')
+ self.assertEqual(spec.compiler, spack.spec.CompilerSpec('gcc@4.5.0'))
+
+ def test_preferred_versions(self):
+ """Test preferred package versions are applied correctly
+ """
+ self.update_packages('mpileaks', 'version', ['2.3'])
+ spec = self.concretize('mpileaks')
+ self.assertEqual(spec.version, spack.spec.Version('2.3'))
+
+ self.update_packages('mpileaks', 'version', ['2.2'])
+ spec = self.concretize('mpileaks')
+ self.assertEqual(spec.version, spack.spec.Version('2.2'))
+
+ def test_preferred_providers(self):
+ """Test preferred providers of virtual packages are applied correctly
+ """
+ self.update_packages('all', 'providers', {'mpi': ['mpich']})
+ spec = self.concretize('mpileaks')
+ self.assertTrue('mpich' in spec)
+
+ self.update_packages('all', 'providers', {'mpi': ['zmpi']})
+ spec = self.concretize('mpileaks')
+        self.assertTrue('zmpi' in spec)
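
The new file above drives concretization preferences end to end. A minimal sketch of the same flow outside the test harness, assuming a 2016-era Spack checkout importable on sys.path and a writable config scope named 'concretize' as created in setUp:

import spack
import spack.config
from spack.spec import Spec

# Prefer mpich as the provider of the mpi virtual for all packages.
spack.config.update_config(
    'packages', {'all': {'providers': {'mpi': ['mpich']}}}, 'concretize')
spack.pkgsort = spack.PreferredPackages()   # reread the preferences

spec = Spec('mpileaks').concretized()
assert 'mpich' in spec                      # mpich now satisfies mpi
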
diff --git a/lib/spack/spack/test/config.py b/lib/spack/spack/test/config.py
index eff482f4c6..0822e44db8 100644
--- a/lib/spack/spack/test/config.py
+++ b/lib/spack/spack/test/config.py
@@ -32,49 +32,80 @@ from ordereddict_backport import OrderedDict
from spack.test.mock_packages_test import *
# Some sample compiler config data
-a_comps = {
- "x86_64_E5v2_IntelIB": {
- "gcc@4.7.3" : {
- "cc" : "/gcc473",
+a_comps = [
+ {'compiler': {
+ 'paths': {
+ "cc": "/gcc473",
"cxx": "/g++473",
"f77": None,
- "fc" : None },
- "gcc@4.5.0" : {
- "cc" : "/gcc450",
+ "fc": None
+ },
+ 'modules': None,
+ 'spec': 'gcc@4.7.3',
+ 'operating_system': 'CNL10'
+ }},
+ {'compiler': {
+ 'paths': {
+ "cc": "/gcc450",
"cxx": "/g++450",
- "f77": "/gfortran",
- "fc" : "/gfortran" },
- "clang@3.3" : {
- "cc" : "<overwritten>",
+ "f77": 'gfortran',
+ "fc": 'gfortran'
+ },
+ 'modules': None,
+ 'spec': 'gcc@4.5.0',
+ 'operating_system': 'CNL10'
+ }},
+ {'compiler': {
+ 'paths': {
+ "cc": "<overwritten>",
"cxx": "<overwritten>",
- "f77": "<overwritten>",
- "fc" : "<overwritten>" }
- }
-}
-
-b_comps = {
- "x86_64_E5v3": {
- "icc@10.0" : {
- "cc" : "/icc100",
- "cxx": "/icc100",
+ "f77": '<overwritten>',
+ "fc": '<overwritten>'},
+ 'modules': None,
+ 'spec': 'clang@3.3',
+ 'operating_system': 'CNL10'
+ }}
+]
+
+b_comps = [
+ {'compiler': {
+ 'paths': {
+ "cc": "/icc100",
+ "cxx": "/icp100",
"f77": None,
- "fc" : None },
- "icc@11.1" : {
- "cc" : "/icc111",
+ "fc": None
+ },
+ 'modules': None,
+ 'spec': 'icc@10.0',
+ 'operating_system': 'CNL10'
+ }},
+ {'compiler': {
+ 'paths': {
+ "cc": "/icc111",
"cxx": "/icp111",
- "f77": "/ifort",
- "fc" : "/ifort" },
- "clang@3.3" : {
- "cc" : "/clang",
- "cxx": "/clang++",
- "f77": None,
- "fc" : None}
- }
-}
+ "f77": 'ifort',
+ "fc": 'ifort'
+ },
+ 'modules': None,
+ 'spec': 'icc@11.1',
+ 'operating_system': 'CNL10'
+ }},
+ {'compiler': {
+ 'paths': {
+ "cc": "<overwritten>",
+ "cxx": "<overwritten>",
+ "f77": '<overwritten>',
+ "fc": '<overwritten>'},
+ 'modules': None,
+ 'spec': 'clang@3.3',
+ 'operating_system': 'CNL10'
+ }}
+]
# Some Sample repo data
-repos_low = [ "/some/path" ]
-repos_high = [ "/some/other/path" ]
+repos_low = ["/some/path"]
+repos_high = ["/some/other/path"]
+
class ConfigTest(MockPackagesTest):
@@ -82,28 +113,42 @@ class ConfigTest(MockPackagesTest):
super(ConfigTest, self).setUp()
self.tmp_dir = mkdtemp('.tmp', 'spack-config-test-')
spack.config.config_scopes = OrderedDict()
- spack.config.ConfigScope('test_low_priority', os.path.join(self.tmp_dir, 'low'))
- spack.config.ConfigScope('test_high_priority', os.path.join(self.tmp_dir, 'high'))
+ spack.config.ConfigScope(
+ 'test_low_priority', os.path.join(self.tmp_dir, 'low'))
+ spack.config.ConfigScope('test_high_priority',
+ os.path.join(self.tmp_dir, 'high'))
def tearDown(self):
super(ConfigTest, self).tearDown()
shutil.rmtree(self.tmp_dir, True)
- def check_config(self, comps, arch, *compiler_names):
+ def check_config(self, comps, *compiler_names):
"""Check that named compilers in comps match Spack's config."""
config = spack.config.get_config('compilers')
compiler_list = ['cc', 'cxx', 'f77', 'fc']
- for key in compiler_names:
- for c in compiler_list:
- expected = comps[arch][key][c]
- actual = config[arch][key][c]
- self.assertEqual(expected, actual)
+ param_list = ['modules', 'paths', 'spec', 'operating_system']
+ for compiler in config:
+ conf = compiler['compiler']
+ if conf['spec'] in compiler_names:
+ comp = None
+ for c in comps:
+ if c['compiler']['spec'] == conf['spec']:
+ comp = c['compiler']
+ break
+ if not comp:
+ self.fail('Bad config spec')
+ for p in param_list:
+ self.assertEqual(conf[p], comp[p])
+ for c in compiler_list:
+ expected = comp['paths'][c]
+ actual = conf['paths'][c]
+ self.assertEqual(expected, actual)
def test_write_list_in_memory(self):
spack.config.update_config('repos', repos_low, 'test_low_priority')
spack.config.update_config('repos', repos_high, 'test_high_priority')
config = spack.config.get_config('repos')
- self.assertEqual(config, repos_high+repos_low)
+ self.assertEqual(config, repos_high + repos_low)
def test_write_key_in_memory(self):
# Write b_comps "on top of" a_comps.
@@ -111,8 +156,8 @@ class ConfigTest(MockPackagesTest):
spack.config.update_config('compilers', b_comps, 'test_high_priority')
# Make sure the config looks how we expect.
- self.check_config(a_comps, 'x86_64_E5v2_IntelIB', 'gcc@4.7.3', 'gcc@4.5.0')
- self.check_config(b_comps, 'x86_64_E5v3', 'icc@10.0', 'icc@11.1', 'clang@3.3')
+ self.check_config(a_comps, 'gcc@4.7.3', 'gcc@4.5.0')
+ self.check_config(b_comps, 'icc@10.0', 'icc@11.1', 'clang@3.3')
def test_write_key_to_disk(self):
# Write b_comps "on top of" a_comps.
@@ -123,8 +168,8 @@ class ConfigTest(MockPackagesTest):
spack.config.clear_config_caches()
# Same check again, to ensure consistency.
- self.check_config(a_comps, 'x86_64_E5v2_IntelIB', 'gcc@4.7.3', 'gcc@4.5.0')
- self.check_config(b_comps, 'x86_64_E5v3', 'icc@10.0', 'icc@11.1', 'clang@3.3')
+ self.check_config(a_comps, 'gcc@4.7.3', 'gcc@4.5.0')
+ self.check_config(b_comps, 'icc@10.0', 'icc@11.1', 'clang@3.3')
def test_write_to_same_priority_file(self):
# Write b_comps in the same file as a_comps.
@@ -135,5 +180,5 @@ class ConfigTest(MockPackagesTest):
spack.config.clear_config_caches()
# Same check again, to ensure consistency.
- self.check_config(a_comps, 'x86_64_E5v2_IntelIB', 'gcc@4.7.3', 'gcc@4.5.0')
- self.check_config(b_comps, 'x86_64_E5v3', 'icc@10.0', 'icc@11.1', 'clang@3.3')
+ self.check_config(a_comps, 'gcc@4.7.3', 'gcc@4.5.0')
+ self.check_config(b_comps, 'icc@10.0', 'icc@11.1', 'clang@3.3')
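
The rewritten fixtures above reflect the new 'compilers' schema: a flat list of {'compiler': {...}} entries instead of a dict nested under an architecture key, which is why check_config lost its arch parameter. A sketch of one entry in the new shape, with values taken from the a_comps fixture; the commented round trip assumes config scopes registered as in ConfigTest.setUp:

entry = {'compiler': {
    'spec': 'gcc@4.5.0',
    'operating_system': 'CNL10',
    'modules': None,
    'paths': {'cc': '/gcc450', 'cxx': '/g++450',
              'f77': 'gfortran', 'fc': 'gfortran'},
}}

# spack.config.update_config('compilers', [entry], 'test_high_priority')
# assert entry in spack.config.get_config('compilers')
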
diff --git a/lib/spack/spack/test/data/sourceme_first.sh b/lib/spack/spack/test/data/sourceme_first.sh
new file mode 100644
index 0000000000..800f639ac8
--- /dev/null
+++ b/lib/spack/spack/test/data/sourceme_first.sh
@@ -0,0 +1,3 @@
+#!/usr/bin/env bash
+export NEW_VAR='new'
+export UNSET_ME='overridden'
diff --git a/lib/spack/spack/test/data/sourceme_second.sh b/lib/spack/spack/test/data/sourceme_second.sh
new file mode 100644
index 0000000000..9955a0e6d6
--- /dev/null
+++ b/lib/spack/spack/test/data/sourceme_second.sh
@@ -0,0 +1,3 @@
+#!/usr/bin/env bash
+export PATH_LIST='/path/first:/path/second:/path/fourth'
+unset EMPTY_PATH_LIST
\ No newline at end of file
diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py
index e1322f2081..22b1f17890 100644
--- a/lib/spack/spack/test/database.py
+++ b/lib/spack/spack/test/database.py
@@ -31,7 +31,6 @@ import multiprocessing
import spack
from llnl.util.filesystem import join_path
-from llnl.util.lock import *
from llnl.util.tty.colify import colify
from spack.test.mock_database import MockDatabase
@@ -72,6 +71,7 @@ def _print_ref_counts():
class DatabaseTest(MockDatabase):
+
def test_005_db_exists(self):
"""Make sure db cache file exists after creating."""
index_file = join_path(self.install_path, '.spack-db', 'index.yaml')
@@ -88,26 +88,28 @@ class DatabaseTest(MockDatabase):
# query specs with multiple configurations
mpileaks_specs = [s for s in all_specs if s.satisfies('mpileaks')]
callpath_specs = [s for s in all_specs if s.satisfies('callpath')]
- mpi_specs = [s for s in all_specs if s.satisfies('mpi')]
+ mpi_specs = [s for s in all_specs if s.satisfies('mpi')]
self.assertEqual(len(mpileaks_specs), 3)
self.assertEqual(len(callpath_specs), 3)
self.assertEqual(len(mpi_specs), 3)
# query specs with single configurations
- dyninst_specs = [s for s in all_specs if s.satisfies('dyninst')]
+ dyninst_specs = [s for s in all_specs if s.satisfies('dyninst')]
libdwarf_specs = [s for s in all_specs if s.satisfies('libdwarf')]
- libelf_specs = [s for s in all_specs if s.satisfies('libelf')]
+ libelf_specs = [s for s in all_specs if s.satisfies('libelf')]
self.assertEqual(len(dyninst_specs), 1)
self.assertEqual(len(libdwarf_specs), 1)
self.assertEqual(len(libelf_specs), 1)
# Query by dependency
- self.assertEqual(len([s for s in all_specs if s.satisfies('mpileaks ^mpich')]), 1)
- self.assertEqual(len([s for s in all_specs if s.satisfies('mpileaks ^mpich2')]), 1)
- self.assertEqual(len([s for s in all_specs if s.satisfies('mpileaks ^zmpi')]), 1)
-
+ self.assertEqual(
+ len([s for s in all_specs if s.satisfies('mpileaks ^mpich')]), 1)
+ self.assertEqual(
+ len([s for s in all_specs if s.satisfies('mpileaks ^mpich2')]), 1)
+ self.assertEqual(
+ len([s for s in all_specs if s.satisfies('mpileaks ^zmpi')]), 1)
def test_015_write_and_read(self):
# write and read DB
@@ -122,7 +124,6 @@ class DatabaseTest(MockDatabase):
self.assertEqual(new_rec.path, rec.path)
self.assertEqual(new_rec.installed, rec.installed)
-
def _check_db_sanity(self):
"""Utiilty function to check db against install layout."""
expected = sorted(spack.install_layout.all_specs())
@@ -132,12 +133,10 @@ class DatabaseTest(MockDatabase):
for e, a in zip(expected, actual):
self.assertEqual(e, a)
-
def test_020_db_sanity(self):
"""Make sure query() returns what's actually in the db."""
self._check_db_sanity()
-
def test_030_db_sanity_from_another_process(self):
def read_and_modify():
self._check_db_sanity() # check that other process can read DB
@@ -152,30 +151,28 @@ class DatabaseTest(MockDatabase):
with self.installed_db.read_transaction():
self.assertEqual(len(self.installed_db.query('mpileaks ^zmpi')), 0)
-
def test_040_ref_counts(self):
"""Ensure that we got ref counts right when we read the DB."""
self.installed_db._check_ref_counts()
-
def test_050_basic_query(self):
- """Ensure that querying the database is consistent with what is installed."""
+ """Ensure querying database is consistent with what is installed."""
# query everything
self.assertEqual(len(spack.installed_db.query()), 13)
# query specs with multiple configurations
mpileaks_specs = self.installed_db.query('mpileaks')
callpath_specs = self.installed_db.query('callpath')
- mpi_specs = self.installed_db.query('mpi')
+ mpi_specs = self.installed_db.query('mpi')
self.assertEqual(len(mpileaks_specs), 3)
self.assertEqual(len(callpath_specs), 3)
self.assertEqual(len(mpi_specs), 3)
# query specs with single configurations
- dyninst_specs = self.installed_db.query('dyninst')
+ dyninst_specs = self.installed_db.query('dyninst')
libdwarf_specs = self.installed_db.query('libdwarf')
- libelf_specs = self.installed_db.query('libelf')
+ libelf_specs = self.installed_db.query('libelf')
self.assertEqual(len(dyninst_specs), 1)
self.assertEqual(len(libdwarf_specs), 1)
@@ -186,7 +183,6 @@ class DatabaseTest(MockDatabase):
self.assertEqual(len(self.installed_db.query('mpileaks ^mpich2')), 1)
self.assertEqual(len(self.installed_db.query('mpileaks ^zmpi')), 1)
-
def _check_remove_and_add_package(self, spec):
"""Remove a spec from the DB, then add it and make sure everything's
still ok once it is added. This checks that it was
@@ -215,15 +211,12 @@ class DatabaseTest(MockDatabase):
self._check_db_sanity()
self.installed_db._check_ref_counts()
-
def test_060_remove_and_add_root_package(self):
self._check_remove_and_add_package('mpileaks ^mpich')
-
def test_070_remove_and_add_dependency_package(self):
self._check_remove_and_add_package('dyninst')
-
def test_080_root_ref_counts(self):
rec = self.installed_db.get_record('mpileaks ^mpich')
@@ -231,45 +224,89 @@ class DatabaseTest(MockDatabase):
self.installed_db.remove('mpileaks ^mpich')
# record no longer in DB
- self.assertEqual(self.installed_db.query('mpileaks ^mpich', installed=any), [])
+ self.assertEqual(
+ self.installed_db.query('mpileaks ^mpich', installed=any), [])
# record's deps have updated ref_counts
- self.assertEqual(self.installed_db.get_record('callpath ^mpich').ref_count, 0)
+ self.assertEqual(
+ self.installed_db.get_record('callpath ^mpich').ref_count, 0)
self.assertEqual(self.installed_db.get_record('mpich').ref_count, 1)
- # put the spec back
+ # Put the spec back
self.installed_db.add(rec.spec, rec.path)
# record is present again
- self.assertEqual(len(self.installed_db.query('mpileaks ^mpich', installed=any)), 1)
+ self.assertEqual(
+ len(self.installed_db.query('mpileaks ^mpich', installed=any)), 1)
# dependencies have ref counts updated
- self.assertEqual(self.installed_db.get_record('callpath ^mpich').ref_count, 1)
+ self.assertEqual(
+ self.installed_db.get_record('callpath ^mpich').ref_count, 1)
self.assertEqual(self.installed_db.get_record('mpich').ref_count, 2)
-
def test_090_non_root_ref_counts(self):
- mpileaks_mpich_rec = self.installed_db.get_record('mpileaks ^mpich')
- callpath_mpich_rec = self.installed_db.get_record('callpath ^mpich')
+ self.installed_db.get_record('mpileaks ^mpich')
+ self.installed_db.get_record('callpath ^mpich')
# "force remove" a non-root spec from the DB
self.installed_db.remove('callpath ^mpich')
# record still in DB but marked uninstalled
- self.assertEqual(self.installed_db.query('callpath ^mpich', installed=True), [])
- self.assertEqual(len(self.installed_db.query('callpath ^mpich', installed=any)), 1)
+ self.assertEqual(
+ self.installed_db.query('callpath ^mpich', installed=True), [])
+ self.assertEqual(
+ len(self.installed_db.query('callpath ^mpich', installed=any)), 1)
# record and its deps have same ref_counts
- self.assertEqual(self.installed_db.get_record('callpath ^mpich', installed=any).ref_count, 1)
+ self.assertEqual(self.installed_db.get_record(
+ 'callpath ^mpich', installed=any).ref_count, 1)
self.assertEqual(self.installed_db.get_record('mpich').ref_count, 2)
# remove only dependent of uninstalled callpath record
self.installed_db.remove('mpileaks ^mpich')
# record and parent are completely gone.
- self.assertEqual(self.installed_db.query('mpileaks ^mpich', installed=any), [])
- self.assertEqual(self.installed_db.query('callpath ^mpich', installed=any), [])
+ self.assertEqual(
+ self.installed_db.query('mpileaks ^mpich', installed=any), [])
+ self.assertEqual(
+ self.installed_db.query('callpath ^mpich', installed=any), [])
# mpich ref count updated properly.
mpich_rec = self.installed_db.get_record('mpich')
self.assertEqual(mpich_rec.ref_count, 0)
+
+ def test_100_no_write_with_exception_on_remove(self):
+ def fail_while_writing():
+ with self.installed_db.write_transaction():
+ self._mock_remove('mpileaks ^zmpi')
+ raise Exception()
+
+ with self.installed_db.read_transaction():
+ self.assertEqual(
+ len(self.installed_db.query('mpileaks ^zmpi', installed=any)),
+ 1)
+
+ self.assertRaises(Exception, fail_while_writing)
+
+ # reload DB and make sure zmpi is still there.
+ with self.installed_db.read_transaction():
+ self.assertEqual(
+ len(self.installed_db.query('mpileaks ^zmpi', installed=any)),
+ 1)
+
+ def test_110_no_write_with_exception_on_install(self):
+ def fail_while_writing():
+ with self.installed_db.write_transaction():
+ self._mock_install('cmake')
+ raise Exception()
+
+ with self.installed_db.read_transaction():
+ self.assertEqual(
+ self.installed_db.query('cmake', installed=any), [])
+
+ self.assertRaises(Exception, fail_while_writing)
+
+ # reload DB and make sure cmake was not written.
+ with self.installed_db.read_transaction():
+ self.assertEqual(
+ self.installed_db.query('cmake', installed=any), [])
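
Tests 100 and 110 above pin down transactional rollback: when an exception escapes a write_transaction, the on-disk database is left untouched. A sketch of that guarantee, assuming db is a spack.database.Database and spec/path describe an installed record as in the tests:

def fail_while_writing(db, spec, path):
    with db.write_transaction():
        db.add(spec, path)    # staged inside the transaction
        raise Exception()     # aborts before the DB file is rewritten

# A fresh read transaction afterwards sees the pre-transaction state:
#     with db.read_transaction():
#         assert db.query(spec, installed=any) == []
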
diff --git a/lib/spack/spack/test/directory_layout.py b/lib/spack/spack/test/directory_layout.py
index 74669fe8a2..2d0565acae 100644
--- a/lib/spack/spack/test/directory_layout.py
+++ b/lib/spack/spack/test/directory_layout.py
@@ -49,13 +49,11 @@ class DirectoryLayoutTest(MockPackagesTest):
self.tmpdir = tempfile.mkdtemp()
self.layout = YamlDirectoryLayout(self.tmpdir)
-
def tearDown(self):
super(DirectoryLayoutTest, self).tearDown()
shutil.rmtree(self.tmpdir, ignore_errors=True)
self.layout = None
-
def test_read_and_write_spec(self):
"""This goes through each package in spack and creates a directory for
it. It then ensures that the spec for the directory's
@@ -67,8 +65,8 @@ class DirectoryLayoutTest(MockPackagesTest):
for pkg in packages:
if pkg.name.startswith('external'):
- #External package tests cannot be installed
- continue
+ # External package tests cannot be installed
+ continue
spec = pkg.spec
# If a spec fails to concretize, just skip it. If it is a
@@ -115,7 +113,6 @@ class DirectoryLayoutTest(MockPackagesTest):
self.assertFalse(os.path.isdir(install_dir))
self.assertFalse(os.path.exists(install_dir))
-
def test_handle_unknown_package(self):
"""This test ensures that spack can at least do *some*
operations with packages that are installed but that it
@@ -166,7 +163,6 @@ class DirectoryLayoutTest(MockPackagesTest):
spack.repo.swap(mock_db)
-
def test_find(self):
"""Test that finding specs within an install layout works."""
packages = list(spack.repo.all_packages())[:max_packages]
@@ -175,13 +171,14 @@ class DirectoryLayoutTest(MockPackagesTest):
installed_specs = {}
for pkg in packages:
if pkg.name.startswith('external'):
- #External package tests cannot be installed
+ # External package tests cannot be installed
continue
spec = pkg.spec.concretized()
installed_specs[spec.name] = spec
self.layout.create_install_directory(spec)
- # Make sure all the installed specs appear in DirectoryLayout.all_specs()
+ # Make sure all the installed specs appear in
+ # DirectoryLayout.all_specs()
found_specs = dict((s.name, s) for s in self.layout.all_specs())
for name, spec in found_specs.items():
self.assertTrue(name in found_specs)
diff --git a/lib/spack/spack/test/environment.py b/lib/spack/spack/test/environment.py
index ded1539e18..9b5d75f273 100644
--- a/lib/spack/spack/test/environment.py
+++ b/lib/spack/spack/test/environment.py
@@ -24,16 +24,25 @@
##############################################################################
import unittest
import os
+
+from spack import spack_root
+from llnl.util.filesystem import join_path
from spack.environment import EnvironmentModifications
+from spack.environment import SetEnv, UnsetEnv
+from spack.environment import RemovePath, PrependPath, AppendPath
class EnvironmentTest(unittest.TestCase):
+
def setUp(self):
- os.environ.clear()
os.environ['UNSET_ME'] = 'foo'
os.environ['EMPTY_PATH_LIST'] = ''
os.environ['PATH_LIST'] = '/path/second:/path/third'
- os.environ['REMOVE_PATH_LIST'] = '/a/b:/duplicate:/a/c:/remove/this:/a/d:/duplicate/:/f/g'
+ os.environ['REMOVE_PATH_LIST'] = \
+ '/a/b:/duplicate:/a/c:/remove/this:/a/d:/duplicate/:/f/g'
+
+ def tearDown(self):
+ pass
def test_set(self):
env = EnvironmentModifications()
@@ -74,9 +83,18 @@ class EnvironmentTest(unittest.TestCase):
env.remove_path('REMOVE_PATH_LIST', '/duplicate/')
env.apply_modifications()
- self.assertEqual('/path/first:/path/second:/path/third:/path/last', os.environ['PATH_LIST'])
- self.assertEqual('/path/first:/path/middle:/path/last', os.environ['EMPTY_PATH_LIST'])
- self.assertEqual('/path/first:/path/middle:/path/last', os.environ['NEWLY_CREATED_PATH_LIST'])
+ self.assertEqual(
+ '/path/first:/path/second:/path/third:/path/last',
+ os.environ['PATH_LIST']
+ )
+ self.assertEqual(
+ '/path/first:/path/middle:/path/last',
+ os.environ['EMPTY_PATH_LIST']
+ )
+ self.assertEqual(
+ '/path/first:/path/middle:/path/last',
+ os.environ['NEWLY_CREATED_PATH_LIST']
+ )
self.assertEqual('/a/b:/a/c:/a/d:/f/g', os.environ['REMOVE_PATH_LIST'])
def test_extra_arguments(self):
@@ -95,3 +113,46 @@ class EnvironmentTest(unittest.TestCase):
self.assertEqual(len(copy_construct), 2)
for x, y in zip(env, copy_construct):
assert x is y
+
+ def test_source_files(self):
+ datadir = join_path(spack_root, 'lib', 'spack',
+ 'spack', 'test', 'data')
+ files = [
+ join_path(datadir, 'sourceme_first.sh'),
+ join_path(datadir, 'sourceme_second.sh')
+ ]
+ env = EnvironmentModifications.from_sourcing_files(*files)
+ modifications = env.group_by_name()
+
+        # This is sensitive to the user's environment; it can include
+        # spurious entries for things like PS1.
+ #
+        # TODO: figure out how to make this a bit more robust.
+ self.assertTrue(len(modifications) >= 4)
+
+ # Set new variables
+ self.assertEqual(len(modifications['NEW_VAR']), 1)
+ self.assertTrue(isinstance(modifications['NEW_VAR'][0], SetEnv))
+ self.assertEqual(modifications['NEW_VAR'][0].value, 'new')
+ # Unset variables
+ self.assertEqual(len(modifications['EMPTY_PATH_LIST']), 1)
+ self.assertTrue(isinstance(
+ modifications['EMPTY_PATH_LIST'][0], UnsetEnv))
+ # Modified variables
+ self.assertEqual(len(modifications['UNSET_ME']), 1)
+ self.assertTrue(isinstance(modifications['UNSET_ME'][0], SetEnv))
+ self.assertEqual(modifications['UNSET_ME'][0].value, 'overridden')
+
+ self.assertEqual(len(modifications['PATH_LIST']), 3)
+ self.assertTrue(
+ isinstance(modifications['PATH_LIST'][0], RemovePath)
+ )
+ self.assertEqual(modifications['PATH_LIST'][0].value, '/path/third')
+ self.assertTrue(
+ isinstance(modifications['PATH_LIST'][1], AppendPath)
+ )
+ self.assertEqual(modifications['PATH_LIST'][1].value, '/path/fourth')
+ self.assertTrue(
+ isinstance(modifications['PATH_LIST'][2], PrependPath)
+ )
+ self.assertEqual(modifications['PATH_LIST'][2].value, '/path/first')
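
test_source_files above exercises EnvironmentModifications.from_sourcing_files, which infers a list of modification objects by diffing the environment before and after sourcing the given scripts. A minimal sketch using the sourceme_first.sh data file added in this commit (assumes a Spack checkout importable on sys.path):

from spack import spack_root
from llnl.util.filesystem import join_path
from spack.environment import EnvironmentModifications, SetEnv

datadir = join_path(spack_root, 'lib', 'spack', 'spack', 'test', 'data')
env = EnvironmentModifications.from_sourcing_files(
    join_path(datadir, 'sourceme_first.sh'))

mods = env.group_by_name()    # {variable name: [modification objects]}
assert isinstance(mods['NEW_VAR'][0], SetEnv)
assert mods['NEW_VAR'][0].value == 'new'
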
diff --git a/lib/spack/spack/test/file_cache.py b/lib/spack/spack/test/file_cache.py
new file mode 100644
index 0000000000..cc66beda2e
--- /dev/null
+++ b/lib/spack/spack/test/file_cache.py
@@ -0,0 +1,83 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""
+Test Spack's FileCache.
+"""
+import os
+import shutil
+import tempfile
+import unittest
+
+from spack.file_cache import FileCache
+
+
+class FileCacheTest(unittest.TestCase):
+ """Ensure that a file cache can properly write to a file and recover its
+ contents."""
+
+ def setUp(self):
+ self.scratch_dir = tempfile.mkdtemp()
+ self.cache = FileCache(self.scratch_dir)
+
+ def tearDown(self):
+ shutil.rmtree(self.scratch_dir)
+
+ def test_write_and_read_cache_file(self):
+ """Test writing then reading a cached file."""
+ with self.cache.write_transaction('test.yaml') as (old, new):
+ self.assertTrue(old is None)
+ self.assertTrue(new is not None)
+ new.write("foobar\n")
+
+ with self.cache.read_transaction('test.yaml') as stream:
+ text = stream.read()
+ self.assertEqual("foobar\n", text)
+
+ def test_remove(self):
+ """Test removing an entry from the cache."""
+ self.test_write_and_write_cache_file()
+
+ self.cache.remove('test.yaml')
+
+ self.assertFalse(os.path.exists(self.cache.cache_path('test.yaml')))
+ self.assertFalse(os.path.exists(self.cache._lock_path('test.yaml')))
+
+ def test_write_and_write_cache_file(self):
+ """Test two write transactions on a cached file."""
+ with self.cache.write_transaction('test.yaml') as (old, new):
+ self.assertTrue(old is None)
+ self.assertTrue(new is not None)
+ new.write("foobar\n")
+
+ with self.cache.write_transaction('test.yaml') as (old, new):
+ self.assertTrue(old is not None)
+ text = old.read()
+ self.assertEqual("foobar\n", text)
+ self.assertTrue(new is not None)
+ new.write("barbaz\n")
+
+ with self.cache.read_transaction('test.yaml') as stream:
+ text = stream.read()
+ self.assertEqual("barbaz\n", text)
diff --git a/lib/spack/spack/test/git_fetch.py b/lib/spack/spack/test/git_fetch.py
index 4de65760d7..0d1a8fe949 100644
--- a/lib/spack/spack/test/git_fetch.py
+++ b/lib/spack/spack/test/git_fetch.py
@@ -87,33 +87,29 @@ class GitFetchTest(MockPackagesTest):
self.assert_rev(rev)
-
def test_fetch_master(self):
"""Test a default git checkout with no commit or tag specified."""
self.try_fetch('master', self.repo.r0_file, {
- 'git' : self.repo.path
+ 'git': self.repo.path
})
-
def test_fetch_branch(self):
"""Test fetching a branch."""
self.try_fetch(self.repo.branch, self.repo.branch_file, {
- 'git' : self.repo.path,
- 'branch' : self.repo.branch
+ 'git': self.repo.path,
+ 'branch': self.repo.branch
})
-
def test_fetch_tag(self):
"""Test fetching a tag."""
self.try_fetch(self.repo.tag, self.repo.tag_file, {
- 'git' : self.repo.path,
- 'tag' : self.repo.tag
+ 'git': self.repo.path,
+ 'tag': self.repo.tag
})
-
def test_fetch_commit(self):
"""Test fetching a particular commit."""
self.try_fetch(self.repo.r1, self.repo.r1_file, {
- 'git' : self.repo.path,
- 'commit' : self.repo.r1
+ 'git': self.repo.path,
+ 'commit': self.repo.r1
})
diff --git a/lib/spack/spack/test/hg_fetch.py b/lib/spack/spack/test/hg_fetch.py
index 292ffba949..44af6730a1 100644
--- a/lib/spack/spack/test/hg_fetch.py
+++ b/lib/spack/spack/test/hg_fetch.py
@@ -83,17 +83,15 @@ class HgFetchTest(MockPackagesTest):
self.assertEqual(self.repo.get_rev(), rev)
-
def test_fetch_default(self):
"""Test a default hg checkout with no commit or tag specified."""
self.try_fetch(self.repo.r1, self.repo.r1_file, {
- 'hg' : self.repo.path
+ 'hg': self.repo.path
})
-
def test_fetch_rev0(self):
"""Test fetching a branch."""
self.try_fetch(self.repo.r0, self.repo.r0_file, {
- 'hg' : self.repo.path,
- 'revision' : self.repo.r0
+ 'hg': self.repo.path,
+ 'revision': self.repo.r0
})
diff --git a/lib/spack/spack/test/install.py b/lib/spack/spack/test/install.py
index cfe6ea9b27..232d5aeeaf 100644
--- a/lib/spack/spack/test/install.py
+++ b/lib/spack/spack/test/install.py
@@ -28,6 +28,7 @@ import tempfile
import spack
from llnl.util.filesystem import *
from spack.directory_layout import YamlDirectoryLayout
+from spack.database import Database
from spack.fetch_strategy import URLFetchStrategy, FetchStrategyComposite
from spack.test.mock_packages_test import *
from spack.test.mock_repo import MockArchive
@@ -49,8 +50,10 @@ class InstallTest(MockPackagesTest):
# installed pkgs and mock packages.
self.tmpdir = tempfile.mkdtemp()
self.orig_layout = spack.install_layout
- spack.install_layout = YamlDirectoryLayout(self.tmpdir)
+ self.orig_db = spack.installed_db
+ spack.install_layout = YamlDirectoryLayout(self.tmpdir)
+ spack.installed_db = Database(self.tmpdir)
def tearDown(self):
super(InstallTest, self).tearDown()
@@ -61,17 +64,16 @@ class InstallTest(MockPackagesTest):
# restore spack's layout.
spack.install_layout = self.orig_layout
+ spack.installed_db = self.orig_db
shutil.rmtree(self.tmpdir, ignore_errors=True)
-
def fake_fetchify(self, pkg):
"""Fake the URL for a package so it downloads from a file."""
fetcher = FetchStrategyComposite()
fetcher.append(URLFetchStrategy(self.repo.url))
pkg.fetcher = fetcher
-
- def ztest_install_and_uninstall(self):
+ def test_install_and_uninstall(self):
# Get a basic concrete spec for the trivial install package.
spec = Spec('trivial_install_test_package')
spec.concretize()
@@ -85,11 +87,10 @@ class InstallTest(MockPackagesTest):
try:
pkg.do_install()
pkg.do_uninstall()
- except Exception, e:
+ except Exception:
pkg.remove_prefix()
raise
-
def test_install_environment(self):
spec = Spec('cmake-client').concretized()
@@ -99,6 +100,6 @@ class InstallTest(MockPackagesTest):
pkg = spec.package
try:
pkg.do_install()
- except Exception, e:
+ except Exception:
pkg.remove_prefix()
raise
diff --git a/lib/spack/spack/test/link_tree.py b/lib/spack/spack/test/link_tree.py
index de40991b57..5d0a7430b6 100644
--- a/lib/spack/spack/test/link_tree.py
+++ b/lib/spack/spack/test/link_tree.py
@@ -53,16 +53,13 @@ class LinkTreeTest(unittest.TestCase):
def tearDown(self):
self.stage.destroy()
-
def check_file_link(self, filename):
self.assertTrue(os.path.isfile(filename))
self.assertTrue(os.path.islink(filename))
-
def check_dir(self, filename):
self.assertTrue(os.path.isdir(filename))
-
def test_merge_to_new_directory(self):
with working_dir(self.stage.path):
self.link_tree.merge('dest')
@@ -79,7 +76,6 @@ class LinkTreeTest(unittest.TestCase):
self.assertFalse(os.path.exists('dest'))
-
def test_merge_to_existing_directory(self):
with working_dir(self.stage.path):
@@ -112,7 +108,6 @@ class LinkTreeTest(unittest.TestCase):
self.assertFalse(os.path.isfile('dest/c/d/6'))
self.assertFalse(os.path.isfile('dest/c/d/e/7'))
-
def test_merge_with_empty_directories(self):
with working_dir(self.stage.path):
mkdirp('dest/f/g')
@@ -132,7 +127,6 @@ class LinkTreeTest(unittest.TestCase):
self.assertTrue(os.path.isdir('dest/a/b/h'))
self.assertTrue(os.path.isdir('dest/f/g'))
-
def test_ignore(self):
with working_dir(self.stage.path):
touchp('source/.spec')
diff --git a/lib/spack/spack/test/lock.py b/lib/spack/spack/test/lock.py
index 0e9f6daf4d..fb96539897 100644
--- a/lib/spack/spack/test/lock.py
+++ b/lib/spack/spack/test/lock.py
@@ -46,21 +46,21 @@ class LockTest(unittest.TestCase):
self.lock_path = join_path(self.tempdir, 'lockfile')
touch(self.lock_path)
-
def tearDown(self):
- shutil.rmtree(self.tempdir, ignore_errors=True)
-
+ shutil.rmtree(self.tempdir, ignore_errors=True)
def multiproc_test(self, *functions):
"""Order some processes using simple barrier synchronization."""
b = Barrier(len(functions), timeout=barrier_timeout)
procs = [Process(target=f, args=(b,)) for f in functions]
- for p in procs: p.start()
+
+ for p in procs:
+ p.start()
+
for p in procs:
p.join()
self.assertEqual(p.exitcode, 0)
-
#
# Process snippets below can be composed into tests.
#
@@ -68,27 +68,26 @@ class LockTest(unittest.TestCase):
lock = Lock(self.lock_path)
lock.acquire_write() # grab exclusive lock
barrier.wait()
- barrier.wait() # hold the lock until exception raises in other procs.
+ barrier.wait() # hold the lock until exception raises in other procs.
def acquire_read(self, barrier):
lock = Lock(self.lock_path)
lock.acquire_read() # grab shared lock
barrier.wait()
- barrier.wait() # hold the lock until exception raises in other procs.
+ barrier.wait() # hold the lock until exception raises in other procs.
def timeout_write(self, barrier):
lock = Lock(self.lock_path)
- barrier.wait() # wait for lock acquire in first process
+ barrier.wait() # wait for lock acquire in first process
self.assertRaises(LockError, lock.acquire_write, 0.1)
barrier.wait()
def timeout_read(self, barrier):
lock = Lock(self.lock_path)
- barrier.wait() # wait for lock acquire in first process
+ barrier.wait() # wait for lock acquire in first process
self.assertRaises(LockError, lock.acquire_read, 0.1)
barrier.wait()
-
#
# Test that exclusive locks on other processes time out when an
# exclusive lock is held.
@@ -97,11 +96,13 @@ class LockTest(unittest.TestCase):
self.multiproc_test(self.acquire_write, self.timeout_write)
def test_write_lock_timeout_on_write_2(self):
- self.multiproc_test(self.acquire_write, self.timeout_write, self.timeout_write)
+ self.multiproc_test(
+ self.acquire_write, self.timeout_write, self.timeout_write)
def test_write_lock_timeout_on_write_3(self):
- self.multiproc_test(self.acquire_write, self.timeout_write, self.timeout_write, self.timeout_write)
-
+ self.multiproc_test(
+ self.acquire_write, self.timeout_write, self.timeout_write,
+ self.timeout_write)
#
# Test that shared locks on other processes time out when an
@@ -111,11 +112,13 @@ class LockTest(unittest.TestCase):
self.multiproc_test(self.acquire_write, self.timeout_read)
def test_read_lock_timeout_on_write_2(self):
- self.multiproc_test(self.acquire_write, self.timeout_read, self.timeout_read)
+ self.multiproc_test(
+ self.acquire_write, self.timeout_read, self.timeout_read)
def test_read_lock_timeout_on_write_3(self):
- self.multiproc_test(self.acquire_write, self.timeout_read, self.timeout_read, self.timeout_read)
-
+ self.multiproc_test(
+ self.acquire_write, self.timeout_read, self.timeout_read,
+ self.timeout_read)
#
# Test that exclusive locks time out when shared locks are held.
@@ -124,27 +127,35 @@ class LockTest(unittest.TestCase):
self.multiproc_test(self.acquire_read, self.timeout_write)
def test_write_lock_timeout_on_read_2(self):
- self.multiproc_test(self.acquire_read, self.timeout_write, self.timeout_write)
+ self.multiproc_test(
+ self.acquire_read, self.timeout_write, self.timeout_write)
def test_write_lock_timeout_on_read_3(self):
- self.multiproc_test(self.acquire_read, self.timeout_write, self.timeout_write, self.timeout_write)
-
+ self.multiproc_test(
+ self.acquire_read, self.timeout_write, self.timeout_write,
+ self.timeout_write)
#
     # Test that exclusive locks time out while lots of shared locks are held.
#
def test_write_lock_timeout_with_multiple_readers_2_1(self):
- self.multiproc_test(self.acquire_read, self.acquire_read, self.timeout_write)
+ self.multiproc_test(
+ self.acquire_read, self.acquire_read, self.timeout_write)
def test_write_lock_timeout_with_multiple_readers_2_2(self):
- self.multiproc_test(self.acquire_read, self.acquire_read, self.timeout_write, self.timeout_write)
+ self.multiproc_test(
+ self.acquire_read, self.acquire_read, self.timeout_write,
+ self.timeout_write)
def test_write_lock_timeout_with_multiple_readers_3_1(self):
- self.multiproc_test(self.acquire_read, self.acquire_read, self.acquire_read, self.timeout_write)
+ self.multiproc_test(
+ self.acquire_read, self.acquire_read, self.acquire_read,
+ self.timeout_write)
def test_write_lock_timeout_with_multiple_readers_3_2(self):
- self.multiproc_test(self.acquire_read, self.acquire_read, self.acquire_read, self.timeout_write, self.timeout_write)
-
+ self.multiproc_test(
+ self.acquire_read, self.acquire_read, self.acquire_read,
+ self.timeout_write, self.timeout_write)
#
# Longer test case that ensures locks are reusable. Ordering is
@@ -155,110 +166,283 @@ class LockTest(unittest.TestCase):
lock = Lock(self.lock_path)
lock.acquire_write()
- barrier.wait() # ---------------------------------------- 1
+ barrier.wait() # ---------------------------------------- 1
# others test timeout
- barrier.wait() # ---------------------------------------- 2
+ barrier.wait() # ---------------------------------------- 2
lock.release_write() # release and others acquire read
- barrier.wait() # ---------------------------------------- 3
+ barrier.wait() # ---------------------------------------- 3
self.assertRaises(LockError, lock.acquire_write, 0.1)
lock.acquire_read()
- barrier.wait() # ---------------------------------------- 4
+ barrier.wait() # ---------------------------------------- 4
lock.release_read()
- barrier.wait() # ---------------------------------------- 5
+ barrier.wait() # ---------------------------------------- 5
# p2 upgrades read to write
- barrier.wait() # ---------------------------------------- 6
+ barrier.wait() # ---------------------------------------- 6
self.assertRaises(LockError, lock.acquire_write, 0.1)
self.assertRaises(LockError, lock.acquire_read, 0.1)
- barrier.wait() # ---------------------------------------- 7
+ barrier.wait() # ---------------------------------------- 7
# p2 releases write and read
- barrier.wait() # ---------------------------------------- 8
+ barrier.wait() # ---------------------------------------- 8
# p3 acquires read
- barrier.wait() # ---------------------------------------- 9
+ barrier.wait() # ---------------------------------------- 9
# p3 upgrades read to write
- barrier.wait() # ---------------------------------------- 10
+ barrier.wait() # ---------------------------------------- 10
self.assertRaises(LockError, lock.acquire_write, 0.1)
self.assertRaises(LockError, lock.acquire_read, 0.1)
- barrier.wait() # ---------------------------------------- 11
+ barrier.wait() # ---------------------------------------- 11
# p3 releases locks
- barrier.wait() # ---------------------------------------- 12
+ barrier.wait() # ---------------------------------------- 12
lock.acquire_read()
- barrier.wait() # ---------------------------------------- 13
+ barrier.wait() # ---------------------------------------- 13
lock.release_read()
-
def p2(barrier):
lock = Lock(self.lock_path)
# p1 acquires write
- barrier.wait() # ---------------------------------------- 1
+ barrier.wait() # ---------------------------------------- 1
self.assertRaises(LockError, lock.acquire_write, 0.1)
self.assertRaises(LockError, lock.acquire_read, 0.1)
- barrier.wait() # ---------------------------------------- 2
+ barrier.wait() # ---------------------------------------- 2
lock.acquire_read()
- barrier.wait() # ---------------------------------------- 3
+ barrier.wait() # ---------------------------------------- 3
# p1 tests shared read
- barrier.wait() # ---------------------------------------- 4
+ barrier.wait() # ---------------------------------------- 4
# others release reads
- barrier.wait() # ---------------------------------------- 5
+ barrier.wait() # ---------------------------------------- 5
- lock.acquire_write() # upgrade read to write
- barrier.wait() # ---------------------------------------- 6
+ lock.acquire_write() # upgrade read to write
+ barrier.wait() # ---------------------------------------- 6
# others test timeout
- barrier.wait() # ---------------------------------------- 7
+ barrier.wait() # ---------------------------------------- 7
lock.release_write() # release read AND write (need both)
lock.release_read()
- barrier.wait() # ---------------------------------------- 8
+ barrier.wait() # ---------------------------------------- 8
# p3 acquires read
- barrier.wait() # ---------------------------------------- 9
+ barrier.wait() # ---------------------------------------- 9
# p3 upgrades read to write
- barrier.wait() # ---------------------------------------- 10
+ barrier.wait() # ---------------------------------------- 10
self.assertRaises(LockError, lock.acquire_write, 0.1)
self.assertRaises(LockError, lock.acquire_read, 0.1)
- barrier.wait() # ---------------------------------------- 11
+ barrier.wait() # ---------------------------------------- 11
# p3 releases locks
- barrier.wait() # ---------------------------------------- 12
+ barrier.wait() # ---------------------------------------- 12
lock.acquire_read()
- barrier.wait() # ---------------------------------------- 13
+ barrier.wait() # ---------------------------------------- 13
lock.release_read()
-
def p3(barrier):
lock = Lock(self.lock_path)
# p1 acquires write
- barrier.wait() # ---------------------------------------- 1
+ barrier.wait() # ---------------------------------------- 1
self.assertRaises(LockError, lock.acquire_write, 0.1)
self.assertRaises(LockError, lock.acquire_read, 0.1)
- barrier.wait() # ---------------------------------------- 2
+ barrier.wait() # ---------------------------------------- 2
lock.acquire_read()
- barrier.wait() # ---------------------------------------- 3
+ barrier.wait() # ---------------------------------------- 3
# p1 tests shared read
- barrier.wait() # ---------------------------------------- 4
+ barrier.wait() # ---------------------------------------- 4
lock.release_read()
- barrier.wait() # ---------------------------------------- 5
+ barrier.wait() # ---------------------------------------- 5
# p2 upgrades read to write
- barrier.wait() # ---------------------------------------- 6
+ barrier.wait() # ---------------------------------------- 6
self.assertRaises(LockError, lock.acquire_write, 0.1)
self.assertRaises(LockError, lock.acquire_read, 0.1)
- barrier.wait() # ---------------------------------------- 7
+ barrier.wait() # ---------------------------------------- 7
# p2 releases write & read
- barrier.wait() # ---------------------------------------- 8
+ barrier.wait() # ---------------------------------------- 8
lock.acquire_read()
- barrier.wait() # ---------------------------------------- 9
+ barrier.wait() # ---------------------------------------- 9
lock.acquire_write()
- barrier.wait() # ---------------------------------------- 10
+ barrier.wait() # ---------------------------------------- 10
# others test timeout
- barrier.wait() # ---------------------------------------- 11
+ barrier.wait() # ---------------------------------------- 11
lock.release_read() # release read AND write in opposite
lock.release_write() # order from before on p2
- barrier.wait() # ---------------------------------------- 12
+ barrier.wait() # ---------------------------------------- 12
lock.acquire_read()
- barrier.wait() # ---------------------------------------- 13
+ barrier.wait() # ---------------------------------------- 13
lock.release_read()
self.multiproc_test(p1, p2, p3)
+
+ def test_transaction(self):
+ def enter_fn():
+ vals['entered'] = True
+
+ def exit_fn(t, v, tb):
+ vals['exited'] = True
+ vals['exception'] = (t or v or tb)
+
+ lock = Lock(self.lock_path)
+ vals = {'entered': False, 'exited': False, 'exception': False}
+ with ReadTransaction(lock, enter_fn, exit_fn):
+ pass
+
+ self.assertTrue(vals['entered'])
+ self.assertTrue(vals['exited'])
+ self.assertFalse(vals['exception'])
+
+ vals = {'entered': False, 'exited': False, 'exception': False}
+ with WriteTransaction(lock, enter_fn, exit_fn):
+ pass
+
+ self.assertTrue(vals['entered'])
+ self.assertTrue(vals['exited'])
+ self.assertFalse(vals['exception'])
+
+ def test_transaction_with_exception(self):
+ def enter_fn():
+ vals['entered'] = True
+
+ def exit_fn(t, v, tb):
+ vals['exited'] = True
+ vals['exception'] = (t or v or tb)
+
+ lock = Lock(self.lock_path)
+
+ def do_read_with_exception():
+ with ReadTransaction(lock, enter_fn, exit_fn):
+ raise Exception()
+
+ def do_write_with_exception():
+ with WriteTransaction(lock, enter_fn, exit_fn):
+ raise Exception()
+
+ vals = {'entered': False, 'exited': False, 'exception': False}
+ self.assertRaises(Exception, do_read_with_exception)
+ self.assertTrue(vals['entered'])
+ self.assertTrue(vals['exited'])
+ self.assertTrue(vals['exception'])
+
+ vals = {'entered': False, 'exited': False, 'exception': False}
+ self.assertRaises(Exception, do_write_with_exception)
+ self.assertTrue(vals['entered'])
+ self.assertTrue(vals['exited'])
+ self.assertTrue(vals['exception'])
+
+ def test_transaction_with_context_manager(self):
+ class TestContextManager(object):
+
+ def __enter__(self):
+ vals['entered'] = True
+
+ def __exit__(self, t, v, tb):
+ vals['exited'] = True
+ vals['exception'] = (t or v or tb)
+
+ def exit_fn(t, v, tb):
+ vals['exited_fn'] = True
+ vals['exception_fn'] = (t or v or tb)
+
+ lock = Lock(self.lock_path)
+
+ vals = {'entered': False, 'exited': False, 'exited_fn': False,
+ 'exception': False, 'exception_fn': False}
+ with ReadTransaction(lock, TestContextManager, exit_fn):
+ pass
+
+ self.assertTrue(vals['entered'])
+ self.assertTrue(vals['exited'])
+ self.assertFalse(vals['exception'])
+ self.assertTrue(vals['exited_fn'])
+ self.assertFalse(vals['exception_fn'])
+
+ vals = {'entered': False, 'exited': False, 'exited_fn': False,
+ 'exception': False, 'exception_fn': False}
+ with ReadTransaction(lock, TestContextManager):
+ pass
+
+ self.assertTrue(vals['entered'])
+ self.assertTrue(vals['exited'])
+ self.assertFalse(vals['exception'])
+ self.assertFalse(vals['exited_fn'])
+ self.assertFalse(vals['exception_fn'])
+
+ vals = {'entered': False, 'exited': False, 'exited_fn': False,
+ 'exception': False, 'exception_fn': False}
+ with WriteTransaction(lock, TestContextManager, exit_fn):
+ pass
+
+ self.assertTrue(vals['entered'])
+ self.assertTrue(vals['exited'])
+ self.assertFalse(vals['exception'])
+ self.assertTrue(vals['exited_fn'])
+ self.assertFalse(vals['exception_fn'])
+
+ vals = {'entered': False, 'exited': False, 'exited_fn': False,
+ 'exception': False, 'exception_fn': False}
+ with WriteTransaction(lock, TestContextManager):
+ pass
+
+ self.assertTrue(vals['entered'])
+ self.assertTrue(vals['exited'])
+ self.assertFalse(vals['exception'])
+ self.assertFalse(vals['exited_fn'])
+ self.assertFalse(vals['exception_fn'])
+
+ def test_transaction_with_context_manager_and_exception(self):
+ class TestContextManager(object):
+
+ def __enter__(self):
+ vals['entered'] = True
+
+ def __exit__(self, t, v, tb):
+ vals['exited'] = True
+ vals['exception'] = (t or v or tb)
+
+ def exit_fn(t, v, tb):
+ vals['exited_fn'] = True
+ vals['exception_fn'] = (t or v or tb)
+
+ lock = Lock(self.lock_path)
+
+ def do_read_with_exception(exit_fn):
+ with ReadTransaction(lock, TestContextManager, exit_fn):
+ raise Exception()
+
+ def do_write_with_exception(exit_fn):
+ with WriteTransaction(lock, TestContextManager, exit_fn):
+ raise Exception()
+
+ vals = {'entered': False, 'exited': False, 'exited_fn': False,
+ 'exception': False, 'exception_fn': False}
+ self.assertRaises(Exception, do_read_with_exception, exit_fn)
+ self.assertTrue(vals['entered'])
+ self.assertTrue(vals['exited'])
+ self.assertTrue(vals['exception'])
+ self.assertTrue(vals['exited_fn'])
+ self.assertTrue(vals['exception_fn'])
+
+ vals = {'entered': False, 'exited': False, 'exited_fn': False,
+ 'exception': False, 'exception_fn': False}
+ self.assertRaises(Exception, do_read_with_exception, None)
+ self.assertTrue(vals['entered'])
+ self.assertTrue(vals['exited'])
+ self.assertTrue(vals['exception'])
+ self.assertFalse(vals['exited_fn'])
+ self.assertFalse(vals['exception_fn'])
+
+ vals = {'entered': False, 'exited': False, 'exited_fn': False,
+ 'exception': False, 'exception_fn': False}
+ self.assertRaises(Exception, do_write_with_exception, exit_fn)
+ self.assertTrue(vals['entered'])
+ self.assertTrue(vals['exited'])
+ self.assertTrue(vals['exception'])
+ self.assertTrue(vals['exited_fn'])
+ self.assertTrue(vals['exception_fn'])
+
+ vals = {'entered': False, 'exited': False, 'exited_fn': False,
+ 'exception': False, 'exception_fn': False}
+ self.assertRaises(Exception, do_write_with_exception, None)
+ self.assertTrue(vals['entered'])
+ self.assertTrue(vals['exited'])
+ self.assertTrue(vals['exception'])
+ self.assertFalse(vals['exited_fn'])
+ self.assertFalse(vals['exception_fn'])
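
The transaction tests added above show that ReadTransaction and WriteTransaction wrap a Lock and take optional acquire/release hooks, either plain callables or a context-manager class; the release hook receives the exception triple, which is truthy only if the body raised. A sketch under those semantics (lock module path as used by this test file):

import os
import tempfile
from llnl.util.lock import Lock, ReadTransaction

lock_path = os.path.join(tempfile.mkdtemp(), 'lockfile')
open(lock_path, 'a').close()    # the tests touch the lock file first

def on_acquire():
    pass                        # runs once the read lock is held

def on_release(t, v, tb):
    pass                        # (t or v or tb) is truthy on exception

with ReadTransaction(Lock(lock_path), on_acquire, on_release):
    pass                        # read-locked critical section
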
diff --git a/lib/spack/spack/test/make_executable.py b/lib/spack/spack/test/make_executable.py
index b7a45a3f72..87a43a529a 100644
--- a/lib/spack/spack/test/make_executable.py
+++ b/lib/spack/spack/test/make_executable.py
@@ -38,6 +38,7 @@ from spack.util.environment import path_put_first
class MakeExecutableTest(unittest.TestCase):
+
def setUp(self):
self.tmpdir = tempfile.mkdtemp()
@@ -49,34 +50,30 @@ class MakeExecutableTest(unittest.TestCase):
path_put_first('PATH', [self.tmpdir])
-
def tearDown(self):
shutil.rmtree(self.tmpdir)
-
def test_make_normal(self):
make = MakeExecutable('make', 8)
self.assertEqual(make(output=str).strip(), '-j8')
self.assertEqual(make('install', output=str).strip(), '-j8 install')
-
def test_make_explicit(self):
make = MakeExecutable('make', 8)
self.assertEqual(make(parallel=True, output=str).strip(), '-j8')
- self.assertEqual(make('install', parallel=True, output=str).strip(), '-j8 install')
-
+ self.assertEqual(make('install', parallel=True,
+ output=str).strip(), '-j8 install')
def test_make_one_job(self):
make = MakeExecutable('make', 1)
self.assertEqual(make(output=str).strip(), '')
self.assertEqual(make('install', output=str).strip(), 'install')
-
def test_make_parallel_false(self):
make = MakeExecutable('make', 8)
self.assertEqual(make(parallel=False, output=str).strip(), '')
- self.assertEqual(make('install', parallel=False, output=str).strip(), 'install')
-
+ self.assertEqual(make('install', parallel=False,
+ output=str).strip(), 'install')
def test_make_parallel_disabled(self):
make = MakeExecutable('make', 8)
@@ -100,26 +97,29 @@ class MakeExecutableTest(unittest.TestCase):
del os.environ['SPACK_NO_PARALLEL_MAKE']
-
def test_make_parallel_precedence(self):
make = MakeExecutable('make', 8)
# These should work
os.environ['SPACK_NO_PARALLEL_MAKE'] = 'true'
self.assertEqual(make(parallel=True, output=str).strip(), '')
- self.assertEqual(make('install', parallel=True, output=str).strip(), 'install')
+ self.assertEqual(make('install', parallel=True,
+ output=str).strip(), 'install')
os.environ['SPACK_NO_PARALLEL_MAKE'] = '1'
self.assertEqual(make(parallel=True, output=str).strip(), '')
- self.assertEqual(make('install', parallel=True, output=str).strip(), 'install')
+ self.assertEqual(make('install', parallel=True,
+ output=str).strip(), 'install')
# These don't disable (false and random string)
os.environ['SPACK_NO_PARALLEL_MAKE'] = 'false'
self.assertEqual(make(parallel=True, output=str).strip(), '-j8')
- self.assertEqual(make('install', parallel=True, output=str).strip(), '-j8 install')
+ self.assertEqual(make('install', parallel=True,
+ output=str).strip(), '-j8 install')
os.environ['SPACK_NO_PARALLEL_MAKE'] = 'foobar'
self.assertEqual(make(parallel=True, output=str).strip(), '-j8')
- self.assertEqual(make('install', parallel=True, output=str).strip(), '-j8 install')
+ self.assertEqual(make('install', parallel=True,
+ output=str).strip(), '-j8 install')
del os.environ['SPACK_NO_PARALLEL_MAKE']
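
The assertions above fix the precedence rules for parallel make: an explicit parallel=False always wins, and SPACK_NO_PARALLEL_MAKE set to 'true' or '1' disables -jN even when parallel=True, while any other value is ignored. A sketch of those rules; it assumes a stub make on PATH that echoes its arguments (as MakeExecutableTest.setUp installs), and MakeExecutable's 2016-era home in spack.build_environment is an assumption here, since its import sits outside this hunk:

import os
from spack.build_environment import MakeExecutable

make = MakeExecutable('make', 8)
assert make(output=str).strip() == '-j8'

os.environ['SPACK_NO_PARALLEL_MAKE'] = 'true'   # 'true'/'1' disable -jN
assert make(parallel=True, output=str).strip() == ''
del os.environ['SPACK_NO_PARALLEL_MAKE']
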
diff --git a/lib/spack/spack/test/mirror.py b/lib/spack/spack/test/mirror.py
index b682d4e097..d6d7b30b7c 100644
--- a/lib/spack/spack/test/mirror.py
+++ b/lib/spack/spack/test/mirror.py
@@ -35,6 +35,7 @@ exclude = ['.hg', '.git', '.svn']
class MirrorTest(MockPackagesTest):
+
def setUp(self):
"""Sets up a mock package and a mock repo for each fetch strategy, to
ensure that the mirror can create archives for each of them.
@@ -42,7 +43,6 @@ class MirrorTest(MockPackagesTest):
super(MirrorTest, self).setUp()
self.repos = {}
-
def tearDown(self):
"""Destroy all the stages created by the repos in setup."""
super(MirrorTest, self).tearDown()
@@ -50,7 +50,6 @@ class MirrorTest(MockPackagesTest):
repo.destroy()
self.repos.clear()
-
def set_up_package(self, name, MockRepoClass, url_attr):
"""Set up a mock package to be mirrored.
Each package needs us to:
@@ -71,16 +70,14 @@ class MirrorTest(MockPackagesTest):
v = next(iter(pkg.versions))
pkg.versions[v][url_attr] = repo.url
-
def check_mirror(self):
with Stage('spack-mirror-test') as stage:
mirror_root = join_path(stage.path, 'test-mirror')
# register mirror with spack config
- mirrors = { 'spack-mirror-test' : 'file://' + mirror_root }
+ mirrors = {'spack-mirror-test': 'file://' + mirror_root}
spack.config.update_config('mirrors', mirrors)
-
os.chdir(stage.path)
spack.mirror.create(
mirror_root, self.repos, no_checksum=True)
@@ -110,16 +107,18 @@ class MirrorTest(MockPackagesTest):
original_path = mock_repo.path
if 'svn' in name:
# have to check out the svn repo to compare.
- original_path = join_path(mock_repo.path, 'checked_out')
+ original_path = join_path(
+ mock_repo.path, 'checked_out')
svn('checkout', mock_repo.url, original_path)
dcmp = dircmp(original_path, pkg.stage.source_path)
- # make sure there are no new files in the expanded tarball
+ # make sure there are no new files in the expanded
+ # tarball
self.assertFalse(dcmp.right_only)
# and that all original files are present.
- self.assertTrue(all(l in exclude for l in dcmp.left_only))
+ self.assertTrue(
+ all(l in exclude for l in dcmp.left_only))
spack.do_checksum = saved_checksum_setting
-
def test_git_mirror(self):
self.set_up_package('git-test', MockGitRepo, 'git')
self.check_mirror()
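
check_mirror above registers its test mirror through the regular config mechanism; the scope argument to update_config is optional there. A short sketch with a hypothetical local path:

import spack.config

mirror_root = '/tmp/test-mirror'    # hypothetical location
spack.config.update_config(
    'mirrors', {'spack-mirror-test': 'file://' + mirror_root})
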
diff --git a/lib/spack/spack/test/mock_database.py b/lib/spack/spack/test/mock_database.py
index b1194f2451..d5867f06ec 100644
--- a/lib/spack/spack/test/mock_database.py
+++ b/lib/spack/spack/test/mock_database.py
@@ -33,6 +33,7 @@ from spack.test.mock_packages_test import MockPackagesTest
class MockDatabase(MockPackagesTest):
+
def _mock_install(self, spec):
s = Spec(spec)
s.concretize()
@@ -95,8 +96,10 @@ class MockDatabase(MockPackagesTest):
self._mock_install('mpileaks ^zmpi')
def tearDown(self):
- for spec in spack.installed_db.query():
- spec.package.do_uninstall(spec)
+ with spack.installed_db.write_transaction():
+ for spec in spack.installed_db.query():
+ spec.package.do_uninstall(spec)
+
super(MockDatabase, self).tearDown()
shutil.rmtree(self.install_path)
spack.install_path = self.spack_install_path
diff --git a/lib/spack/spack/test/mock_packages_test.py b/lib/spack/spack/test/mock_packages_test.py
index 595667bf35..82c2712b0e 100644
--- a/lib/spack/spack/test/mock_packages_test.py
+++ b/lib/spack/spack/test/mock_packages_test.py
@@ -34,20 +34,109 @@ from ordereddict_backport import OrderedDict
from spack.repository import RepoPath
from spack.spec import Spec
+platform = spack.architecture.platform()
+
+linux_os_name = 'debian'
+linux_os_version = '6'
+
+if platform.name == 'linux':
+ linux_os = platform.operating_system("default_os")
+ linux_os_name = linux_os.name
+ linux_os_version = linux_os.version
+
mock_compiler_config = """\
compilers:
- all:
- clang@3.3:
+- compiler:
+ spec: clang@3.3
+ operating_system: {0}{1}
+ paths:
cc: /path/to/clang
cxx: /path/to/clang++
f77: None
fc: None
- gcc@4.5.0:
+ modules: 'None'
+- compiler:
+ spec: gcc@4.5.0
+ operating_system: {0}{1}
+ paths:
+ cc: /path/to/gcc
+ cxx: /path/to/g++
+ f77: None
+ fc: None
+ modules: 'None'
+- compiler:
+ spec: clang@3.3
+ operating_system: CNL10
+ paths:
+ cc: /path/to/clang
+ cxx: /path/to/clang++
+ f77: None
+ fc: None
+ modules: 'None'
+- compiler:
+ spec: clang@3.3
+ operating_system: SuSE11
+ paths:
+ cc: /path/to/clang
+ cxx: /path/to/clang++
+ f77: None
+ fc: None
+ modules: 'None'
+- compiler:
+ spec: clang@3.3
+ operating_system: yosemite
+ paths:
+ cc: /path/to/clang
+ cxx: /path/to/clang++
+ f77: None
+ fc: None
+ modules: 'None'
+- compiler:
+ paths:
cc: /path/to/gcc
cxx: /path/to/g++
f77: /path/to/gfortran
fc: /path/to/gfortran
-"""
+ operating_system: CNL10
+ spec: gcc@4.5.0
+ modules: 'None'
+- compiler:
+ paths:
+ cc: /path/to/gcc
+ cxx: /path/to/g++
+ f77: /path/to/gfortran
+ fc: /path/to/gfortran
+ operating_system: SuSE11
+ spec: gcc@4.5.0
+ modules: 'None'
+- compiler:
+ paths:
+ cc: /path/to/gcc
+ cxx: /path/to/g++
+ f77: /path/to/gfortran
+ fc: /path/to/gfortran
+ operating_system: yosemite
+ spec: gcc@4.5.0
+ modules: 'None'
+- compiler:
+ paths:
+ cc: /path/to/gcc
+ cxx: /path/to/g++
+ f77: /path/to/gfortran
+ fc: /path/to/gfortran
+ operating_system: elcapitan
+ spec: gcc@4.5.0
+ modules: 'None'
+- compiler:
+ spec: clang@3.3
+ operating_system: elcapitan
+ paths:
+ cc: /path/to/clang
+ cxx: /path/to/clang++
+ f77: None
+ fc: None
+ modules: 'None'
+""".format(linux_os_name, linux_os_version)
mock_packages_config = """\
packages:
@@ -60,9 +149,15 @@ packages:
paths:
externalvirtual@2.0%clang@3.3: /path/to/external_virtual_clang
externalvirtual@1.0%gcc@4.5.0: /path/to/external_virtual_gcc
+ externalmodule:
+ buildable: False
+ modules:
+ externalmodule@1.0%gcc@4.5.0: external-module
"""
+
class MockPackagesTest(unittest.TestCase):
+
def initmock(self):
# Use the mock packages database for these tests. This allows
# us to set up contrived packages that don't interfere with
@@ -79,7 +174,8 @@ class MockPackagesTest(unittest.TestCase):
self.mock_user_config = os.path.join(self.temp_config, 'user')
mkdirp(self.mock_site_config)
mkdirp(self.mock_user_config)
- for confs in [('compilers.yaml', mock_compiler_config), ('packages.yaml', mock_packages_config)]:
+ for confs in [('compilers.yaml', mock_compiler_config),
+ ('packages.yaml', mock_packages_config)]:
conf_yaml = os.path.join(self.mock_site_config, confs[0])
with open(conf_yaml, 'w') as f:
f.write(confs[1])
@@ -90,12 +186,15 @@ class MockPackagesTest(unittest.TestCase):
spack.config.ConfigScope('site', self.mock_site_config)
spack.config.ConfigScope('user', self.mock_user_config)
+ # Keep tests from interfering with the actual module path.
+ self.real_share_path = spack.share_path
+ spack.share_path = tempfile.mkdtemp()
+
# Store changes to the package's dependencies so we can
# restore later.
self.saved_deps = {}
-
- def set_pkg_dep(self, pkg_name, spec):
+ def set_pkg_dep(self, pkg_name, spec, deptypes=spack.alldeps):
"""Alters dependence information for a package.
Adds a dependency on <spec> to pkg.
@@ -109,8 +208,9 @@ class MockPackagesTest(unittest.TestCase):
self.saved_deps[pkg_name] = (pkg, pkg.dependencies.copy())
# Change dep spec
- pkg.dependencies[spec.name] = { Spec(pkg_name) : spec }
-
+ # XXX(deptype): handle deptypes.
+ pkg.dependencies[spec.name] = {Spec(pkg_name): spec}
+ pkg._deptypes[spec.name] = set(deptypes)
def cleanmock(self):
"""Restore the real packages path after any test."""
@@ -119,15 +219,17 @@ class MockPackagesTest(unittest.TestCase):
shutil.rmtree(self.temp_config, ignore_errors=True)
spack.config.clear_config_caches()
+ # XXX(deptype): handle deptypes.
# Restore dependency changes that happened during the test
for pkg_name, (pkg, deps) in self.saved_deps.items():
pkg.dependencies.clear()
pkg.dependencies.update(deps)
+ shutil.rmtree(spack.share_path, ignore_errors=True)
+ spack.share_path = self.real_share_path
def setUp(self):
self.initmock()
-
def tearDown(self):
self.cleanmock()
diff --git a/lib/spack/spack/test/mock_repo.py b/lib/spack/spack/test/mock_repo.py
index a8098b8eec..0ae7dbd516 100644
--- a/lib/spack/spack/test/mock_repo.py
+++ b/lib/spack/spack/test/mock_repo.py
@@ -40,6 +40,7 @@ tar = which('tar', required=True)
class MockRepo(object):
+
def __init__(self, stage_name, repo_name):
"""This creates a stage where some archive/repo files can be staged
for testing spack's fetch strategies."""
@@ -50,7 +51,6 @@ class MockRepo(object):
self.path = join_path(self.stage.path, repo_name)
mkdirp(self.path)
-
def destroy(self):
"""Destroy resources associated with this mock repo."""
if self.stage:
@@ -90,6 +90,7 @@ class MockArchive(MockRepo):
class MockVCSRepo(MockRepo):
+
def __init__(self, stage_name, repo_name):
"""This creates a stage and a repo directory within the stage."""
super(MockVCSRepo, self).__init__(stage_name, repo_name)
@@ -100,9 +101,12 @@ class MockVCSRepo(MockRepo):
class MockGitRepo(MockVCSRepo):
+
def __init__(self):
super(MockGitRepo, self).__init__('mock-git-stage', 'mock-git-repo')
+ self.url = 'file://' + self.path
+
with working_dir(self.path):
git('init')
@@ -140,13 +144,12 @@ class MockGitRepo(MockVCSRepo):
self.r1 = self.rev_hash(self.branch)
self.r1_file = self.branch_file
- self.url = self.path
-
def rev_hash(self, rev):
return git('rev-parse', rev, output=str).strip()
class MockSvnRepo(MockVCSRepo):
+
def __init__(self):
super(MockSvnRepo, self).__init__('mock-svn-stage', 'mock-svn-repo')
@@ -176,6 +179,7 @@ class MockSvnRepo(MockVCSRepo):
class MockHgRepo(MockVCSRepo):
+
def __init__(self):
super(MockHgRepo, self).__init__('mock-hg-stage', 'mock-hg-repo')
self.url = 'file://' + self.path
diff --git a/lib/spack/spack/test/modules.py b/lib/spack/spack/test/modules.py
index c73badf8f2..5e280d8e43 100644
--- a/lib/spack/spack/test/modules.py
+++ b/lib/spack/spack/test/modules.py
@@ -67,15 +67,41 @@ configuration_autoload_all = {
}
}
+configuration_prerequisites_direct = {
+ 'enable': ['tcl'],
+ 'tcl': {
+ 'all': {
+ 'prerequisites': 'direct'
+ }
+ }
+}
+
+configuration_prerequisites_all = {
+ 'enable': ['tcl'],
+ 'tcl': {
+ 'all': {
+ 'prerequisites': 'all'
+ }
+ }
+}
+
configuration_alter_environment = {
'enable': ['tcl'],
'tcl': {
'all': {
- 'filter': {'environment_blacklist': ['CMAKE_PREFIX_PATH']}
+ 'filter': {'environment_blacklist': ['CMAKE_PREFIX_PATH']},
+ 'environment': {
+ 'set': {'{name}_ROOT': '{prefix}'}
+ }
},
- 'arch=x86-linux': {
- 'environment': {'set': {'FOO': 'foo'},
- 'unset': ['BAR']}
+ 'platform=test target=x86_64': {
+ 'environment': {
+ 'set': {'FOO': 'foo'},
+ 'unset': ['BAR']
+ }
+ },
+ 'platform=test target=x86_32': {
+ 'load': ['foo/bar']
}
}
}
@@ -83,7 +109,8 @@ configuration_alter_environment = {
configuration_blacklist = {
'enable': ['tcl'],
'tcl': {
- 'blacklist': ['callpath'],
+ 'whitelist': ['zmpi'],
+ 'blacklist': ['callpath', 'mpi'],
'all': {
'autoload': 'direct'
}
@@ -100,8 +127,68 @@ configuration_conflicts = {
}
}
+configuration_wrong_conflicts = {
+ 'enable': ['tcl'],
+ 'tcl': {
+ 'naming_scheme': '{name}/{version}-{compiler.name}',
+ 'all': {
+ 'conflict': ['{name}/{compiler.name}']
+ }
+ }
+}
+
+configuration_suffix = {
+ 'enable': ['tcl'],
+ 'tcl': {
+ 'mpileaks': {
+ 'suffixes': {
+ '+debug': 'foo',
+ '~debug': 'bar'
+ }
+ }
+ }
+}
+
+
+class HelperFunctionsTests(MockPackagesTest):
+
+ def test_update_dictionary_extending_list(self):
+ target = {
+ 'foo': {
+ 'a': 1,
+ 'b': 2,
+ 'd': 4
+ },
+ 'bar': [1, 2, 4],
+ 'baz': 'foobar'
+ }
+ update = {
+ 'foo': {
+ 'c': 3,
+ },
+ 'bar': [3],
+ 'baz': 'foobaz',
+ 'newkey': {
+ 'd': 4
+ }
+ }
+ spack.modules.update_dictionary_extending_lists(target, update)
+ self.assertTrue(len(target) == 4)
+ self.assertTrue(len(target['foo']) == 4)
+ self.assertTrue(len(target['bar']) == 4)
+ self.assertEqual(target['baz'], 'foobaz')
+
+ def test_inspect_path(self):
+ env = spack.modules.inspect_path('/usr')
+ names = [item.name for item in env]
+ self.assertTrue('PATH' in names)
+ self.assertTrue('LIBRARY_PATH' in names)
+ self.assertTrue('LD_LIBRARY_PATH' in names)
+ self.assertTrue('CPATH' in names)
+
class TclTests(MockPackagesTest):
+
def setUp(self):
super(TclTests, self).setUp()
self.configuration_obj = spack.modules.CONFIGURATION
@@ -123,26 +210,39 @@ class TclTests(MockPackagesTest):
def test_simple_case(self):
spack.modules.CONFIGURATION = configuration_autoload_direct
- spec = spack.spec.Spec('mpich@3.0.4 arch=x86-linux')
+ spec = spack.spec.Spec('mpich@3.0.4')
content = self.get_modulefile_content(spec)
self.assertTrue('module-whatis "mpich @3.0.4"' in content)
+ self.assertRaises(TypeError, spack.modules.dependencies,
+ spec, 'non-existing-tag')
def test_autoload(self):
spack.modules.CONFIGURATION = configuration_autoload_direct
- spec = spack.spec.Spec('mpileaks arch=x86-linux')
+ spec = spack.spec.Spec('mpileaks')
content = self.get_modulefile_content(spec)
self.assertEqual(len([x for x in content if 'is-loaded' in x]), 2)
self.assertEqual(len([x for x in content if 'module load ' in x]), 2)
spack.modules.CONFIGURATION = configuration_autoload_all
- spec = spack.spec.Spec('mpileaks arch=x86-linux')
+ spec = spack.spec.Spec('mpileaks')
content = self.get_modulefile_content(spec)
self.assertEqual(len([x for x in content if 'is-loaded' in x]), 5)
self.assertEqual(len([x for x in content if 'module load ' in x]), 5)
+ def test_prerequisites(self):
+ spack.modules.CONFIGURATION = configuration_prerequisites_direct
+ spec = spack.spec.Spec('mpileaks arch=x86-linux')
+ content = self.get_modulefile_content(spec)
+ self.assertEqual(len([x for x in content if 'prereq' in x]), 2)
+
+ spack.modules.CONFIGURATION = configuration_prerequisites_all
+ spec = spack.spec.Spec('mpileaks arch=x86-linux')
+ content = self.get_modulefile_content(spec)
+ self.assertEqual(len([x for x in content if 'prereq' in x]), 5)
+
def test_alter_environment(self):
spack.modules.CONFIGURATION = configuration_alter_environment
- spec = spack.spec.Spec('mpileaks arch=x86-linux')
+ spec = spack.spec.Spec('mpileaks platform=test target=x86_64')
content = self.get_modulefile_content(spec)
self.assertEqual(
len([x
@@ -151,8 +251,10 @@ class TclTests(MockPackagesTest):
self.assertEqual(
len([x for x in content if 'setenv FOO "foo"' in x]), 1)
self.assertEqual(len([x for x in content if 'unsetenv BAR' in x]), 1)
+ self.assertEqual(
+ len([x for x in content if 'setenv MPILEAKS_ROOT' in x]), 1)
- spec = spack.spec.Spec('libdwarf arch=x64-linux')
+ spec = spack.spec.Spec('libdwarf %clang platform=test target=x86_32')
content = self.get_modulefile_content(spec)
self.assertEqual(
len([x
@@ -161,17 +263,30 @@ class TclTests(MockPackagesTest):
self.assertEqual(
len([x for x in content if 'setenv FOO "foo"' in x]), 0)
self.assertEqual(len([x for x in content if 'unsetenv BAR' in x]), 0)
+ self.assertEqual(
+ len([x for x in content if 'is-loaded foo/bar' in x]), 1)
+ self.assertEqual(
+ len([x for x in content if 'module load foo/bar' in x]), 1)
+ self.assertEqual(
+ len([x for x in content if 'setenv LIBDWARF_ROOT' in x]), 1)
def test_blacklist(self):
spack.modules.CONFIGURATION = configuration_blacklist
- spec = spack.spec.Spec('mpileaks arch=x86-linux')
+ spec = spack.spec.Spec('mpileaks ^zmpi')
+ content = self.get_modulefile_content(spec)
+ self.assertEqual(len([x for x in content if 'is-loaded' in x]), 1)
+ self.assertEqual(len([x for x in content if 'module load ' in x]), 1)
+ spec = spack.spec.Spec('callpath arch=x86-linux')
+ # Returns a StringIO instead of a string as no module file was written
+ self.assertRaises(AttributeError, self.get_modulefile_content, spec)
+ spec = spack.spec.Spec('zmpi arch=x86-linux')
content = self.get_modulefile_content(spec)
self.assertEqual(len([x for x in content if 'is-loaded' in x]), 1)
self.assertEqual(len([x for x in content if 'module load ' in x]), 1)
def test_conflicts(self):
spack.modules.CONFIGURATION = configuration_conflicts
- spec = spack.spec.Spec('mpileaks arch=x86-linux')
+ spec = spack.spec.Spec('mpileaks')
content = self.get_modulefile_content(spec)
self.assertEqual(
len([x for x in content if x.startswith('conflict')]), 2)
@@ -179,3 +294,57 @@ class TclTests(MockPackagesTest):
len([x for x in content if x == 'conflict mpileaks']), 1)
self.assertEqual(
len([x for x in content if x == 'conflict intel/14.0.1']), 1)
+
+ spack.modules.CONFIGURATION = configuration_wrong_conflicts
+ self.assertRaises(SystemExit, self.get_modulefile_content, spec)
+
+ def test_suffixes(self):
+ spack.modules.CONFIGURATION = configuration_suffix
+ spec = spack.spec.Spec('mpileaks+debug arch=x86-linux')
+ spec.concretize()
+ generator = spack.modules.TclModule(spec)
+ self.assertTrue('foo' in generator.use_name)
+
+ spec = spack.spec.Spec('mpileaks~debug arch=x86-linux')
+ spec.concretize()
+ generator = spack.modules.TclModule(spec)
+ self.assertTrue('bar' in generator.use_name)
+
+
+configuration_dotkit = {
+ 'enable': ['dotkit'],
+ 'dotkit': {
+ 'all': {
+ 'prerequisites': 'direct'
+ }
+ }
+}
+
+
+class DotkitTests(MockPackagesTest):
+
+ def setUp(self):
+ super(DotkitTests, self).setUp()
+ self.configuration_obj = spack.modules.CONFIGURATION
+ spack.modules.open = mock_open
+ # Make sure that a non-mocked configuration will trigger an error
+ spack.modules.CONFIGURATION = None
+
+ def tearDown(self):
+ del spack.modules.open
+ spack.modules.CONFIGURATION = self.configuration_obj
+ super(DotkitTests, self).tearDown()
+
+ def get_modulefile_content(self, spec):
+ spec.concretize()
+ generator = spack.modules.Dotkit(spec)
+ generator.write()
+ content = FILE_REGISTRY[generator.file_name].split('\n')
+ return content
+
+ def test_dotkit(self):
+ spack.modules.CONFIGURATION = configuration_dotkit
+ spec = spack.spec.Spec('mpileaks arch=x86-linux')
+ content = self.get_modulefile_content(spec)
+ self.assertTrue('#c spack' in content)
+ self.assertTrue('#d mpileaks @2.3' in content)
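
[Editor's note] The configuration_suffix dictionary tested above attaches a suffix to the generated module name based on which variant constraint the spec satisfies. A self-contained sketch of that mapping (a toy membership check stands in for Spec.satisfies()):

def apply_suffixes(base_name, variants, suffixes):
    """variants: set of active variant strings, e.g. {'+debug'}.
    suffixes: dict mapping a constraint string to a name suffix."""
    name = base_name
    for constraint, suffix in sorted(suffixes.items()):
        if constraint in variants:
            name += '-' + suffix
    return name

suffixes = {'+debug': 'foo', '~debug': 'bar'}
assert apply_suffixes('mpileaks-2.3', {'+debug'}, suffixes) == 'mpileaks-2.3-foo'
assert apply_suffixes('mpileaks-2.3', {'~debug'}, suffixes) == 'mpileaks-2.3-bar'
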
diff --git a/lib/spack/spack/test/multimethod.py b/lib/spack/spack/test/multimethod.py
index a33656adcc..a885374080 100644
--- a/lib/spack/spack/test/multimethod.py
+++ b/lib/spack/spack/test/multimethod.py
@@ -25,15 +25,10 @@
"""
Test for multi_method dispatch.
"""
-import unittest
-
import spack
from spack.multimethod import *
from spack.version import *
-from spack.spec import Spec
-from spack.multimethod import when
from spack.test.mock_packages_test import *
-from spack.version import *
class MultiMethodTest(MockPackagesTest):
@@ -42,7 +37,6 @@ class MultiMethodTest(MockPackagesTest):
pkg = spack.repo.get('multimethod@2.0')
self.assertRaises(NoSuchMethodError, pkg.no_version_2)
-
def test_one_version_match(self):
pkg = spack.repo.get('multimethod@1.0')
self.assertEqual(pkg.no_version_2(), 1)
@@ -53,7 +47,6 @@ class MultiMethodTest(MockPackagesTest):
pkg = spack.repo.get('multimethod@4.0')
self.assertEqual(pkg.no_version_2(), 4)
-
def test_version_overlap(self):
pkg = spack.repo.get('multimethod@2.0')
self.assertEqual(pkg.version_overlap(), 1)
@@ -61,7 +54,6 @@ class MultiMethodTest(MockPackagesTest):
pkg = spack.repo.get('multimethod@5.0')
self.assertEqual(pkg.version_overlap(), 2)
-
def test_mpi_version(self):
pkg = spack.repo.get('multimethod^mpich@3.0.4')
self.assertEqual(pkg.mpi_version(), 3)
@@ -72,7 +64,6 @@ class MultiMethodTest(MockPackagesTest):
pkg = spack.repo.get('multimethod^mpich@1.0')
self.assertEqual(pkg.mpi_version(), 1)
-
def test_undefined_mpi_version(self):
pkg = spack.repo.get('multimethod^mpich@0.4')
self.assertEqual(pkg.mpi_version(), 1)
@@ -80,7 +71,6 @@ class MultiMethodTest(MockPackagesTest):
pkg = spack.repo.get('multimethod^mpich@1.4')
self.assertEqual(pkg.mpi_version(), 1)
-
def test_default_works(self):
pkg = spack.repo.get('multimethod%gcc')
self.assertEqual(pkg.has_a_default(), 'gcc')
@@ -91,23 +81,18 @@ class MultiMethodTest(MockPackagesTest):
pkg = spack.repo.get('multimethod%pgi')
self.assertEqual(pkg.has_a_default(), 'default')
+ def test_target_match(self):
+ platform = spack.architecture.platform()
+ targets = platform.targets.values()
+ for target in targets[:-1]:
+ pkg = spack.repo.get('multimethod target=' + target.name)
+ self.assertEqual(pkg.different_by_target(), target.name)
- def test_architecture_match(self):
- pkg = spack.repo.get('multimethod arch=x86_64')
- self.assertEqual(pkg.different_by_architecture(), 'x86_64')
-
- pkg = spack.repo.get('multimethod arch=ppc64')
- self.assertEqual(pkg.different_by_architecture(), 'ppc64')
-
- pkg = spack.repo.get('multimethod arch=ppc32')
- self.assertEqual(pkg.different_by_architecture(), 'ppc32')
-
- pkg = spack.repo.get('multimethod arch=arm64')
- self.assertEqual(pkg.different_by_architecture(), 'arm64')
-
- pkg = spack.repo.get('multimethod arch=macos')
- self.assertRaises(NoSuchMethodError, pkg.different_by_architecture)
-
+ pkg = spack.repo.get('multimethod target=' + targets[-1].name)
+ if len(targets) == 1:
+ self.assertEqual(pkg.different_by_target(), targets[-1].name)
+ else:
+ self.assertRaises(NoSuchMethodError, pkg.different_by_target)
def test_dependency_match(self):
pkg = spack.repo.get('multimethod^zmpi')
@@ -121,7 +106,6 @@ class MultiMethodTest(MockPackagesTest):
pkg = spack.repo.get('multimethod^foobar')
self.assertEqual(pkg.different_by_dep(), 'mpich')
-
def test_virtual_dep_match(self):
pkg = spack.repo.get('multimethod^mpich2')
self.assertEqual(pkg.different_by_virtual_dep(), 2)
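
[Editor's note] test_target_match above replaces the hard-coded architecture cases with a loop over the platform's targets, expecting dispatch to succeed for every registered target and raise NoSuchMethodError otherwise. A toy sketch of that dispatch-by-target behavior (not Spack's @when machinery):

class NoSuchMethodError(Exception):
    pass

class MultimethodToy(object):
    _by_target = {}

    def __init__(self, target):
        self.target = target

    @classmethod
    def register(cls, target):
        def decorator(fn):
            cls._by_target[target] = fn
            return fn
        return decorator

    def different_by_target(self):
        try:
            return self._by_target[self.target](self)
        except KeyError:
            raise NoSuchMethodError(self.target)

@MultimethodToy.register('x86_64')
def _x86_64(self):
    return 'x86_64'

pkg = MultimethodToy('x86_64')
assert pkg.different_by_target() == 'x86_64'
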
diff --git a/lib/spack/spack/test/namespace_trie.py b/lib/spack/spack/test/namespace_trie.py
index b38ecd6179..7927fc8e60 100644
--- a/lib/spack/spack/test/namespace_trie.py
+++ b/lib/spack/spack/test/namespace_trie.py
@@ -32,7 +32,6 @@ class NamespaceTrieTest(unittest.TestCase):
def setUp(self):
self.trie = NamespaceTrie()
-
def test_add_single(self):
self.trie['foo'] = 'bar'
@@ -40,7 +39,6 @@ class NamespaceTrieTest(unittest.TestCase):
self.assertTrue(self.trie.has_value('foo'))
self.assertEqual(self.trie['foo'], 'bar')
-
def test_add_multiple(self):
self.trie['foo.bar'] = 'baz'
@@ -54,7 +52,6 @@ class NamespaceTrieTest(unittest.TestCase):
self.assertFalse(self.trie.is_prefix('foo.bar.baz'))
self.assertFalse(self.trie.has_value('foo.bar.baz'))
-
def test_add_three(self):
# add a three-level namespace
self.trie['foo.bar.baz'] = 'quux'
@@ -89,7 +86,6 @@ class NamespaceTrieTest(unittest.TestCase):
self.assertFalse(self.trie.is_prefix('foo.bar.baz.quux'))
self.assertFalse(self.trie.has_value('foo.bar.baz.quux'))
-
def test_add_none_single(self):
self.trie['foo'] = None
self.assertTrue(self.trie.is_prefix('foo'))
@@ -99,8 +95,6 @@ class NamespaceTrieTest(unittest.TestCase):
self.assertFalse(self.trie.is_prefix('foo.bar'))
self.assertFalse(self.trie.has_value('foo.bar'))
-
-
def test_add_none_multiple(self):
self.trie['foo.bar'] = None
diff --git a/lib/spack/spack/test/operating_system.py b/lib/spack/spack/test/operating_system.py
new file mode 100644
index 0000000000..8723f7244d
--- /dev/null
+++ b/lib/spack/spack/test/operating_system.py
@@ -0,0 +1,75 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+""" Test checks if the operating_system class is created correctly and that
+the functions are using the correct operating_system. Also checks whether
+the operating_system correctly uses the compiler_strategy
+"""
+import unittest
+from spack.platforms.cray_xc import CrayXc
+from spack.platforms.linux import Linux
+from spack.platforms.darwin import Darwin
+from spack.operating_system.linux_distro import LinuxDistro
+from spack.operating_system.cnl import ComputeNodeLinux
+
+
+class TestOperatingSystem(unittest.TestCase):
+
+ def setUp(self):
+ cray_xc = CrayXc()
+ linux = Linux()
+ darwin = Darwin()
+ self.cray_operating_sys = cray_xc.operating_system('front_os')
+ self.cray_default_os = cray_xc.operating_system('default_os')
+ self.cray_back_os = cray_xc.operating_system('back_os')
+ self.darwin_operating_sys = darwin.operating_system('default_os')
+ self.linux_operating_sys = linux.operating_system('default_os')
+
+ def test_cray_front_end_operating_system(self):
+ self.assertIsInstance(self.cray_operating_sys, LinuxDistro)
+
+ def test_cray_front_end_compiler_strategy(self):
+ self.assertEquals(self.cray_operating_sys.compiler_strategy, "PATH")
+
+ def test_cray_back_end_operating_system(self):
+ self.assertIsInstance(self.cray_back_os, ComputeNodeLinux)
+
+ def test_cray_back_end_compiler_strategy(self):
+ self.assertEquals(self.cray_back_os.compiler_strategy, "MODULES")
+
+ def test_linux_operating_system(self):
+ self.assertIsInstance(self.linux_operating_sys, LinuxDistro)
+
+ def test_linux_compiler_strategy(self):
+ self.assertEquals(self.linux_operating_sys.compiler_strategy, "PATH")
+
+ def test_cray_front_end_compiler_list(self):
+ """ Operating systems will now be in charge of finding compilers.
+ So, depending on which operating system you want to build for
+ or which operating system you are on, then you could detect
+ compilers in a certain way. Cray linux environment on the front
+ end is just a regular linux distro whereas the Cray linux compute
+ node is a stripped down version which modules are important
+ """
+ self.assertEquals(True, False)
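
[Editor's note] The new test file exercises Platform.operating_system() with the aliases 'front_os', 'back_os', and 'default_os'. A minimal sketch of that alias-resolving lookup (illustrative class, not spack.architecture.Platform):

class ToyPlatform(object):
    def __init__(self, front_os, back_os):
        self.operating_sys = {'frontend': front_os, 'backend': back_os}
        self.front_os = 'frontend'
        self.back_os = 'backend'
        self.default_os = 'backend'

    def operating_system(self, name):
        # Resolve the aliases 'front_os', 'back_os', and 'default_os'
        # before looking up the concrete operating system.
        if name in ('front_os', 'back_os', 'default_os'):
            name = getattr(self, name)
        return self.operating_sys[name]

cray = ToyPlatform(front_os='SuSE11', back_os='CNL10')
assert cray.operating_system('default_os') == 'CNL10'
assert cray.operating_system('front_os') == 'SuSE11'
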
diff --git a/lib/spack/spack/test/optional_deps.py b/lib/spack/spack/test/optional_deps.py
index b5ba0ecf35..a9a2b9abf5 100644
--- a/lib/spack/spack/test/optional_deps.py
+++ b/lib/spack/spack/test/optional_deps.py
@@ -22,10 +22,10 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-
from spack.spec import Spec
from spack.test.mock_packages_test import *
+
class ConcretizeTest(MockPackagesTest):
def check_normalize(self, spec_string, expected):
@@ -34,10 +34,10 @@ class ConcretizeTest(MockPackagesTest):
self.assertEqual(spec, expected)
self.assertTrue(spec.eq_dag(expected))
-
def test_normalize_simple_conditionals(self):
self.check_normalize('optional-dep-test', Spec('optional-dep-test'))
- self.check_normalize('optional-dep-test~a', Spec('optional-dep-test~a'))
+ self.check_normalize('optional-dep-test~a',
+ Spec('optional-dep-test~a'))
self.check_normalize('optional-dep-test+a',
Spec('optional-dep-test+a', Spec('a')))
@@ -45,7 +45,6 @@ class ConcretizeTest(MockPackagesTest):
self.check_normalize('optional-dep-test a=true',
Spec('optional-dep-test a=true', Spec('a')))
-
self.check_normalize('optional-dep-test a=true',
Spec('optional-dep-test+a', Spec('a')))
@@ -55,25 +54,29 @@ class ConcretizeTest(MockPackagesTest):
self.check_normalize('optional-dep-test%intel',
Spec('optional-dep-test%intel', Spec('c')))
- self.check_normalize('optional-dep-test%intel@64.1',
- Spec('optional-dep-test%intel@64.1', Spec('c'), Spec('d')))
+ self.check_normalize(
+ 'optional-dep-test%intel@64.1',
+ Spec('optional-dep-test%intel@64.1', Spec('c'), Spec('d')))
- self.check_normalize('optional-dep-test%intel@64.1.2',
- Spec('optional-dep-test%intel@64.1.2', Spec('c'), Spec('d')))
+ self.check_normalize(
+ 'optional-dep-test%intel@64.1.2',
+ Spec('optional-dep-test%intel@64.1.2', Spec('c'), Spec('d')))
self.check_normalize('optional-dep-test%clang@35',
Spec('optional-dep-test%clang@35', Spec('e')))
-
def test_multiple_conditionals(self):
- self.check_normalize('optional-dep-test+a@1.1',
- Spec('optional-dep-test+a@1.1', Spec('a'), Spec('b')))
+ self.check_normalize(
+ 'optional-dep-test+a@1.1',
+ Spec('optional-dep-test+a@1.1', Spec('a'), Spec('b')))
- self.check_normalize('optional-dep-test+a%intel',
- Spec('optional-dep-test+a%intel', Spec('a'), Spec('c')))
+ self.check_normalize(
+ 'optional-dep-test+a%intel',
+ Spec('optional-dep-test+a%intel', Spec('a'), Spec('c')))
- self.check_normalize('optional-dep-test@1.1%intel',
- Spec('optional-dep-test@1.1%intel', Spec('b'), Spec('c')))
+ self.check_normalize(
+ 'optional-dep-test@1.1%intel',
+ Spec('optional-dep-test@1.1%intel', Spec('b'), Spec('c')))
self.check_normalize('optional-dep-test@1.1%intel@64.1.2+a',
Spec('optional-dep-test@1.1%intel@64.1.2+a',
@@ -83,14 +86,12 @@ class ConcretizeTest(MockPackagesTest):
Spec('optional-dep-test@1.1%clang@36.5+a',
Spec('b'), Spec('a'), Spec('e')))
-
def test_chained_mpi(self):
self.check_normalize('optional-dep-test-2+mpi',
Spec('optional-dep-test-2+mpi',
Spec('optional-dep-test+mpi',
Spec('mpi'))))
-
def test_default_variant(self):
spec = Spec('optional-dep-test-3')
spec.concretize()
@@ -104,7 +105,6 @@ class ConcretizeTest(MockPackagesTest):
spec.concretize()
self.assertTrue('b' in spec)
-
def test_transitive_chain(self):
# Each of these dependencies comes from a conditional
# dependency on another. This requires iterating to evaluate
diff --git a/lib/spack/spack/test/package_sanity.py b/lib/spack/spack/test/package_sanity.py
index 9198986f5d..c3c3923855 100644
--- a/lib/spack/spack/test/package_sanity.py
+++ b/lib/spack/spack/test/package_sanity.py
@@ -38,12 +38,10 @@ class PackageSanityTest(unittest.TestCase):
for name in spack.repo.all_package_names():
spack.repo.get(name)
-
def test_get_all_packages(self):
"""Get all packages once and make sure that works."""
self.check_db()
-
def test_get_all_mock_packages(self):
"""Get the mock packages once each too."""
db = RepoPath(spack.mock_packages_path)
@@ -51,7 +49,6 @@ class PackageSanityTest(unittest.TestCase):
self.check_db()
spack.repo.swap(db)
-
def test_url_versions(self):
"""Check URLs for regular packages, if they are explicitly defined."""
for pkg in spack.repo.all_packages():
diff --git a/lib/spack/spack/test/packages.py b/lib/spack/spack/test/packages.py
index bea42bb33a..fdd079a8f7 100644
--- a/lib/spack/spack/test/packages.py
+++ b/lib/spack/spack/test/packages.py
@@ -22,7 +22,6 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-
import spack
from llnl.util.filesystem import join_path
from spack.repository import Repo
@@ -33,33 +32,26 @@ from spack.util.naming import mod_to_class
class PackagesTest(MockPackagesTest):
def test_load_package(self):
- pkg = spack.repo.get('mpich')
-
+ spack.repo.get('mpich')
def test_package_name(self):
pkg = spack.repo.get('mpich')
self.assertEqual(pkg.name, 'mpich')
-
def test_package_filename(self):
repo = Repo(spack.mock_packages_path)
filename = repo.filename_for_package_name('mpich')
self.assertEqual(filename,
- join_path(spack.mock_packages_path, 'packages', 'mpich', 'package.py'))
-
-
- def test_package_name(self):
- pkg = spack.repo.get('mpich')
- self.assertEqual(pkg.name, 'mpich')
-
+ join_path(spack.mock_packages_path,
+ 'packages', 'mpich', 'package.py'))
def test_nonexisting_package_filename(self):
repo = Repo(spack.mock_packages_path)
filename = repo.filename_for_package_name('some-nonexisting-package')
self.assertEqual(
filename,
- join_path(spack.mock_packages_path, 'packages', 'some-nonexisting-package', 'package.py'))
-
+ join_path(spack.mock_packages_path,
+ 'packages', 'some-nonexisting-package', 'package.py'))
def test_package_class_names(self):
self.assertEqual('Mpich', mod_to_class('mpich'))
@@ -68,37 +60,32 @@ class PackagesTest(MockPackagesTest):
self.assertEqual('Pmgrcollective', mod_to_class('PmgrCollective'))
self.assertEqual('_3db', mod_to_class('3db'))
-
#
# Below tests target direct imports of spack packages from the
# spack.pkg namespace
#
def test_import_package(self):
- import spack.pkg.builtin.mock.mpich
-
+ import spack.pkg.builtin.mock.mpich # noqa
def test_import_package_as(self):
- import spack.pkg.builtin.mock.mpich as mp
-
+ import spack.pkg.builtin.mock.mpich as mp # noqa
def test_import_class_from_package(self):
- from spack.pkg.builtin.mock.mpich import Mpich
-
+ from spack.pkg.builtin.mock.mpich import Mpich # noqa
def test_import_module_from_package(self):
- from spack.pkg.builtin.mock import mpich
-
+ from spack.pkg.builtin.mock import mpich # noqa
def test_import_namespace_container_modules(self):
- import spack.pkg
- import spack.pkg as p
- from spack import pkg
+ import spack.pkg # noqa
+ import spack.pkg as p # noqa
+ from spack import pkg # noqa
- import spack.pkg.builtin
- import spack.pkg.builtin as b
- from spack.pkg import builtin
+ import spack.pkg.builtin # noqa
+ import spack.pkg.builtin as b # noqa
+ from spack.pkg import builtin # noqa
- import spack.pkg.builtin.mock
- import spack.pkg.builtin.mock as m
- from spack.pkg.builtin import mock
+ import spack.pkg.builtin.mock # noqa
+ import spack.pkg.builtin.mock as m # noqa
+ from spack.pkg.builtin import mock # noqa
diff --git a/lib/spack/spack/test/pattern.py b/lib/spack/spack/test/pattern.py
index 3419d600b8..0c772a0d2d 100644
--- a/lib/spack/spack/test/pattern.py
+++ b/lib/spack/spack/test/pattern.py
@@ -41,6 +41,7 @@ class CompositeTest(unittest.TestCase):
             raise NotImplementedError('subtract not implemented')
class One(Base):
+
def add(self):
Base.counter += 1
@@ -48,6 +49,7 @@ class CompositeTest(unittest.TestCase):
Base.counter -= 1
class Two(Base):
+
def add(self):
Base.counter += 2
diff --git a/lib/spack/spack/test/provider_index.py b/lib/spack/spack/test/provider_index.py
new file mode 100644
index 0000000000..9847dd05a6
--- /dev/null
+++ b/lib/spack/spack/test/provider_index.py
@@ -0,0 +1,93 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""Tests for provider index cache files.
+
+Tests assume that mock packages provide this:
+
+ {'blas': {
+ blas: set([netlib-blas, openblas, openblas-with-lapack])},
+ 'lapack': {lapack: set([netlib-lapack, openblas-with-lapack])},
+ 'mpi': {mpi@:1: set([mpich@:1]),
+ mpi@:2.0: set([mpich2]),
+ mpi@:2.1: set([mpich2@1.1:]),
+ mpi@:2.2: set([mpich2@1.2:]),
+ mpi@:3: set([mpich@3:]),
+ mpi@:10.0: set([zmpi])},
+ 'stuff': {stuff: set([externalvirtual])}}
+"""
+from StringIO import StringIO
+
+import spack
+from spack.spec import Spec
+from spack.provider_index import ProviderIndex
+from spack.test.mock_packages_test import *
+
+
+class ProviderIndexTest(MockPackagesTest):
+
+ def test_yaml_round_trip(self):
+ p = ProviderIndex(spack.repo.all_package_names())
+
+ ostream = StringIO()
+ p.to_yaml(ostream)
+
+ istream = StringIO(ostream.getvalue())
+ q = ProviderIndex.from_yaml(istream)
+
+ self.assertEqual(p, q)
+
+ def test_providers_for_simple(self):
+ p = ProviderIndex(spack.repo.all_package_names())
+
+ blas_providers = p.providers_for('blas')
+ self.assertTrue(Spec('netlib-blas') in blas_providers)
+ self.assertTrue(Spec('openblas') in blas_providers)
+ self.assertTrue(Spec('openblas-with-lapack') in blas_providers)
+
+ lapack_providers = p.providers_for('lapack')
+ self.assertTrue(Spec('netlib-lapack') in lapack_providers)
+ self.assertTrue(Spec('openblas-with-lapack') in lapack_providers)
+
+ def test_mpi_providers(self):
+ p = ProviderIndex(spack.repo.all_package_names())
+
+ mpi_2_providers = p.providers_for('mpi@2')
+ self.assertTrue(Spec('mpich2') in mpi_2_providers)
+ self.assertTrue(Spec('mpich@3:') in mpi_2_providers)
+
+ mpi_3_providers = p.providers_for('mpi@3')
+ self.assertTrue(Spec('mpich2') not in mpi_3_providers)
+ self.assertTrue(Spec('mpich@3:') in mpi_3_providers)
+ self.assertTrue(Spec('zmpi') in mpi_3_providers)
+
+ def test_equal(self):
+ p = ProviderIndex(spack.repo.all_package_names())
+ q = ProviderIndex(spack.repo.all_package_names())
+ self.assertEqual(p, q)
+
+ def test_copy(self):
+ p = ProviderIndex(spack.repo.all_package_names())
+ q = p.copy()
+ self.assertEqual(p, q)
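
[Editor's note] test_yaml_round_trip follows the standard serialize/deserialize round-trip pattern: dump, reload, and compare for equality. A generic sketch with PyYAML (toy data, not ProviderIndex's actual on-disk format):

import yaml

index = {'mpi': ['mpich', 'zmpi'], 'blas': ['openblas']}

# Round trip: serialize, deserialize, and require equality.
text = yaml.safe_dump({'provider_index': index})
assert yaml.safe_load(text)['provider_index'] == index
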
diff --git a/lib/spack/spack/test/python_version.py b/lib/spack/spack/test/python_version.py
index 6c09effc56..5af55bdc5f 100644
--- a/lib/spack/spack/test/python_version.py
+++ b/lib/spack/spack/test/python_version.py
@@ -36,7 +36,8 @@ import llnl.util.tty as tty
import pyqver2
import spack
-spack_max_version = (2,6)
+spack_max_version = (2, 6)
+
class PythonVersionTest(unittest.TestCase):
@@ -51,12 +52,10 @@ class PythonVersionTest(unittest.TestCase):
if re.match(r'^[^.#].*\.py$', filename):
yield os.path.join(root, filename)
-
def package_py_files(self):
for name in spack.repo.all_package_names():
yield spack.repo.filename_for_package_name(name)
-
def check_python_versions(self, *files):
# dict version -> filename -> reasons
all_issues = {}
@@ -66,7 +65,7 @@ class PythonVersionTest(unittest.TestCase):
versions = pyqver2.get_versions(pyfile.read())
for ver, reasons in versions.items():
if ver > spack_max_version:
- if not ver in all_issues:
+ if ver not in all_issues:
all_issues[ver] = {}
all_issues[ver][fn] = reasons
@@ -87,7 +86,7 @@ class PythonVersionTest(unittest.TestCase):
tty.error("These files require version %d.%d:" % v)
maxlen = max(len(f) for f, prob in msgs)
- fmt = "%%-%ds%%s" % (maxlen+3)
+ fmt = "%%-%ds%%s" % (maxlen + 3)
print fmt % ('File', 'Reason')
print fmt % ('-' * (maxlen), '-' * 20)
for msg in msgs:
@@ -95,10 +94,8 @@ class PythonVersionTest(unittest.TestCase):
self.assertTrue(len(all_issues) == 0)
-
def test_core_module_compatibility(self):
self.check_python_versions(*self.pyfiles(spack.lib_path))
-
def test_package_module_compatibility(self):
self.check_python_versions(*self.pyfiles(spack.packages_path))
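
[Editor's note] The fmt = "%%-%ds%%s" % (maxlen + 3) line above builds a format string at runtime so the filename column is padded to the longest name plus three spaces. A standalone illustration (toy file names):

msgs = [('lib/spack/foo.py', 'print statement'),
        ('lib/spack/barbaz.py', 'except ... as')]
maxlen = max(len(f) for f, prob in msgs)
fmt = "%%-%ds%%s" % (maxlen + 3)   # here: "%-22s%s"
for f, prob in msgs:
    # Left-justify the file name, then append the reason.
    assert fmt % (f, prob) == f.ljust(maxlen + 3) + prob
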
diff --git a/lib/spack/spack/test/sbang.py b/lib/spack/spack/test/sbang.py
index 6aea1a68c7..12abce7b35 100644
--- a/lib/spack/spack/test/sbang.py
+++ b/lib/spack/spack/test/sbang.py
@@ -26,6 +26,7 @@
Test that Spack's shebang filtering works correctly.
"""
import os
+import stat
import unittest
import tempfile
import shutil
@@ -34,12 +35,16 @@ from llnl.util.filesystem import *
from spack.hooks.sbang import filter_shebangs_in_directory
import spack
-short_line = "#!/this/is/short/bin/bash\n"
-long_line = "#!/this/" + ('x' * 200) + "/is/long\n"
-sbang_line = '#!/bin/bash %s/bin/sbang\n' % spack.spack_root
-last_line = "last!\n"
+short_line = "#!/this/is/short/bin/bash\n"
+long_line = "#!/this/" + ('x' * 200) + "/is/long\n"
+lua_line = "#!/this/" + ('x' * 200) + "/is/lua\n"
+lua_line_patched = "--!/this/" + ('x' * 200) + "/is/lua\n"
+sbang_line = '#!/bin/bash %s/bin/sbang\n' % spack.spack_root
+last_line = "last!\n"
+
class SbangTest(unittest.TestCase):
+
def setUp(self):
self.tempdir = tempfile.mkdtemp()
@@ -59,6 +64,12 @@ class SbangTest(unittest.TestCase):
f.write(long_line)
f.write(last_line)
+ # Lua script with long shebang
+ self.lua_shebang = os.path.join(self.tempdir, 'lua')
+ with open(self.lua_shebang, 'w') as f:
+ f.write(lua_line)
+ f.write(last_line)
+
# Script already using sbang.
self.has_shebang = os.path.join(self.tempdir, 'shebang')
with open(self.has_shebang, 'w') as f:
@@ -66,11 +77,8 @@ class SbangTest(unittest.TestCase):
f.write(long_line)
f.write(last_line)
-
def tearDown(self):
- shutil.rmtree(self.tempdir, ignore_errors=True)
-
-
+ shutil.rmtree(self.tempdir, ignore_errors=True)
def test_shebang_handling(self):
filter_shebangs_in_directory(self.tempdir)
@@ -86,8 +94,25 @@ class SbangTest(unittest.TestCase):
self.assertEqual(f.readline(), long_line)
self.assertEqual(f.readline(), last_line)
+ # Make sure this got patched.
+ with open(self.lua_shebang, 'r') as f:
+ self.assertEqual(f.readline(), sbang_line)
+ self.assertEqual(f.readline(), lua_line_patched)
+ self.assertEqual(f.readline(), last_line)
+
# Make sure this is untouched
with open(self.has_shebang, 'r') as f:
self.assertEqual(f.readline(), sbang_line)
self.assertEqual(f.readline(), long_line)
self.assertEqual(f.readline(), last_line)
+
+ def test_shebang_handles_non_writable_files(self):
+ # make a file non-writable
+ st = os.stat(self.long_shebang)
+ not_writable_mode = st.st_mode & ~stat.S_IWRITE
+ os.chmod(self.long_shebang, not_writable_mode)
+
+ self.test_shebang_handling()
+
+ st = os.stat(self.long_shebang)
+ self.assertEqual(oct(not_writable_mode), oct(st.st_mode))
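
[Editor's note] test_shebang_handles_non_writable_files checks that filtering succeeds on a read-only script and leaves the original permissions in place. A sketch of that chmod-around-the-edit technique (standard library only; the rewrite step is a stand-in for sbang's shebang patching):

import os
import stat

def patch_readonly_file(path, new_first_line):
    """Temporarily grant write permission, rewrite the first line,
    then restore the original mode."""
    st = os.stat(path)
    os.chmod(path, st.st_mode | stat.S_IWRITE)
    try:
        with open(path, 'r') as f:
            lines = f.readlines()
        lines[0] = new_first_line
        with open(path, 'w') as f:
            f.writelines(lines)
    finally:
        os.chmod(path, st.st_mode)

# Usage (hypothetical path):
# patch_readonly_file('/tmp/script', '#!/bin/bash /opt/spack/bin/sbang\n')
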
diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py
index 52f4f7395e..8f61c7ac76 100644
--- a/lib/spack/spack/test/spec_dag.py
+++ b/lib/spack/spack/test/spec_dag.py
@@ -29,10 +29,9 @@ You can find the dummy packages here::
spack/lib/spack/spack/test/mock_packages
"""
import spack
+import spack.architecture
import spack.package
-from llnl.util.lang import list_modules
-
from spack.spec import Spec
from spack.test.mock_packages_test import *
@@ -50,21 +49,19 @@ class SpecDagTest(MockPackagesTest):
self.assertRaises(spack.spec.UnsatisfiableVersionSpecError,
spec.normalize)
-
def test_preorder_node_traversal(self):
dag = Spec('mpileaks ^zmpi')
dag.normalize()
names = ['mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf',
'zmpi', 'fake']
- pairs = zip([0,1,2,3,4,2,3], names)
+ pairs = zip([0, 1, 2, 3, 4, 2, 3], names)
traversal = dag.traverse()
self.assertEqual([x.name for x in traversal], names)
traversal = dag.traverse(depth=True)
- self.assertEqual([(x, y.name) for x,y in traversal], pairs)
-
+ self.assertEqual([(x, y.name) for x, y in traversal], pairs)
def test_preorder_edge_traversal(self):
dag = Spec('mpileaks ^zmpi')
@@ -72,14 +69,13 @@ class SpecDagTest(MockPackagesTest):
names = ['mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf',
'libelf', 'zmpi', 'fake', 'zmpi']
- pairs = zip([0,1,2,3,4,3,2,3,1], names)
+ pairs = zip([0, 1, 2, 3, 4, 3, 2, 3, 1], names)
traversal = dag.traverse(cover='edges')
self.assertEqual([x.name for x in traversal], names)
traversal = dag.traverse(cover='edges', depth=True)
- self.assertEqual([(x, y.name) for x,y in traversal], pairs)
-
+ self.assertEqual([(x, y.name) for x, y in traversal], pairs)
def test_preorder_path_traversal(self):
dag = Spec('mpileaks ^zmpi')
@@ -87,14 +83,13 @@ class SpecDagTest(MockPackagesTest):
names = ['mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf',
'libelf', 'zmpi', 'fake', 'zmpi', 'fake']
- pairs = zip([0,1,2,3,4,3,2,3,1,2], names)
+ pairs = zip([0, 1, 2, 3, 4, 3, 2, 3, 1, 2], names)
traversal = dag.traverse(cover='paths')
self.assertEqual([x.name for x in traversal], names)
traversal = dag.traverse(cover='paths', depth=True)
- self.assertEqual([(x, y.name) for x,y in traversal], pairs)
-
+ self.assertEqual([(x, y.name) for x, y in traversal], pairs)
def test_postorder_node_traversal(self):
dag = Spec('mpileaks ^zmpi')
@@ -102,14 +97,13 @@ class SpecDagTest(MockPackagesTest):
names = ['libelf', 'libdwarf', 'dyninst', 'fake', 'zmpi',
'callpath', 'mpileaks']
- pairs = zip([4,3,2,3,2,1,0], names)
+ pairs = zip([4, 3, 2, 3, 2, 1, 0], names)
traversal = dag.traverse(order='post')
self.assertEqual([x.name for x in traversal], names)
traversal = dag.traverse(depth=True, order='post')
- self.assertEqual([(x, y.name) for x,y in traversal], pairs)
-
+ self.assertEqual([(x, y.name) for x, y in traversal], pairs)
def test_postorder_edge_traversal(self):
dag = Spec('mpileaks ^zmpi')
@@ -117,14 +111,13 @@ class SpecDagTest(MockPackagesTest):
names = ['libelf', 'libdwarf', 'libelf', 'dyninst', 'fake', 'zmpi',
'callpath', 'zmpi', 'mpileaks']
- pairs = zip([4,3,3,2,3,2,1,1,0], names)
+ pairs = zip([4, 3, 3, 2, 3, 2, 1, 1, 0], names)
traversal = dag.traverse(cover='edges', order='post')
self.assertEqual([x.name for x in traversal], names)
traversal = dag.traverse(cover='edges', depth=True, order='post')
- self.assertEqual([(x, y.name) for x,y in traversal], pairs)
-
+ self.assertEqual([(x, y.name) for x, y in traversal], pairs)
def test_postorder_path_traversal(self):
dag = Spec('mpileaks ^zmpi')
@@ -132,14 +125,13 @@ class SpecDagTest(MockPackagesTest):
names = ['libelf', 'libdwarf', 'libelf', 'dyninst', 'fake', 'zmpi',
'callpath', 'fake', 'zmpi', 'mpileaks']
- pairs = zip([4,3,3,2,3,2,1,2,1,0], names)
+ pairs = zip([4, 3, 3, 2, 3, 2, 1, 2, 1, 0], names)
traversal = dag.traverse(cover='paths', order='post')
self.assertEqual([x.name for x in traversal], names)
traversal = dag.traverse(cover='paths', depth=True, order='post')
- self.assertEqual([(x, y.name) for x,y in traversal], pairs)
-
+ self.assertEqual([(x, y.name) for x, y in traversal], pairs)
def test_conflicting_spec_constraints(self):
mpileaks = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf ^libdwarf')
@@ -147,11 +139,12 @@ class SpecDagTest(MockPackagesTest):
# Normalize then add conflicting constraints to the DAG (this is an
# extremely unlikely scenario, but we test for it anyway)
mpileaks.normalize()
- mpileaks.dependencies['mpich'] = Spec('mpich@1.0')
- mpileaks.dependencies['callpath'].dependencies['mpich'] = Spec('mpich@2.0')
-
- self.assertRaises(spack.spec.InconsistentSpecError, mpileaks.flatten)
+ mpileaks._dependencies['mpich'].spec = Spec('mpich@1.0')
+ mpileaks._dependencies['callpath']. \
+ spec._dependencies['mpich'].spec = Spec('mpich@2.0')
+ self.assertRaises(spack.spec.InconsistentSpecError,
+ lambda: mpileaks.flat_dependencies(copy=False))
def test_normalize_twice(self):
"""Make sure normalize can be run twice on the same spec,
@@ -163,7 +156,6 @@ class SpecDagTest(MockPackagesTest):
spec.normalize()
self.assertEqual(n1, spec)
-
def test_normalize_a_lot(self):
spec = Spec('mpileaks')
spec.normalize()
@@ -171,7 +163,6 @@ class SpecDagTest(MockPackagesTest):
spec.normalize()
spec.normalize()
-
def test_normalize_with_virtual_spec(self):
dag = Spec('mpileaks',
Spec('callpath',
@@ -186,76 +177,80 @@ class SpecDagTest(MockPackagesTest):
# make sure nothing with the same name occurs twice
counts = {}
for spec in dag.traverse(key=id):
- if not spec.name in counts:
+ if spec.name not in counts:
counts[spec.name] = 0
counts[spec.name] += 1
for name in counts:
self.assertEqual(counts[name], 1, "Count for %s was not 1!" % name)
-
def check_links(self, spec_to_check):
for spec in spec_to_check.traverse():
- for dependent in spec.dependents.values():
+ for dependent in spec.dependents():
self.assertTrue(
- spec.name in dependent.dependencies,
- "%s not in dependencies of %s" % (spec.name, dependent.name))
+ spec.name in dependent.dependencies_dict(),
+ "%s not in dependencies of %s" %
+ (spec.name, dependent.name))
- for dependency in spec.dependencies.values():
+ for dependency in spec.dependencies():
self.assertTrue(
- spec.name in dependency.dependents,
- "%s not in dependents of %s" % (spec.name, dependency.name))
-
+ spec.name in dependency.dependents_dict(),
+ "%s not in dependents of %s" %
+ (spec.name, dependency.name))
def test_dependents_and_dependencies_are_correct(self):
spec = Spec('mpileaks',
- Spec('callpath',
- Spec('dyninst',
- Spec('libdwarf',
- Spec('libelf')),
- Spec('libelf')),
- Spec('mpi')),
- Spec('mpi'))
+ Spec('callpath',
+ Spec('dyninst',
+ Spec('libdwarf',
+ Spec('libelf')),
+ Spec('libelf')),
+ Spec('mpi')),
+ Spec('mpi'))
self.check_links(spec)
spec.normalize()
self.check_links(spec)
-
def test_unsatisfiable_version(self):
self.set_pkg_dep('mpileaks', 'mpich@1.0')
spec = Spec('mpileaks ^mpich@2.0 ^callpath ^dyninst ^libelf ^libdwarf')
- self.assertRaises(spack.spec.UnsatisfiableVersionSpecError, spec.normalize)
-
+ self.assertRaises(spack.spec.UnsatisfiableVersionSpecError,
+ spec.normalize)
def test_unsatisfiable_compiler(self):
self.set_pkg_dep('mpileaks', 'mpich%gcc')
- spec = Spec('mpileaks ^mpich%intel ^callpath ^dyninst ^libelf ^libdwarf')
- self.assertRaises(spack.spec.UnsatisfiableCompilerSpecError, spec.normalize)
-
+ spec = Spec('mpileaks ^mpich%intel ^callpath ^dyninst ^libelf'
+ ' ^libdwarf')
+ self.assertRaises(spack.spec.UnsatisfiableCompilerSpecError,
+ spec.normalize)
def test_unsatisfiable_compiler_version(self):
self.set_pkg_dep('mpileaks', 'mpich%gcc@4.6')
- spec = Spec('mpileaks ^mpich%gcc@4.5 ^callpath ^dyninst ^libelf ^libdwarf')
- self.assertRaises(spack.spec.UnsatisfiableCompilerSpecError, spec.normalize)
-
+ spec = Spec('mpileaks ^mpich%gcc@4.5 ^callpath ^dyninst ^libelf'
+ ' ^libdwarf')
+ self.assertRaises(spack.spec.UnsatisfiableCompilerSpecError,
+ spec.normalize)
def test_unsatisfiable_architecture(self):
- self.set_pkg_dep('mpileaks', 'mpich arch=bgqos_0')
- spec = Spec('mpileaks ^mpich arch=sles_10_ppc64 ^callpath ^dyninst ^libelf ^libdwarf')
- self.assertRaises(spack.spec.UnsatisfiableArchitectureSpecError, spec.normalize)
-
+ self.set_pkg_dep('mpileaks', 'mpich platform=test target=be')
+ spec = Spec('mpileaks ^mpich platform=test target=fe ^callpath'
+ ' ^dyninst ^libelf ^libdwarf')
+ self.assertRaises(spack.spec.UnsatisfiableArchitectureSpecError,
+ spec.normalize)
def test_invalid_dep(self):
spec = Spec('libelf ^mpich')
- self.assertRaises(spack.spec.InvalidDependencyException, spec.normalize)
+ self.assertRaises(spack.spec.InvalidDependencyException,
+ spec.normalize)
spec = Spec('libelf ^libdwarf')
- self.assertRaises(spack.spec.InvalidDependencyException, spec.normalize)
+ self.assertRaises(spack.spec.InvalidDependencyException,
+ spec.normalize)
spec = Spec('mpich ^dyninst ^libelf')
- self.assertRaises(spack.spec.InvalidDependencyException, spec.normalize)
-
+ self.assertRaises(spack.spec.InvalidDependencyException,
+ spec.normalize)
def test_equal(self):
# Different spec structures to test for equality
@@ -294,10 +289,10 @@ class SpecDagTest(MockPackagesTest):
self.assertFalse(flip_flat.eq_dag(flip_dag))
self.assertFalse(dag.eq_dag(flip_dag))
-
def test_normalize_mpileaks(self):
# Spec parsed in from a string
- spec = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf@1.8.11 ^libdwarf')
+ spec = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf@1.8.11'
+ ' ^libdwarf')
# What that spec should look like after parsing
expected_flat = Spec(
@@ -360,7 +355,6 @@ class SpecDagTest(MockPackagesTest):
self.assertEqual(spec, non_unique_nodes)
self.assertFalse(spec.eq_dag(non_unique_nodes))
-
def test_normalize_with_virtual_package(self):
spec = Spec('mpileaks ^mpi ^libelf@1.8.11 ^libdwarf')
spec.normalize()
@@ -376,7 +370,6 @@ class SpecDagTest(MockPackagesTest):
self.assertEqual(str(spec), str(expected_normalized))
-
def test_contains(self):
spec = Spec('mpileaks ^mpi ^libelf@1.8.11 ^libdwarf')
self.assertTrue(Spec('mpi') in spec)
@@ -387,7 +380,6 @@ class SpecDagTest(MockPackagesTest):
self.assertFalse(Spec('libgoblin') in spec)
self.assertTrue(Spec('mpileaks') in spec)
-
def test_copy_simple(self):
orig = Spec('mpileaks')
copy = orig.copy()
@@ -404,7 +396,6 @@ class SpecDagTest(MockPackagesTest):
copy_ids = set(id(s) for s in copy.traverse())
self.assertFalse(orig_ids.intersection(copy_ids))
-
def test_copy_normalized(self):
orig = Spec('mpileaks')
orig.normalize()
@@ -422,7 +413,6 @@ class SpecDagTest(MockPackagesTest):
copy_ids = set(id(s) for s in copy.traverse())
self.assertFalse(orig_ids.intersection(copy_ids))
-
def test_copy_concretized(self):
orig = Spec('mpileaks')
orig.concretize()
@@ -439,3 +429,69 @@ class SpecDagTest(MockPackagesTest):
orig_ids = set(id(s) for s in orig.traverse())
copy_ids = set(id(s) for s in copy.traverse())
self.assertFalse(orig_ids.intersection(copy_ids))
+
+ """
+ Here is the graph with deptypes labeled (assume all packages have a 'dt'
+ prefix). Arrows are marked with the deptypes ('b' for 'build', 'l' for
+ 'link', 'r' for 'run').
+
+ use -bl-> top
+
+ top -b-> build1
+ top -bl-> link1
+ top -r-> run1
+
+ build1 -b-> build2
+ build1 -bl-> link2
+ build1 -r-> run2
+
+ link1 -bl-> link3
+
+ run1 -bl-> link5
+ run1 -r-> run3
+
+ link3 -b-> build2
+ link3 -bl-> link4
+
+ run3 -b-> build3
+ """
+
+ def test_deptype_traversal(self):
+ dag = Spec('dtuse')
+ dag.normalize()
+
+ names = ['dtuse', 'dttop', 'dtbuild1', 'dtbuild2', 'dtlink2',
+ 'dtlink1', 'dtlink3', 'dtlink4']
+
+ traversal = dag.traverse(deptype=('build', 'link'))
+ self.assertEqual([x.name for x in traversal], names)
+
+ def test_deptype_traversal_with_builddeps(self):
+ dag = Spec('dttop')
+ dag.normalize()
+
+ names = ['dttop', 'dtbuild1', 'dtbuild2', 'dtlink2',
+ 'dtlink1', 'dtlink3', 'dtlink4']
+
+ traversal = dag.traverse(deptype=('build', 'link'))
+ self.assertEqual([x.name for x in traversal], names)
+
+ def test_deptype_traversal_full(self):
+ dag = Spec('dttop')
+ dag.normalize()
+
+ names = ['dttop', 'dtbuild1', 'dtbuild2', 'dtlink2', 'dtrun2',
+ 'dtlink1', 'dtlink3', 'dtlink4', 'dtrun1', 'dtlink5',
+ 'dtrun3', 'dtbuild3']
+
+ traversal = dag.traverse(deptype=spack.alldeps)
+ self.assertEqual([x.name for x in traversal], names)
+
+ def test_deptype_traversal_run(self):
+ dag = Spec('dttop')
+ dag.normalize()
+
+ names = ['dttop', 'dtrun1', 'dtrun3']
+
+ traversal = dag.traverse(deptype='run')
+ self.assertEqual([x.name for x in traversal], names)
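
[Editor's note] The ASCII graph in the comment above labels each edge with its deptypes. A toy sketch of a deptype-filtered traversal over that same graph (plain dictionaries, not Spack's Spec.traverse(), but it reproduces the orderings asserted in the tests):

EDGES = {
    'dtuse':    [('dttop', 'bl')],
    'dttop':    [('dtbuild1', 'b'), ('dtlink1', 'bl'), ('dtrun1', 'r')],
    'dtbuild1': [('dtbuild2', 'b'), ('dtlink2', 'bl'), ('dtrun2', 'r')],
    'dtlink1':  [('dtlink3', 'bl')],
    'dtrun1':   [('dtlink5', 'bl'), ('dtrun3', 'r')],
    'dtlink3':  [('dtbuild2', 'b'), ('dtlink4', 'bl')],
    'dtrun3':   [('dtbuild3', 'b')],
}

def traverse(node, deptypes, visited=None):
    """Preorder DFS that visits each node once and follows only edges
    whose single-character labels intersect deptypes."""
    if visited is None:
        visited = set()
    if node in visited:
        return
    visited.add(node)
    yield node
    for child, label in EDGES.get(node, []):
        if any(t in label for t in deptypes):
            for n in traverse(child, deptypes, visited):
                yield n

assert list(traverse('dttop', 'r')) == ['dttop', 'dtrun1', 'dtrun3']
assert list(traverse('dttop', 'bl')) == ['dttop', 'dtbuild1', 'dtbuild2',
                                         'dtlink2', 'dtlink1', 'dtlink3',
                                         'dtlink4']
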
diff --git a/lib/spack/spack/test/spec_semantics.py b/lib/spack/spack/test/spec_semantics.py
index 0cb78b90ed..79ffc99298 100644
--- a/lib/spack/spack/test/spec_semantics.py
+++ b/lib/spack/spack/test/spec_semantics.py
@@ -22,17 +22,18 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import unittest
+import spack.architecture
from spack.spec import *
from spack.test.mock_packages_test import *
+
class SpecSematicsTest(MockPackagesTest):
"""This tests satisfies(), constrain() and other semantic operations
on specs."""
- # ================================================================================
+ # ========================================================================
# Utility functions to set everything up.
- # ================================================================================
+ # ========================================================================
def check_satisfies(self, spec, anon_spec, concrete=False):
left = Spec(spec, concrete=concrete)
try:
@@ -48,7 +49,6 @@ class SpecSematicsTest(MockPackagesTest):
# right by left. Reverse is not always true.
right.copy().constrain(left)
-
def check_unsatisfiable(self, spec, anon_spec, concrete=False):
left = Spec(spec, concrete=concrete)
try:
@@ -61,7 +61,6 @@ class SpecSematicsTest(MockPackagesTest):
self.assertRaises(UnsatisfiableSpecError, right.copy().constrain, left)
-
def check_constrain(self, expected, spec, constraint):
exp = Spec(expected)
spec = Spec(spec)
@@ -69,52 +68,48 @@ class SpecSematicsTest(MockPackagesTest):
spec.constrain(constraint)
self.assertEqual(exp, spec)
-
def check_constrain_changed(self, spec, constraint):
spec = Spec(spec)
self.assertTrue(spec.constrain(constraint))
-
def check_constrain_not_changed(self, spec, constraint):
spec = Spec(spec)
self.assertFalse(spec.constrain(constraint))
-
def check_invalid_constraint(self, spec, constraint):
spec = Spec(spec)
constraint = Spec(constraint)
self.assertRaises(UnsatisfiableSpecError, spec.constrain, constraint)
-
- # ================================================================================
+ # ========================================================================
# Satisfiability
- # ================================================================================
+ # ========================================================================
def test_satisfies(self):
self.check_satisfies('libelf@0.8.13', '@0:1')
self.check_satisfies('libdwarf^libelf@0.8.13', '^libelf@0:1')
-
def test_satisfies_namespace(self):
self.check_satisfies('builtin.mpich', 'mpich')
self.check_satisfies('builtin.mock.mpich', 'mpich')
- # TODO: only works for deps now, but shouldn't we allow this for root spec?
+ # TODO: only works for deps now, but shouldn't we allow for root spec?
# self.check_satisfies('builtin.mock.mpich', 'mpi')
self.check_satisfies('builtin.mock.mpich', 'builtin.mock.mpich')
self.check_unsatisfiable('builtin.mock.mpich', 'builtin.mpich')
-
def test_satisfies_namespaced_dep(self):
- """Ensure spec from same or unspecified namespace satisfies namespace constraint."""
+ """Ensure spec from same or unspecified namespace satisfies namespace
+ constraint."""
self.check_satisfies('mpileaks ^builtin.mock.mpich', '^mpich')
self.check_satisfies('mpileaks ^builtin.mock.mpich', '^mpi')
- self.check_satisfies('mpileaks ^builtin.mock.mpich', '^builtin.mock.mpich')
-
- self.check_unsatisfiable('mpileaks ^builtin.mock.mpich', '^builtin.mpich')
+ self.check_satisfies(
+ 'mpileaks ^builtin.mock.mpich', '^builtin.mock.mpich')
+ self.check_unsatisfiable(
+ 'mpileaks ^builtin.mock.mpich', '^builtin.mpich')
def test_satisfies_compiler(self):
self.check_satisfies('foo%gcc', '%gcc')
@@ -122,7 +117,6 @@ class SpecSematicsTest(MockPackagesTest):
self.check_unsatisfiable('foo%intel', '%gcc')
self.check_unsatisfiable('foo%intel', '%pgi')
-
def test_satisfies_compiler_version(self):
self.check_satisfies('foo%gcc', '%gcc@4.7.2')
self.check_satisfies('foo%intel', '%intel@4.7.2')
@@ -137,14 +131,16 @@ class SpecSematicsTest(MockPackagesTest):
self.check_satisfies('foo %gcc@4.7.3', '%gcc@4.7')
self.check_unsatisfiable('foo %gcc@4.7', '%gcc@4.7.3')
-
def test_satisfies_architecture(self):
- self.check_satisfies('foo arch=chaos_5_x86_64_ib', ' arch=chaos_5_x86_64_ib')
- self.check_satisfies('foo arch=bgqos_0', ' arch=bgqos_0')
-
- self.check_unsatisfiable('foo arch=bgqos_0', ' arch=chaos_5_x86_64_ib')
- self.check_unsatisfiable('foo arch=chaos_5_x86_64_ib', ' arch=bgqos_0')
-
+ self.check_satisfies(
+ 'foo platform=test target=frontend os=frontend',
+ 'platform=test target=frontend os=frontend')
+        self.check_satisfies(
+            'foo platform=test target=backend os=backend',
+            'platform=test target=backend')
+        self.check_satisfies(
+            'foo platform=test target=backend os=backend',
+            'platform=test os=backend')
+ self.check_satisfies(
+ 'foo platform=test target=default_target os=default_os',
+ 'platform=test target=default_target os=default_os')
def test_satisfies_dependencies(self):
self.check_satisfies('mpileaks^mpich', '^mpich')
@@ -153,16 +149,18 @@ class SpecSematicsTest(MockPackagesTest):
self.check_unsatisfiable('mpileaks^mpich', '^zmpi')
self.check_unsatisfiable('mpileaks^zmpi', '^mpich')
-
def test_satisfies_dependency_versions(self):
self.check_satisfies('mpileaks^mpich@2.0', '^mpich@1:3')
self.check_unsatisfiable('mpileaks^mpich@1.2', '^mpich@2.0')
- self.check_satisfies('mpileaks^mpich@2.0^callpath@1.5', '^mpich@1:3^callpath@1.4:1.6')
- self.check_unsatisfiable('mpileaks^mpich@4.0^callpath@1.5', '^mpich@1:3^callpath@1.4:1.6')
- self.check_unsatisfiable('mpileaks^mpich@2.0^callpath@1.7', '^mpich@1:3^callpath@1.4:1.6')
- self.check_unsatisfiable('mpileaks^mpich@4.0^callpath@1.7', '^mpich@1:3^callpath@1.4:1.6')
-
+ self.check_satisfies(
+ 'mpileaks^mpich@2.0^callpath@1.5', '^mpich@1:3^callpath@1.4:1.6')
+ self.check_unsatisfiable(
+ 'mpileaks^mpich@4.0^callpath@1.5', '^mpich@1:3^callpath@1.4:1.6')
+ self.check_unsatisfiable(
+ 'mpileaks^mpich@2.0^callpath@1.7', '^mpich@1:3^callpath@1.4:1.6')
+ self.check_unsatisfiable(
+ 'mpileaks^mpich@4.0^callpath@1.7', '^mpich@1:3^callpath@1.4:1.6')
def test_satisfies_virtual_dependencies(self):
self.check_satisfies('mpileaks^mpi', '^mpi')
@@ -171,7 +169,6 @@ class SpecSematicsTest(MockPackagesTest):
self.check_satisfies('mpileaks^mpi', '^zmpi')
self.check_unsatisfiable('mpileaks^mpich', '^zmpi')
-
def test_satisfies_virtual_dependency_versions(self):
self.check_satisfies('mpileaks^mpi@1.5', '^mpi@1.2:1.6')
self.check_unsatisfiable('mpileaks^mpi@3', '^mpi@1.2:1.6')
@@ -187,26 +184,23 @@ class SpecSematicsTest(MockPackagesTest):
self.check_unsatisfiable('mpileaks^mpi@3:', '^mpich2')
self.check_unsatisfiable('mpileaks^mpi@3:', '^mpich@1.0')
-
def test_satisfies_matching_variant(self):
self.check_satisfies('mpich+foo', 'mpich+foo')
self.check_satisfies('mpich~foo', 'mpich~foo')
self.check_satisfies('mpich foo=1', 'mpich foo=1')
- #confirm that synonymous syntax works correctly
+ # confirm that synonymous syntax works correctly
self.check_satisfies('mpich+foo', 'mpich foo=True')
self.check_satisfies('mpich foo=true', 'mpich+foo')
self.check_satisfies('mpich~foo', 'mpich foo=FALSE')
self.check_satisfies('mpich foo=False', 'mpich~foo')
-
def test_satisfies_unconstrained_variant(self):
# only asked for mpich, no constraints. Either will do.
self.check_satisfies('mpich+foo', 'mpich')
self.check_satisfies('mpich~foo', 'mpich')
self.check_satisfies('mpich foo=1', 'mpich')
-
def test_unsatisfiable_variants(self):
# This case is different depending on whether the specs are concrete.
@@ -220,24 +214,21 @@ class SpecSematicsTest(MockPackagesTest):
self.check_unsatisfiable('mpich', 'mpich~foo', True)
self.check_unsatisfiable('mpich', 'mpich foo=1', True)
-
def test_unsatisfiable_variant_mismatch(self):
         # No match in specs
self.check_unsatisfiable('mpich~foo', 'mpich+foo')
self.check_unsatisfiable('mpich+foo', 'mpich~foo')
self.check_unsatisfiable('mpich foo=1', 'mpich foo=2')
-
def test_satisfies_matching_compiler_flag(self):
self.check_satisfies('mpich cppflags="-O3"', 'mpich cppflags="-O3"')
- self.check_satisfies('mpich cppflags="-O3 -Wall"', 'mpich cppflags="-O3 -Wall"')
-
+ self.check_satisfies('mpich cppflags="-O3 -Wall"',
+ 'mpich cppflags="-O3 -Wall"')
def test_satisfies_unconstrained_compiler_flag(self):
# only asked for mpich, no constraints. Any will do.
self.check_satisfies('mpich cppflags="-O3"', 'mpich')
-
def test_unsatisfiable_compiler_flag(self):
# This case is different depending on whether the specs are concrete.
@@ -247,11 +238,10 @@ class SpecSematicsTest(MockPackagesTest):
# 'mpich' is concrete:
self.check_unsatisfiable('mpich', 'mpich cppflags="-O3"', True)
-
def test_unsatisfiable_compiler_flag_mismatch(self):
        # No match in specs
- self.check_unsatisfiable('mpich cppflags="-O3"', 'mpich cppflags="-O2"')
-
+ self.check_unsatisfiable(
+ 'mpich cppflags="-O3"', 'mpich cppflags="-O2"')
def test_satisfies_virtual(self):
# Don't use check_satisfies: it checks constrain() too, and
@@ -260,25 +250,30 @@ class SpecSematicsTest(MockPackagesTest):
self.assertTrue(Spec('mpich2').satisfies(Spec('mpi')))
self.assertTrue(Spec('zmpi').satisfies(Spec('mpi')))
-
def test_satisfies_virtual_dep_with_virtual_constraint(self):
"""Ensure we can satisfy virtual constraints when there are multiple
vdep providers in the specs."""
- self.assertTrue(Spec('netlib-lapack ^openblas').satisfies('netlib-lapack ^openblas'))
- self.assertFalse(Spec('netlib-lapack ^netlib-blas').satisfies('netlib-lapack ^openblas'))
-
- self.assertFalse(Spec('netlib-lapack ^openblas').satisfies('netlib-lapack ^netlib-blas'))
- self.assertTrue(Spec('netlib-lapack ^netlib-blas').satisfies('netlib-lapack ^netlib-blas'))
-
-
- # ================================================================================
+ self.assertTrue(
+ Spec('netlib-lapack ^openblas').satisfies(
+ 'netlib-lapack ^openblas'))
+ self.assertFalse(
+ Spec('netlib-lapack ^netlib-blas').satisfies(
+ 'netlib-lapack ^openblas'))
+
+ self.assertFalse(
+ Spec('netlib-lapack ^openblas').satisfies(
+ 'netlib-lapack ^netlib-blas'))
+ self.assertTrue(
+ Spec('netlib-lapack ^netlib-blas').satisfies(
+ 'netlib-lapack ^netlib-blas'))
+
+ # ========================================================================
# Indexing specs
- # ================================================================================
+ # ========================================================================
def test_self_index(self):
s = Spec('callpath')
self.assertTrue(s['callpath'] == s)
-
def test_dep_index(self):
s = Spec('callpath')
s.normalize()
@@ -294,7 +289,6 @@ class SpecSematicsTest(MockPackagesTest):
self.assertTrue(s['libelf'].name == 'libelf')
self.assertTrue(s['mpi'].name == 'mpi')
-
def test_spec_contains_deps(self):
s = Spec('callpath')
s.normalize()
@@ -303,7 +297,6 @@ class SpecSematicsTest(MockPackagesTest):
self.assertTrue('libelf' in s)
self.assertTrue('mpi' in s)
-
def test_virtual_index(self):
s = Spec('callpath')
s.concretize()
@@ -317,7 +310,6 @@ class SpecSematicsTest(MockPackagesTest):
s_zmpi = Spec('callpath ^zmpi')
s_zmpi.concretize()
-
self.assertTrue(s['mpi'].name != 'mpi')
self.assertTrue(s_mpich['mpi'].name == 'mpich')
self.assertTrue(s_mpich2['mpi'].name == 'mpich2')
@@ -326,52 +318,62 @@ class SpecSematicsTest(MockPackagesTest):
for spec in [s, s_mpich, s_mpich2, s_zmpi]:
self.assertTrue('mpi' in spec)
-
- # ================================================================================
+ # ========================================================================
# Constraints
- # ================================================================================
+ # ========================================================================
def test_constrain_variants(self):
self.check_constrain('libelf@2.1:2.5', 'libelf@0:2.5', 'libelf@2.1:3')
self.check_constrain('libelf@2.1:2.5%gcc@4.5:4.6',
- 'libelf@0:2.5%gcc@2:4.6', 'libelf@2.1:3%gcc@4.5:4.7')
+ 'libelf@0:2.5%gcc@2:4.6',
+ 'libelf@2.1:3%gcc@4.5:4.7')
self.check_constrain('libelf+debug+foo', 'libelf+debug', 'libelf+foo')
- self.check_constrain('libelf+debug+foo', 'libelf+debug', 'libelf+debug+foo')
+ self.check_constrain('libelf+debug+foo',
+ 'libelf+debug', 'libelf+debug+foo')
- self.check_constrain('libelf debug=2 foo=1', 'libelf debug=2', 'libelf foo=1')
- self.check_constrain('libelf debug=2 foo=1', 'libelf debug=2', 'libelf debug=2 foo=1')
+ self.check_constrain('libelf debug=2 foo=1',
+ 'libelf debug=2', 'libelf foo=1')
+ self.check_constrain('libelf debug=2 foo=1',
+ 'libelf debug=2', 'libelf debug=2 foo=1')
self.check_constrain('libelf+debug~foo', 'libelf+debug', 'libelf~foo')
- self.check_constrain('libelf+debug~foo', 'libelf+debug', 'libelf+debug~foo')
-
+ self.check_constrain('libelf+debug~foo',
+ 'libelf+debug', 'libelf+debug~foo')
def test_constrain_compiler_flags(self):
- self.check_constrain('libelf cflags="-O3" cppflags="-Wall"', 'libelf cflags="-O3"', 'libelf cppflags="-Wall"')
- self.check_constrain('libelf cflags="-O3" cppflags="-Wall"', 'libelf cflags="-O3"', 'libelf cflags="-O3" cppflags="-Wall"')
-
-
- def test_constrain_arch(self):
- self.check_constrain('libelf arch=bgqos_0', 'libelf arch=bgqos_0', 'libelf arch=bgqos_0')
- self.check_constrain('libelf arch=bgqos_0', 'libelf', 'libelf arch=bgqos_0')
-
+ self.check_constrain('libelf cflags="-O3" cppflags="-Wall"',
+ 'libelf cflags="-O3"', 'libelf cppflags="-Wall"')
+ self.check_constrain('libelf cflags="-O3" cppflags="-Wall"',
+ 'libelf cflags="-O3"',
+ 'libelf cflags="-O3" cppflags="-Wall"')
+
+ def test_constrain_architecture(self):
+ self.check_constrain('libelf target=default_target os=default_os',
+ 'libelf target=default_target os=default_os',
+ 'libelf target=default_target os=default_os')
+ self.check_constrain('libelf target=default_target os=default_os',
+ 'libelf',
+ 'libelf target=default_target os=default_os')
def test_constrain_compiler(self):
- self.check_constrain('libelf %gcc@4.4.7', 'libelf %gcc@4.4.7', 'libelf %gcc@4.4.7')
- self.check_constrain('libelf %gcc@4.4.7', 'libelf', 'libelf %gcc@4.4.7')
-
+ self.check_constrain('libelf %gcc@4.4.7',
+ 'libelf %gcc@4.4.7', 'libelf %gcc@4.4.7')
+ self.check_constrain('libelf %gcc@4.4.7',
+ 'libelf', 'libelf %gcc@4.4.7')
def test_invalid_constraint(self):
self.check_invalid_constraint('libelf@0:2.0', 'libelf@2.1:3')
- self.check_invalid_constraint('libelf@0:2.5%gcc@4.8:4.9', 'libelf@2.1:3%gcc@4.5:4.7')
+ self.check_invalid_constraint(
+ 'libelf@0:2.5%gcc@4.8:4.9', 'libelf@2.1:3%gcc@4.5:4.7')
self.check_invalid_constraint('libelf+debug', 'libelf~debug')
self.check_invalid_constraint('libelf+debug~foo', 'libelf+debug+foo')
self.check_invalid_constraint('libelf debug=2', 'libelf debug=1')
- self.check_invalid_constraint('libelf cppflags="-O3"', 'libelf cppflags="-O2"')
-
- self.check_invalid_constraint('libelf arch=bgqos_0', 'libelf arch=x86_54')
-
+ self.check_invalid_constraint(
+ 'libelf cppflags="-O3"', 'libelf cppflags="-O2"')
+ self.check_invalid_constraint('libelf platform=test target=be os=be',
+ 'libelf target=fe os=fe')
def test_constrain_changed(self):
self.check_constrain_changed('libelf', '@1.0')
@@ -382,8 +384,12 @@ class SpecSematicsTest(MockPackagesTest):
self.check_constrain_changed('libelf', '~debug')
self.check_constrain_changed('libelf', 'debug=2')
self.check_constrain_changed('libelf', 'cppflags="-O3"')
- self.check_constrain_changed('libelf', ' arch=bgqos_0')
+ platform = spack.architecture.platform()
+ self.check_constrain_changed(
+ 'libelf', 'target=' + platform.target('default_target').name)
+ self.check_constrain_changed(
+ 'libelf', 'os=' + platform.operating_system('default_os').name)
def test_constrain_not_changed(self):
self.check_constrain_not_changed('libelf', 'libelf')
@@ -394,11 +400,13 @@ class SpecSematicsTest(MockPackagesTest):
self.check_constrain_not_changed('libelf+debug', '+debug')
self.check_constrain_not_changed('libelf~debug', '~debug')
self.check_constrain_not_changed('libelf debug=2', 'debug=2')
- self.check_constrain_not_changed('libelf cppflags="-O3"', 'cppflags="-O3"')
- self.check_constrain_not_changed('libelf arch=bgqos_0', ' arch=bgqos_0')
- self.check_constrain_not_changed('libelf^foo', 'libelf^foo')
- self.check_constrain_not_changed('libelf^foo^bar', 'libelf^foo^bar')
+ self.check_constrain_not_changed(
+ 'libelf cppflags="-O3"', 'cppflags="-O3"')
+ platform = spack.architecture.platform()
+ default_target = platform.target('default_target').name
+ self.check_constrain_not_changed(
+ 'libelf target=' + default_target, 'target=' + default_target)
def test_constrain_dependency_changed(self):
self.check_constrain_changed('libelf^foo', 'libelf^foo@1.0')
@@ -407,17 +415,28 @@ class SpecSematicsTest(MockPackagesTest):
self.check_constrain_changed('libelf^foo%gcc', 'libelf^foo%gcc@4.5')
self.check_constrain_changed('libelf^foo', 'libelf^foo+debug')
self.check_constrain_changed('libelf^foo', 'libelf^foo~debug')
- self.check_constrain_changed('libelf^foo', 'libelf^foo cppflags="-O3"')
- self.check_constrain_changed('libelf^foo', 'libelf^foo arch=bgqos_0')
+ platform = spack.architecture.platform()
+ default_target = platform.target('default_target').name
+ self.check_constrain_changed(
+ 'libelf^foo', 'libelf^foo target=' + default_target)
def test_constrain_dependency_not_changed(self):
self.check_constrain_not_changed('libelf^foo@1.0', 'libelf^foo@1.0')
- self.check_constrain_not_changed('libelf^foo@1.0:5.0', 'libelf^foo@1.0:5.0')
+ self.check_constrain_not_changed(
+ 'libelf^foo@1.0:5.0', 'libelf^foo@1.0:5.0')
self.check_constrain_not_changed('libelf^foo%gcc', 'libelf^foo%gcc')
- self.check_constrain_not_changed('libelf^foo%gcc@4.5', 'libelf^foo%gcc@4.5')
- self.check_constrain_not_changed('libelf^foo+debug', 'libelf^foo+debug')
- self.check_constrain_not_changed('libelf^foo~debug', 'libelf^foo~debug')
- self.check_constrain_not_changed('libelf^foo cppflags="-O3"', 'libelf^foo cppflags="-O3"')
- self.check_constrain_not_changed('libelf^foo arch=bgqos_0', 'libelf^foo arch=bgqos_0')
-
+ self.check_constrain_not_changed(
+ 'libelf^foo%gcc@4.5', 'libelf^foo%gcc@4.5')
+ self.check_constrain_not_changed(
+ 'libelf^foo+debug', 'libelf^foo+debug')
+ self.check_constrain_not_changed(
+ 'libelf^foo~debug', 'libelf^foo~debug')
+ self.check_constrain_not_changed(
+ 'libelf^foo cppflags="-O3"', 'libelf^foo cppflags="-O3"')
+
+ platform = spack.architecture.platform()
+ default_target = platform.target('default_target').name
+ self.check_constrain_not_changed(
+ 'libelf^foo target=' + default_target,
+ 'libelf^foo target=' + default_target)
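For context, the check_satisfies/check_constrain helpers in this file wrap the Spec API directly. A minimal sketch of what they exercise, assuming a Spack checkout (with its mock packages) on sys.path; mpich and libelf are the mock package names used in the tests above:

    from spack.spec import Spec

    # satisfies(): version 2.0 falls inside the range 1:3, 4.0 does not
    assert Spec('mpich@2.0').satisfies('mpich@1:3')
    assert not Spec('mpich@4.0').satisfies('mpich@1:3')

    # constrain(): merge compatible constraints in place
    s = Spec('libelf+debug')
    s.constrain(Spec('libelf~foo'))
    assert str(s) == 'libelf+debug~foo'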
diff --git a/lib/spack/spack/test/spec_syntax.py b/lib/spack/spack/test/spec_syntax.py
index c4e4c9cdfe..3079288c77 100644
--- a/lib/spack/spack/test/spec_syntax.py
+++ b/lib/spack/spack/test/spec_syntax.py
@@ -55,27 +55,32 @@ complex_lex = [Token(ID, 'mvapich_foo'),
class SpecSyntaxTest(unittest.TestCase):
- # ================================================================================
+ # ========================================================================
# Parse checks
- # ================================================================================
- def check_parse(self, expected, spec=None):
+ # ========================================================================
+
+ def check_parse(self, expected, spec=None, remove_arch=True):
"""Assert that the provided spec is able to be parsed.
- If this is called with one argument, it assumes that the string is
- canonical (i.e., no spaces and ~ instead of - for variants) and that it
- will convert back to the string it came from.
- If this is called with two arguments, the first argument is the expected
- canonical form and the second is a non-canonical input to be parsed.
+ If this is called with one argument, it assumes that the
+ string is canonical (i.e., no spaces and ~ instead of - for
+ variants) and that it will convert back to the string it came
+ from.
+
+ If this is called with two arguments, the first argument is
+ the expected canonical form and the second is a non-canonical
+ input to be parsed.
+
"""
if spec is None:
spec = expected
output = spack.spec.parse(spec)
+
parsed = (" ".join(str(spec) for spec in output))
self.assertEqual(expected, parsed)
-
def check_lex(self, tokens, spec):
- """Check that the provided spec parses to the provided list of tokens."""
+ """Check that the provided spec parses to the provided token list."""
lex_output = SpecLexer().lex(spec)
for tok, spec_tok in zip(tokens, lex_output):
if tok.type == ID:
@@ -84,9 +89,9 @@ class SpecSyntaxTest(unittest.TestCase):
# Only check the type for non-identifiers.
self.assertEqual(tok.type, spec_tok.type)
- # ================================================================================
+ # ========================================================================
# Parse checks
- # ===============================================================================
+ # ========================================================================
def test_package_names(self):
self.check_parse("mvapich")
self.check_parse("mvapich_foo")
@@ -103,18 +108,37 @@ class SpecSyntaxTest(unittest.TestCase):
self.check_parse("openmpi^hwloc@1.2e6:1.4b7-rc3")
def test_full_specs(self):
- self.check_parse("mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1+debug~qt_4^stackwalker@8.1_1e")
- self.check_parse("mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1 debug=2~qt_4^stackwalker@8.1_1e")
- self.check_parse('mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1 cppflags="-O3"+debug~qt_4^stackwalker@8.1_1e')
+ self.check_parse(
+ "mvapich_foo"
+ "^_openmpi@1.2:1.4,1.6%intel@12.1+debug~qt_4"
+ "^stackwalker@8.1_1e")
+ self.check_parse(
+ "mvapich_foo"
+ "^_openmpi@1.2:1.4,1.6%intel@12.1 debug=2~qt_4"
+ "^stackwalker@8.1_1e")
+ self.check_parse(
+ 'mvapich_foo'
+ '^_openmpi@1.2:1.4,1.6%intel@12.1 cppflags="-O3"+debug~qt_4'
+ '^stackwalker@8.1_1e')
def test_canonicalize(self):
self.check_parse(
- "mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4^stackwalker@8.1_1e",
- "mvapich_foo ^_openmpi@1.6,1.2:1.4%intel@12.1:12.6+debug~qt_4 ^stackwalker@8.1_1e")
+ "mvapich_foo"
+ "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4"
+ "^stackwalker@8.1_1e",
+
+ "mvapich_foo "
+ "^_openmpi@1.6,1.2:1.4%intel@12.1:12.6+debug~qt_4 "
+ "^stackwalker@8.1_1e")
self.check_parse(
- "mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4^stackwalker@8.1_1e",
- "mvapich_foo ^stackwalker@8.1_1e ^_openmpi@1.6,1.2:1.4%intel@12.1:12.6~qt_4+debug")
+ "mvapich_foo"
+ "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4"
+ "^stackwalker@8.1_1e",
+
+ "mvapich_foo "
+ "^stackwalker@8.1_1e "
+ "^_openmpi@1.6,1.2:1.4%intel@12.1:12.6~qt_4+debug")
self.check_parse(
"x^y@1,2:3,4%intel@1,2,3,4+a~b+c~d+e~f",
@@ -129,58 +153,81 @@ class SpecSyntaxTest(unittest.TestCase):
self.assertRaises(SpecParseError, self.check_parse, "x::")
def test_duplicate_variant(self):
- self.assertRaises(DuplicateVariantError, self.check_parse, "x@1.2+debug+debug")
- self.assertRaises(DuplicateVariantError, self.check_parse, "x ^y@1.2+debug debug=true")
- self.assertRaises(DuplicateVariantError, self.check_parse, "x ^y@1.2 debug=false debug=true")
- self.assertRaises(DuplicateVariantError, self.check_parse, "x ^y@1.2 debug=false~debug")
-
+ self.assertRaises(DuplicateVariantError,
+ self.check_parse, "x@1.2+debug+debug")
+ self.assertRaises(DuplicateVariantError,
+ self.check_parse, "x ^y@1.2+debug debug=true")
+ self.assertRaises(DuplicateVariantError, self.check_parse,
+ "x ^y@1.2 debug=false debug=true")
+ self.assertRaises(DuplicateVariantError,
+ self.check_parse, "x ^y@1.2 debug=false~debug")
    def test_duplicate_dependence(self):
- self.assertRaises(DuplicateDependencyError, self.check_parse, "x ^y ^y")
+ self.assertRaises(DuplicateDependencyError,
+ self.check_parse, "x ^y ^y")
def test_duplicate_compiler(self):
- self.assertRaises(DuplicateCompilerSpecError, self.check_parse, "x%intel%intel")
- self.assertRaises(DuplicateCompilerSpecError, self.check_parse, "x%intel%gcc")
- self.assertRaises(DuplicateCompilerSpecError, self.check_parse, "x%gcc%intel")
- self.assertRaises(DuplicateCompilerSpecError, self.check_parse, "x ^y%intel%intel")
- self.assertRaises(DuplicateCompilerSpecError, self.check_parse, "x ^y%intel%gcc")
- self.assertRaises(DuplicateCompilerSpecError, self.check_parse, "x ^y%gcc%intel")
-
-
- # ================================================================================
+ self.assertRaises(DuplicateCompilerSpecError,
+ self.check_parse, "x%intel%intel")
+ self.assertRaises(DuplicateCompilerSpecError,
+ self.check_parse, "x%intel%gcc")
+ self.assertRaises(DuplicateCompilerSpecError,
+ self.check_parse, "x%gcc%intel")
+ self.assertRaises(DuplicateCompilerSpecError,
+ self.check_parse, "x ^y%intel%intel")
+ self.assertRaises(DuplicateCompilerSpecError,
+ self.check_parse, "x ^y%intel%gcc")
+ self.assertRaises(DuplicateCompilerSpecError,
+ self.check_parse, "x ^y%gcc%intel")
+
+ # ========================================================================
# Lex checks
- # ================================================================================
+ # ========================================================================
def test_ambiguous(self):
# This first one is ambiguous because - can be in an identifier AND
# indicate disabling an option.
self.assertRaises(
AssertionError, self.check_lex, complex_lex,
- "mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug-qt_4^stackwalker@8.1_1e")
+ "mvapich_foo"
+ "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug-qt_4"
+ "^stackwalker@8.1_1e")
- # The following lexes are non-ambiguous (add a space before -qt_4) and should all
- # result in the tokens in complex_lex
+    # The following lexes are unambiguous (a space is added before -qt_4)
+    # and should all result in the tokens in complex_lex
def test_minimal_spaces(self):
self.check_lex(
complex_lex,
- "mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug -qt_4^stackwalker@8.1_1e")
+ "mvapich_foo"
+ "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug -qt_4"
+ "^stackwalker@8.1_1e")
self.check_lex(
complex_lex,
- "mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4^stackwalker@8.1_1e")
+ "mvapich_foo"
+ "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4"
+ "^stackwalker@8.1_1e")
def test_spaces_between_dependences(self):
self.check_lex(
complex_lex,
- "mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug -qt_4 ^stackwalker @ 8.1_1e")
+ "mvapich_foo "
+ "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug -qt_4 "
+ "^stackwalker @ 8.1_1e")
self.check_lex(
complex_lex,
- "mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4 ^stackwalker @ 8.1_1e")
+ "mvapich_foo "
+ "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4 "
+ "^stackwalker @ 8.1_1e")
def test_spaces_between_options(self):
self.check_lex(
complex_lex,
- "mvapich_foo ^_openmpi @1.2:1.4,1.6 %intel @12.1:12.6 +debug -qt_4 ^stackwalker @8.1_1e")
+ "mvapich_foo "
+ "^_openmpi @1.2:1.4,1.6 %intel @12.1:12.6 +debug -qt_4 "
+ "^stackwalker @8.1_1e")
def test_way_too_many_spaces(self):
self.check_lex(
complex_lex,
- "mvapich_foo ^ _openmpi @ 1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 ^ stackwalker @ 8.1_1e")
+ "mvapich_foo "
+ "^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 "
+ "^ stackwalker @ 8.1_1e")
diff --git a/lib/spack/spack/test/spec_yaml.py b/lib/spack/spack/test/spec_yaml.py
index 0230fc203a..964aea9422 100644
--- a/lib/spack/spack/test/spec_yaml.py
+++ b/lib/spack/spack/test/spec_yaml.py
@@ -30,41 +30,36 @@ YAML format preserves DAG information in the spec.
from spack.spec import Spec
from spack.test.mock_packages_test import *
-class SpecDagTest(MockPackagesTest):
+
+class SpecYamlTest(MockPackagesTest):
def check_yaml_round_trip(self, spec):
yaml_text = spec.to_yaml()
spec_from_yaml = Spec.from_yaml(yaml_text)
self.assertTrue(spec.eq_dag(spec_from_yaml))
-
def test_simple_spec(self):
spec = Spec('mpileaks')
self.check_yaml_round_trip(spec)
-
def test_normal_spec(self):
spec = Spec('mpileaks+debug~opt')
spec.normalize()
self.check_yaml_round_trip(spec)
-
def test_ambiguous_version_spec(self):
spec = Spec('mpileaks@1.0:5.0,6.1,7.3+debug~opt')
spec.normalize()
self.check_yaml_round_trip(spec)
-
def test_concrete_spec(self):
spec = Spec('mpileaks+debug~opt')
spec.concretize()
self.check_yaml_round_trip(spec)
-
def test_yaml_subdag(self):
spec = Spec('mpileaks^mpich+debug')
spec.concretize()
-
yaml_spec = Spec.from_yaml(spec.to_yaml())
for dep in ('callpath', 'mpich', 'dyninst', 'libdwarf', 'libelf'):
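The renamed SpecYamlTest reduces to one invariant: a spec serialized to YAML and parsed back is DAG-equal to the original. A sketch, assuming the mock-packages environment these tests run in:

    from spack.spec import Spec

    spec = Spec('mpileaks^mpich+debug')
    spec.concretize()
    clone = Spec.from_yaml(spec.to_yaml())
    assert spec.eq_dag(clone)  # same nodes and edges after the round trip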
diff --git a/lib/spack/spack/test/stage.py b/lib/spack/spack/test/stage.py
index 6d8c3ac67c..ec661bfe50 100644
--- a/lib/spack/spack/test/stage.py
+++ b/lib/spack/spack/test/stage.py
@@ -35,8 +35,8 @@ from llnl.util.filesystem import *
from spack.stage import Stage
from spack.util.executable import which
-test_files_dir = join_path(spack.stage_path, '.test')
-test_tmp_path = join_path(test_files_dir, 'tmp')
+test_files_dir = os.path.realpath(join_path(spack.stage_path, '.test'))
+test_tmp_path = os.path.realpath(join_path(test_files_dir, 'tmp'))
archive_dir = 'test-files'
archive_name = archive_dir + '.tar.gz'
@@ -62,6 +62,7 @@ def use_tmp(use_tmp):
class StageTest(unittest.TestCase):
+
def setUp(self):
"""This sets up a mock archive to fetch, and a mock temp space for use
by the Stage class. It doesn't actually create the Stage -- that
@@ -89,7 +90,6 @@ class StageTest(unittest.TestCase):
# be removed.
self.working_dir = os.getcwd()
-
def tearDown(self):
"""Blows away the test environment directory."""
shutil.rmtree(test_files_dir)
@@ -100,7 +100,6 @@ class StageTest(unittest.TestCase):
# restore spack's original tmp environment
spack.tmp_dirs = self.old_tmp_dirs
-
def get_stage_path(self, stage, stage_name):
"""Figure out where a stage should be living. This depends on
whether it's named.
@@ -114,7 +113,6 @@ class StageTest(unittest.TestCase):
self.assertTrue(stage.path.startswith(spack.stage_path))
return stage.path
-
def check_setup(self, stage, stage_name):
"""Figure out whether a stage was set up correctly."""
stage_path = self.get_stage_path(stage, stage_name)
@@ -139,14 +137,12 @@ class StageTest(unittest.TestCase):
# Make sure the stage path is NOT a link for a non-tmp stage
self.assertFalse(os.path.islink(stage_path))
-
def check_fetch(self, stage, stage_name):
stage_path = self.get_stage_path(stage, stage_name)
self.assertTrue(archive_name in os.listdir(stage_path))
self.assertEqual(join_path(stage_path, archive_name),
stage.fetcher.archive_file)
-
def check_expand_archive(self, stage, stage_name):
stage_path = self.get_stage_path(stage, stage_name)
self.assertTrue(archive_name in os.listdir(stage_path))
@@ -162,19 +158,16 @@ class StageTest(unittest.TestCase):
with open(readme) as file:
self.assertEqual(readme_text, file.read())
-
def check_chdir(self, stage, stage_name):
stage_path = self.get_stage_path(stage, stage_name)
self.assertEqual(os.path.realpath(stage_path), os.getcwd())
-
def check_chdir_to_source(self, stage, stage_name):
stage_path = self.get_stage_path(stage, stage_name)
self.assertEqual(
join_path(os.path.realpath(stage_path), archive_dir),
os.getcwd())
-
def check_destroy(self, stage, stage_name):
"""Figure out whether a stage was destroyed correctly."""
stage_path = self.get_stage_path(stage, stage_name)
@@ -187,35 +180,30 @@ class StageTest(unittest.TestCase):
target = os.path.realpath(stage_path)
self.assertFalse(os.path.exists(target))
-
def test_setup_and_destroy_name_with_tmp(self):
with use_tmp(True):
with Stage(archive_url, name=stage_name) as stage:
self.check_setup(stage, stage_name)
self.check_destroy(stage, stage_name)
-
def test_setup_and_destroy_name_without_tmp(self):
with use_tmp(False):
with Stage(archive_url, name=stage_name) as stage:
self.check_setup(stage, stage_name)
self.check_destroy(stage, stage_name)
-
def test_setup_and_destroy_no_name_with_tmp(self):
with use_tmp(True):
with Stage(archive_url) as stage:
self.check_setup(stage, None)
self.check_destroy(stage, None)
-
def test_setup_and_destroy_no_name_without_tmp(self):
with use_tmp(False):
with Stage(archive_url) as stage:
self.check_setup(stage, None)
self.check_destroy(stage, None)
-
def test_chdir(self):
with Stage(archive_url, name=stage_name) as stage:
stage.chdir()
@@ -223,7 +211,6 @@ class StageTest(unittest.TestCase):
self.check_chdir(stage, stage_name)
self.check_destroy(stage, stage_name)
-
def test_fetch(self):
with Stage(archive_url, name=stage_name) as stage:
stage.fetch()
@@ -232,7 +219,6 @@ class StageTest(unittest.TestCase):
self.check_fetch(stage, stage_name)
self.check_destroy(stage, stage_name)
-
def test_expand_archive(self):
with Stage(archive_url, name=stage_name) as stage:
stage.fetch()
@@ -242,8 +228,7 @@ class StageTest(unittest.TestCase):
self.check_expand_archive(stage, stage_name)
self.check_destroy(stage, stage_name)
-
- def test_expand_archive(self):
+ def test_expand_archive_with_chdir(self):
with Stage(archive_url, name=stage_name) as stage:
stage.fetch()
self.check_setup(stage, stage_name)
@@ -254,7 +239,6 @@ class StageTest(unittest.TestCase):
self.check_chdir_to_source(stage, stage_name)
self.check_destroy(stage, stage_name)
-
def test_restage(self):
with Stage(archive_url, name=stage_name) as stage:
stage.fetch()
@@ -278,20 +262,17 @@ class StageTest(unittest.TestCase):
self.assertFalse('foobar' in os.listdir(stage.source_path))
self.check_destroy(stage, stage_name)
-
def test_no_keep_without_exceptions(self):
with Stage(archive_url, name=stage_name, keep=False) as stage:
pass
self.check_destroy(stage, stage_name)
-
def test_keep_without_exceptions(self):
with Stage(archive_url, name=stage_name, keep=True) as stage:
pass
path = self.get_stage_path(stage, stage_name)
self.assertTrue(os.path.isdir(path))
-
def test_no_keep_with_exceptions(self):
try:
with Stage(archive_url, name=stage_name, keep=False) as stage:
@@ -300,8 +281,7 @@ class StageTest(unittest.TestCase):
path = self.get_stage_path(stage, stage_name)
self.assertTrue(os.path.isdir(path))
except:
- pass # ignore here.
-
+ pass # ignore here.
def test_keep_exceptions(self):
try:
@@ -311,4 +291,4 @@ class StageTest(unittest.TestCase):
path = self.get_stage_path(stage, stage_name)
self.assertTrue(os.path.isdir(path))
except:
- pass # ignore here.
+ pass # ignore here.
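The keep-related tests above pin down the Stage context-manager contract. A usage sketch; archive_url is a hypothetical stand-in for the local mock tarball the tests build:

    from spack.stage import Stage

    archive_url = 'file:///tmp/test-files.tar.gz'  # placeholder
    with Stage(archive_url, name='spack-stage-test', keep=True) as stage:
        stage.fetch()
        stage.expand_archive()
    # keep=True leaves the stage directory on disk after the block exits;
    # keep=False destroys it on a clean exit but, per the tests above,
    # preserves it when an exception escapes the block.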
diff --git a/lib/spack/spack/test/svn_fetch.py b/lib/spack/spack/test/svn_fetch.py
index 0a745a090b..9ef7593ed1 100644
--- a/lib/spack/spack/test/svn_fetch.py
+++ b/lib/spack/spack/test/svn_fetch.py
@@ -94,17 +94,15 @@ class SvnFetchTest(MockPackagesTest):
self.assert_rev(rev)
-
def test_fetch_default(self):
"""Test a default checkout and make sure it's on rev 1"""
self.try_fetch(self.repo.r1, self.repo.r1_file, {
- 'svn' : self.repo.url
+ 'svn': self.repo.url
})
-
def test_fetch_r1(self):
"""Test fetching an older revision (0)."""
self.try_fetch(self.repo.r0, self.repo.r0_file, {
- 'svn' : self.repo.url,
- 'revision' : self.repo.r0
+ 'svn': self.repo.url,
+ 'revision': self.repo.r0
})
diff --git a/lib/spack/spack/test/tally_plugin.py b/lib/spack/spack/test/tally_plugin.py
index 96af1c9b21..808694d186 100644
--- a/lib/spack/spack/test/tally_plugin.py
+++ b/lib/spack/spack/test/tally_plugin.py
@@ -26,6 +26,7 @@ import os
from nose.plugins import Plugin
+
class Tally(Plugin):
name = 'tally'
diff --git a/lib/spack/spack/test/url_extrapolate.py b/lib/spack/spack/test/url_extrapolate.py
index ffd4230f71..ca14dab958 100644
--- a/lib/spack/spack/test/url_extrapolate.py
+++ b/lib/spack/spack/test/url_extrapolate.py
@@ -34,20 +34,21 @@ class UrlExtrapolateTest(unittest.TestCase):
def check_url(self, base, version, new_url):
self.assertEqual(url.substitute_version(base, version), new_url)
-
def test_libelf_version(self):
base = "http://www.mr511.de/software/libelf-0.8.13.tar.gz"
self.check_url(base, '0.8.13', base)
- self.check_url(base, '0.8.12', "http://www.mr511.de/software/libelf-0.8.12.tar.gz")
- self.check_url(base, '0.3.1', "http://www.mr511.de/software/libelf-0.3.1.tar.gz")
- self.check_url(base, '1.3.1b', "http://www.mr511.de/software/libelf-1.3.1b.tar.gz")
-
+ self.check_url(
+ base, '0.8.12', "http://www.mr511.de/software/libelf-0.8.12.tar.gz")
+ self.check_url(
+ base, '0.3.1', "http://www.mr511.de/software/libelf-0.3.1.tar.gz")
+ self.check_url(
+ base, '1.3.1b', "http://www.mr511.de/software/libelf-1.3.1b.tar.gz")
def test_libdwarf_version(self):
base = "http://www.prevanders.net/libdwarf-20130729.tar.gz"
self.check_url(base, '20130729', base)
- self.check_url(base, '8.12', "http://www.prevanders.net/libdwarf-8.12.tar.gz")
-
+ self.check_url(
+ base, '8.12', "http://www.prevanders.net/libdwarf-8.12.tar.gz")
def test_dyninst_version(self):
# Dyninst has a version twice in the URL.
@@ -58,7 +59,6 @@ class UrlExtrapolateTest(unittest.TestCase):
self.check_url(base, '8.3.1',
"http://www.dyninst.org/sites/default/files/downloads/dyninst/8.3.1/DyninstAPI-8.3.1.tgz")
-
def test_partial_version_prefix(self):
# Test now with a partial prefix earlier in the URL -- this is
# hard to figure out so Spack only substitutes the last
@@ -72,7 +72,6 @@ class UrlExtrapolateTest(unittest.TestCase):
self.check_url(base, '8.3.1',
"http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1/DyninstAPI-8.3.1.tgz")
-
def test_scalasca_partial_version(self):
# Note that this probably doesn't actually work, but sites are
# inconsistent about their directory structure, so it's not
@@ -84,19 +83,16 @@ class UrlExtrapolateTest(unittest.TestCase):
self.check_url('http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3-TP1.tar.gz', '8.3.1',
'http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-8.3.1.tar.gz')
-
def test_mpileaks_version(self):
self.check_url('https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz', '2.1.3',
'https://github.com/hpc/mpileaks/releases/download/v2.1.3/mpileaks-2.1.3.tar.gz')
-
def test_gcc(self):
self.check_url('http://open-source-box.org/gcc/gcc-4.9.2/gcc-4.9.2.tar.bz2', '4.7',
'http://open-source-box.org/gcc/gcc-4.7/gcc-4.7.tar.bz2')
self.check_url('http://open-source-box.org/gcc/gcc-4.4.7/gcc-4.4.7.tar.bz2', '4.4.7',
'http://open-source-box.org/gcc/gcc-4.4.7/gcc-4.4.7.tar.bz2')
-
def test_github_raw(self):
self.check_url('https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true', '2.0.7',
'https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true')
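All of these extrapolation checks funnel through url.substitute_version, which swaps the detected version substring for a new one. A minimal sketch, grounded on the libelf case above:

    import spack.url as url

    base = "http://www.mr511.de/software/libelf-0.8.13.tar.gz"
    assert url.substitute_version(base, '0.8.12') == \
        "http://www.mr511.de/software/libelf-0.8.12.tar.gz"
    assert url.substitute_version(base, '0.8.13') == base  # no-op case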
diff --git a/lib/spack/spack/test/url_parse.py b/lib/spack/spack/test/url_parse.py
index 648996aaaa..6c944a3e7a 100644
--- a/lib/spack/spack/test/url_parse.py
+++ b/lib/spack/spack/test/url_parse.py
@@ -32,11 +32,11 @@ import spack.url as url
class UrlParseTest(unittest.TestCase):
+
def assert_not_detected(self, string):
self.assertRaises(
url.UndetectableVersionError, url.parse_name_and_version, string)
-
def check(self, name, v, string, **kwargs):
# Make sure correct name and version are extracted.
parsed_name, parsed_v = url.parse_name_and_version(string)
@@ -52,7 +52,6 @@ class UrlParseTest(unittest.TestCase):
# build one with a specific version.
self.assertEqual(string, url.substitute_version(string, v))
-
def test_wwwoffle_version(self):
self.check(
'wwwoffle', '2.9h',
@@ -72,7 +71,7 @@ class UrlParseTest(unittest.TestCase):
def test_version_all_dots(self):
self.check(
- 'foo.bar.la', '1.14','http://example.com/foo.bar.la.1.14.zip')
+ 'foo.bar.la', '1.14', 'http://example.com/foo.bar.la.1.14.zip')
def test_version_underscore_separator(self):
self.check(
@@ -286,7 +285,7 @@ class UrlParseTest(unittest.TestCase):
'mvapich2', '1.9',
'http://mvapich.cse.ohio-state.edu/download/mvapich2/mv2/mvapich2-1.9.tgz')
- def test_mvapich2_19_version(self):
+ def test_mvapich2_20_version(self):
self.check(
'mvapich2', '2.0',
'http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.0.tar.gz')
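UrlParseTest.check pairs name and version extraction through a single call. A sketch grounded on the mvapich2 case above (requires a Spack checkout on sys.path):

    import spack.url as url

    name, version = url.parse_name_and_version(
        'http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/'
        'mvapich2-2.0.tar.gz')
    assert (name, str(version)) == ('mvapich2', '2.0')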
diff --git a/lib/spack/spack/test/url_substitution.py b/lib/spack/spack/test/url_substitution.py
index 9cc04834b6..ea6374e3d2 100644
--- a/lib/spack/spack/test/url_substitution.py
+++ b/lib/spack/spack/test/url_substitution.py
@@ -26,37 +26,31 @@
This test does sanity checks on substituting new versions into URLs
"""
import unittest
-
import spack.url as url
+base = "https://comp.llnl.gov/linear_solvers/download/hypre-2.9.0b.tar.gz"
+stem = "https://comp.llnl.gov/linear_solvers/download/hypre-"
+
+
class PackageSanityTest(unittest.TestCase):
- def test_hypre_url_substitution(self):
- base = "https://computation-rnd.llnl.gov/linear_solvers/download/hypre-2.9.0b.tar.gz"
+ def test_hypre_url_substitution(self):
self.assertEqual(url.substitute_version(base, '2.9.0b'), base)
self.assertEqual(
- url.substitute_version(base, '2.8.0b'),
- "https://computation-rnd.llnl.gov/linear_solvers/download/hypre-2.8.0b.tar.gz")
+ url.substitute_version(base, '2.8.0b'), stem + "2.8.0b.tar.gz")
self.assertEqual(
- url.substitute_version(base, '2.7.0b'),
- "https://computation-rnd.llnl.gov/linear_solvers/download/hypre-2.7.0b.tar.gz")
+ url.substitute_version(base, '2.7.0b'), stem + "2.7.0b.tar.gz")
self.assertEqual(
- url.substitute_version(base, '2.6.0b'),
- "https://computation-rnd.llnl.gov/linear_solvers/download/hypre-2.6.0b.tar.gz")
+ url.substitute_version(base, '2.6.0b'), stem + "2.6.0b.tar.gz")
self.assertEqual(
- url.substitute_version(base, '1.14.0b'),
- "https://computation-rnd.llnl.gov/linear_solvers/download/hypre-1.14.0b.tar.gz")
+ url.substitute_version(base, '1.14.0b'), stem + "1.14.0b.tar.gz")
self.assertEqual(
- url.substitute_version(base, '1.13.0b'),
- "https://computation-rnd.llnl.gov/linear_solvers/download/hypre-1.13.0b.tar.gz")
+ url.substitute_version(base, '1.13.0b'), stem + "1.13.0b.tar.gz")
self.assertEqual(
- url.substitute_version(base, '2.0.0'),
- "https://computation-rnd.llnl.gov/linear_solvers/download/hypre-2.0.0.tar.gz")
+ url.substitute_version(base, '2.0.0'), stem + "2.0.0.tar.gz")
self.assertEqual(
- url.substitute_version(base, '1.6.0'),
- "https://computation-rnd.llnl.gov/linear_solvers/download/hypre-1.6.0.tar.gz")
-
+ url.substitute_version(base, '1.6.0'), stem + "1.6.0.tar.gz")
def test_otf2_url_substitution(self):
base = "http://www.vi-hps.org/upload/packages/otf2/otf2-1.4.tar.gz"
diff --git a/lib/spack/spack/test/versions.py b/lib/spack/spack/test/versions.py
index a026403e2e..41d72e7c34 100644
--- a/lib/spack/spack/test/versions.py
+++ b/lib/spack/spack/test/versions.py
@@ -43,7 +43,6 @@ class VersionsTest(unittest.TestCase):
self.assertFalse(a > b)
self.assertFalse(a >= b)
-
def assert_ver_gt(self, a, b):
a, b = ver(a), ver(b)
self.assertTrue(a > b)
@@ -53,7 +52,6 @@ class VersionsTest(unittest.TestCase):
self.assertFalse(a < b)
self.assertFalse(a <= b)
-
def assert_ver_eq(self, a, b):
a, b = ver(a), ver(b)
self.assertFalse(a > b)
@@ -63,55 +61,46 @@ class VersionsTest(unittest.TestCase):
self.assertFalse(a < b)
self.assertTrue(a <= b)
-
def assert_in(self, needle, haystack):
self.assertTrue(ver(needle) in ver(haystack))
-
def assert_not_in(self, needle, haystack):
self.assertFalse(ver(needle) in ver(haystack))
-
def assert_canonical(self, canonical_list, version_list):
self.assertEqual(ver(canonical_list), ver(version_list))
-
def assert_overlaps(self, v1, v2):
self.assertTrue(ver(v1).overlaps(ver(v2)))
-
def assert_no_overlap(self, v1, v2):
self.assertFalse(ver(v1).overlaps(ver(v2)))
-
def assert_satisfies(self, v1, v2):
self.assertTrue(ver(v1).satisfies(ver(v2)))
-
def assert_does_not_satisfy(self, v1, v2):
self.assertFalse(ver(v1).satisfies(ver(v2)))
-
def check_intersection(self, expected, a, b):
self.assertEqual(ver(expected), ver(a).intersection(ver(b)))
-
def check_union(self, expected, a, b):
self.assertEqual(ver(expected), ver(a).union(ver(b)))
-
def test_two_segments(self):
self.assert_ver_eq('1.0', '1.0')
self.assert_ver_lt('1.0', '2.0')
self.assert_ver_gt('2.0', '1.0')
-
+ self.assert_ver_eq('develop', 'develop')
+ self.assert_ver_lt('1.0', 'develop')
+ self.assert_ver_gt('develop', '1.0')
def test_three_segments(self):
self.assert_ver_eq('2.0.1', '2.0.1')
self.assert_ver_lt('2.0', '2.0.1')
self.assert_ver_gt('2.0.1', '2.0')
-
def test_alpha(self):
# TODO: not sure whether I like this. 2.0.1a is *usually*
# TODO: less than 2.0.1, but special-casing it makes version
@@ -120,7 +109,6 @@ class VersionsTest(unittest.TestCase):
self.assert_ver_gt('2.0.1a', '2.0.1')
self.assert_ver_lt('2.0.1', '2.0.1a')
-
def test_patch(self):
self.assert_ver_eq('5.5p1', '5.5p1')
self.assert_ver_lt('5.5p1', '5.5p2')
@@ -129,7 +117,6 @@ class VersionsTest(unittest.TestCase):
self.assert_ver_lt('5.5p1', '5.5p10')
self.assert_ver_gt('5.5p10', '5.5p1')
-
def test_num_alpha_with_no_separator(self):
self.assert_ver_lt('10xyz', '10.1xyz')
self.assert_ver_gt('10.1xyz', '10xyz')
@@ -137,7 +124,6 @@ class VersionsTest(unittest.TestCase):
self.assert_ver_lt('xyz10', 'xyz10.1')
self.assert_ver_gt('xyz10.1', 'xyz10')
-
def test_alpha_with_dots(self):
self.assert_ver_eq('xyz.4', 'xyz.4')
self.assert_ver_lt('xyz.4', '8')
@@ -145,30 +131,25 @@ class VersionsTest(unittest.TestCase):
self.assert_ver_lt('xyz.4', '2')
self.assert_ver_gt('2', 'xyz.4')
-
def test_nums_and_patch(self):
self.assert_ver_lt('5.5p2', '5.6p1')
self.assert_ver_gt('5.6p1', '5.5p2')
self.assert_ver_lt('5.6p1', '6.5p1')
self.assert_ver_gt('6.5p1', '5.6p1')
-
def test_rc_versions(self):
self.assert_ver_gt('6.0.rc1', '6.0')
self.assert_ver_lt('6.0', '6.0.rc1')
-
def test_alpha_beta(self):
self.assert_ver_gt('10b2', '10a1')
self.assert_ver_lt('10a2', '10b2')
-
def test_double_alpha(self):
self.assert_ver_eq('1.0aa', '1.0aa')
self.assert_ver_lt('1.0a', '1.0aa')
self.assert_ver_gt('1.0aa', '1.0a')
-
def test_padded_numbers(self):
self.assert_ver_eq('10.0001', '10.0001')
self.assert_ver_eq('10.0001', '10.1')
@@ -176,24 +157,20 @@ class VersionsTest(unittest.TestCase):
self.assert_ver_lt('10.0001', '10.0039')
self.assert_ver_gt('10.0039', '10.0001')
-
def test_close_numbers(self):
self.assert_ver_lt('4.999.9', '5.0')
self.assert_ver_gt('5.0', '4.999.9')
-
def test_date_stamps(self):
self.assert_ver_eq('20101121', '20101121')
self.assert_ver_lt('20101121', '20101122')
self.assert_ver_gt('20101122', '20101121')
-
def test_underscores(self):
self.assert_ver_eq('2_0', '2_0')
self.assert_ver_eq('2.0', '2_0')
self.assert_ver_eq('2_0', '2.0')
-
def test_rpm_oddities(self):
self.assert_ver_eq('1b.fc17', '1b.fc17')
self.assert_ver_lt('1b.fc17', '1.fc17')
@@ -202,7 +179,6 @@ class VersionsTest(unittest.TestCase):
self.assert_ver_gt('1g.fc17', '1.fc17')
self.assert_ver_lt('1.fc17', '1g.fc17')
-
# Stuff below here is not taken from RPM's tests and is
# unique to spack
def test_version_ranges(self):
@@ -214,7 +190,6 @@ class VersionsTest(unittest.TestCase):
self.assert_ver_lt('1.2:1.4', '1.5:1.6')
self.assert_ver_gt('1.5:1.6', '1.2:1.4')
-
def test_contains(self):
self.assert_in('1.3', '1.2:1.4')
self.assert_in('1.2.5', '1.2:1.4')
@@ -233,7 +208,6 @@ class VersionsTest(unittest.TestCase):
self.assert_in('1.4.1', '1.2.7:1.4')
self.assert_not_in('1.4.1', '1.2.7:1.4.0')
-
def test_in_list(self):
self.assert_in('1.2', ['1.5', '1.2', '1.3'])
self.assert_in('1.2.5', ['1.5', '1.2:1.3'])
@@ -245,7 +219,6 @@ class VersionsTest(unittest.TestCase):
self.assert_not_in('1.2.5:1.5', ['1.5', '1.2:1.3'])
self.assert_not_in('1.1:1.2.5', ['1.5', '1.2:1.3'])
-
def test_ranges_overlap(self):
self.assert_overlaps('1.2', '1.2')
self.assert_overlaps('1.2.1', '1.2.1')
@@ -262,7 +235,6 @@ class VersionsTest(unittest.TestCase):
self.assert_overlaps(':', '1.6:1.9')
self.assert_overlaps('1.6:1.9', ':')
-
def test_overlap_with_containment(self):
self.assert_in('1.6.5', '1.6')
self.assert_in('1.6.5', ':1.6')
@@ -273,7 +245,6 @@ class VersionsTest(unittest.TestCase):
self.assert_not_in(':1.6', '1.6.5')
self.assert_in('1.6.5', ':1.6')
-
def test_lists_overlap(self):
self.assert_overlaps('1.2b:1.7,5', '1.6:1.9,1')
self.assert_overlaps('1,2,3,4,5', '3,4,5,6,7')
@@ -287,7 +258,6 @@ class VersionsTest(unittest.TestCase):
self.assert_no_overlap('1,2,3,4,5', '6,7')
self.assert_no_overlap('1,2,3,4,5', '6:7')
-
def test_canonicalize_list(self):
self.assert_canonical(['1.2', '1.3', '1.4'],
['1.2', '1.3', '1.3', '1.4'])
@@ -316,7 +286,6 @@ class VersionsTest(unittest.TestCase):
self.assert_canonical([':'],
[':,1.3, 1.3.1,1.3.9,1.4 : 1.5 , 1.3 : 1.4'])
-
def test_intersection(self):
self.check_intersection('2.5',
'1.0:2.5', '2.5:3.0')
@@ -325,12 +294,11 @@ class VersionsTest(unittest.TestCase):
self.check_intersection('0:1', ':', '0:1')
self.check_intersection(['1.0', '2.5:2.7'],
- ['1.0:2.7'], ['2.5:3.0','1.0'])
+ ['1.0:2.7'], ['2.5:3.0', '1.0'])
self.check_intersection(['2.5:2.7'],
- ['1.1:2.7'], ['2.5:3.0','1.0'])
+ ['1.1:2.7'], ['2.5:3.0', '1.0'])
self.check_intersection(['0:1'], [':'], ['0:1'])
-
def test_intersect_with_containment(self):
self.check_intersection('1.6.5', '1.6.5', ':1.6')
self.check_intersection('1.6.5', ':1.6', '1.6.5')
@@ -338,7 +306,6 @@ class VersionsTest(unittest.TestCase):
self.check_intersection('1.6:1.6.5', ':1.6.5', '1.6')
self.check_intersection('1.6:1.6.5', '1.6', ':1.6.5')
-
def test_union_with_containment(self):
self.check_union(':1.6', '1.6.5', ':1.6')
self.check_union(':1.6', ':1.6', '1.6.5')
@@ -346,8 +313,6 @@ class VersionsTest(unittest.TestCase):
self.check_union(':1.6', ':1.6.5', '1.6')
self.check_union(':1.6', '1.6', ':1.6.5')
-
- def test_union_with_containment(self):
self.check_union(':', '1.0:', ':2.0')
self.check_union('1:4', '1:3', '2:4')
@@ -356,7 +321,6 @@ class VersionsTest(unittest.TestCase):
# Tests successor/predecessor case.
self.check_union('1:4', '1:2', '3:4')
-
def test_basic_version_satisfaction(self):
self.assert_satisfies('4.7.3', '4.7.3')
@@ -372,7 +336,6 @@ class VersionsTest(unittest.TestCase):
self.assert_does_not_satisfy('4.8', '4.9')
self.assert_does_not_satisfy('4', '4.9')
-
def test_basic_version_satisfaction_in_lists(self):
self.assert_satisfies(['4.7.3'], ['4.7.3'])
@@ -388,7 +351,6 @@ class VersionsTest(unittest.TestCase):
self.assert_does_not_satisfy(['4.8'], ['4.9'])
self.assert_does_not_satisfy(['4'], ['4.9'])
-
def test_version_range_satisfaction(self):
self.assert_satisfies('4.7b6', '4.3:4.7')
self.assert_satisfies('4.3.0', '4.3:4.7')
@@ -400,7 +362,6 @@ class VersionsTest(unittest.TestCase):
self.assert_satisfies('4.7b6', '4.3:4.7')
self.assert_does_not_satisfy('4.8.0', '4.3:4.7')
-
def test_version_range_satisfaction_in_lists(self):
self.assert_satisfies(['4.7b6'], ['4.3:4.7'])
self.assert_satisfies(['4.3.0'], ['4.3:4.7'])
@@ -423,3 +384,47 @@ class VersionsTest(unittest.TestCase):
self.assert_satisfies('4.8.0', '4.2, 4.3:4.8')
self.assert_satisfies('4.8.2', '4.2, 4.3:4.8')
+
+ def test_formatted_strings(self):
+ versions = '1.2.3', '1_2_3', '1-2-3'
+ for item in versions:
+ v = Version(item)
+ self.assertEqual(v.dotted, '1.2.3')
+ self.assertEqual(v.dashed, '1-2-3')
+ self.assertEqual(v.underscored, '1_2_3')
+
+ def test_repr_and_str(self):
+
+ def check_repr_and_str(vrs):
+ a = Version(vrs)
+ self.assertEqual(repr(a), 'Version(\'' + vrs + '\')')
+ b = eval(repr(a))
+ self.assertEqual(a, b)
+ self.assertEqual(str(a), vrs)
+ self.assertEqual(str(a), str(b))
+
+ check_repr_and_str('1.2.3')
+ check_repr_and_str('R2016a')
+ check_repr_and_str('R2016a.2-3_4')
+
+ def test_get_item(self):
+ a = Version('0.1_2-3')
+ self.assertTrue(isinstance(a[1], int))
+ # Test slicing
+ b = a[0:2]
+ self.assertTrue(isinstance(b, Version))
+ self.assertEqual(b, Version('0.1'))
+ self.assertEqual(repr(b), 'Version(\'0.1\')')
+ self.assertEqual(str(b), '0.1')
+ b = a[0:3]
+ self.assertTrue(isinstance(b, Version))
+ self.assertEqual(b, Version('0.1_2'))
+ self.assertEqual(repr(b), 'Version(\'0.1_2\')')
+ self.assertEqual(str(b), '0.1_2')
+ b = a[1:]
+ self.assertTrue(isinstance(b, Version))
+ self.assertEqual(b, Version('1_2-3'))
+ self.assertEqual(repr(b), 'Version(\'1_2-3\')')
+ self.assertEqual(str(b), '1_2-3')
+ # Raise TypeError on tuples
+ self.assertRaises(TypeError, b.__getitem__, 1, 2)
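The tests added at the bottom of versions.py document three Version behaviors worth noting: 'develop' sorts above any numeric release, separators normalize across '.', '_' and '-', and slices are themselves Versions. A sketch, assuming spack.version is importable:

    from spack.version import Version, ver

    assert ver('develop') > ver('1.0')   # 'develop' beats numeric releases
    assert Version('1_2_3').dotted == '1.2.3'
    v = Version('0.1_2-3')
    assert v[0:2] == Version('0.1')      # slicing yields a Version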
diff --git a/lib/spack/spack/test/yaml.py b/lib/spack/spack/test/yaml.py
index f1b83e7b71..dedbd15d10 100644
--- a/lib/spack/spack/test/yaml.py
+++ b/lib/spack/spack/test/yaml.py
@@ -45,26 +45,25 @@ config_file:
"""
test_data = {
- 'config_file' : syaml.syaml_dict([
+ 'config_file': syaml.syaml_dict([
('x86_64', syaml.syaml_dict([
('foo', '/path/to/foo'),
('bar', '/path/to/bar'),
- ('baz', '/path/to/baz' )])),
- ('some_list', [ 'item 1', 'item 2', 'item 3' ]),
- ('another_list', [ 1, 2, 3 ]),
+ ('baz', '/path/to/baz')])),
+ ('some_list', ['item 1', 'item 2', 'item 3']),
+ ('another_list', [1, 2, 3]),
('some_key', 'some_string')
])}
+
class YamlTest(unittest.TestCase):
def setUp(self):
self.data = syaml.load(test_file)
-
def test_parse(self):
self.assertEqual(test_data, self.data)
-
def test_dict_order(self):
self.assertEqual(
['x86_64', 'some_list', 'another_list', 'some_key'],
@@ -74,7 +73,6 @@ class YamlTest(unittest.TestCase):
['foo', 'bar', 'baz'],
self.data['config_file']['x86_64'].keys())
-
def test_line_numbers(self):
def check(obj, start_line, end_line):
self.assertEqual(obj._start_mark.line, start_line)
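YamlTest checks that Spack's YAML wrapper keeps mappings in source order. A sketch, assuming the syaml module imported by this test is spack.util.spack_yaml (Python 2, so keys() returns a list):

    import spack.util.spack_yaml as syaml

    data = syaml.load("config_file:\n  x86_64:\n    foo: /path/to/foo\n")
    assert data['config_file'].keys() == ['x86_64']          # order kept
    assert data['config_file']['x86_64'].keys() == ['foo']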
diff --git a/lib/spack/spack/url.py b/lib/spack/spack/url.py
index f678a2dca9..02c9c04380 100644
--- a/lib/spack/spack/url.py
+++ b/lib/spack/spack/url.py
@@ -56,12 +56,12 @@ import spack.error
import spack.util.compression as comp
from spack.version import Version
+
#
# Note: We call the input to most of these functions a "path" but the functions
# work on paths and URLs. There's not a good word for both of these, but
# "path" seemed like the most generic term.
#
-
def find_list_url(url):
"""Finds a good list URL for the supplied URL. This depends on
the site. By default, just assumes that a good list URL is the
@@ -71,8 +71,8 @@ def find_list_url(url):
url_types = [
# e.g. https://github.com/llnl/callpath/archive/v1.0.1.tar.gz
- (r'^(https://github.com/[^/]+/[^/]+)/archive/', lambda m: m.group(1) + '/releases')
- ]
+ (r'^(https://github.com/[^/]+/[^/]+)/archive/',
+ lambda m: m.group(1) + '/releases')]
for pattern, fun in url_types:
match = re.search(pattern, url)
@@ -89,8 +89,10 @@ def strip_query_and_fragment(path):
query, frag = components[3:5]
suffix = ''
- if query: suffix += '?' + query
- if frag: suffix += '#' + frag
+ if query:
+ suffix += '?' + query
+ if frag:
+ suffix += '#' + frag
return (urlunsplit(stripped), suffix)
@@ -152,8 +154,10 @@ def downloaded_file_extension(path):
"""
match = re.search(r'github.com/.+/(zip|tar)ball/', path)
if match:
- if match.group(1) == 'zip': return 'zip'
- elif match.group(1) == 'tar': return 'tar.gz'
+ if match.group(1) == 'zip':
+ return 'zip'
+ elif match.group(1) == 'tar':
+ return 'tar.gz'
prefix, ext, suffix = split_url_extension(path)
if not ext:
@@ -193,7 +197,8 @@ def parse_version_offset(path):
(r'[-_](R\d+[AB]\d*(-\d+)?)', path),
# e.g., https://github.com/hpc/libcircle/releases/download/0.2.1-rc.1/libcircle-0.2.1-rc.1.tar.gz
- # e.g., https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz
+ # e.g.,
+ # https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz
(r'github.com/[^/]+/[^/]+/releases/download/v?([^/]+)/.*$', path),
# e.g. boost_1_39_0
@@ -201,7 +206,7 @@ def parse_version_offset(path):
# e.g. foobar-4.5.1-1
# e.g. ruby-1.9.1-p243
- (r'-((\d+\.)*\d\.\d+-(p|rc|RC)?\d+)(?:[-._](?:bin|dist|stable|src|sources))?$', stem),
+ (r'-((\d+\.)*\d\.\d+-(p|rc|RC)?\d+)(?:[-._](?:bin|dist|stable|src|sources))?$', stem), # noqa
# e.g. lame-398-1
(r'-((\d)+-\d)', stem),
@@ -275,7 +280,8 @@ def parse_name_offset(path, v=None):
name_types = [
(r'/sourceforge/([^/]+)/', path),
- (r'github.com/[^/]+/[^/]+/releases/download/%s/(.*)-%s$' % (v, v), path),
+ (r'github.com/[^/]+/[^/]+/releases/download/%s/(.*)-%s$' %
+ (v, v), path),
(r'/([^/]+)/(tarball|zipball)/', path),
(r'/([^/]+)[_.-](bin|dist|stable|src|sources)[_.-]%s' % v, path),
(r'github.com/[^/]+/([^/]+)/archive', path),
@@ -283,7 +289,8 @@ def parse_name_offset(path, v=None):
(r'([^/]+)[_.-]v?%s' % v, stem), # prefer the stem
(r'([^/]+)%s' % v, stem),
- (r'/([^/]+)[_.-]v?%s' % v, path), # accept the path if name is not in stem.
+ # accept the path if name is not in stem.
+ (r'/([^/]+)[_.-]v?%s' % v, path),
(r'/([^/]+)%s' % v, path),
(r'^([^/]+)[_.-]v?%s' % v, path),
@@ -326,7 +333,7 @@ def insensitize(string):
return re.sub(r'([a-zA-Z])', to_ins, string)
-def cumsum(elts, init=0, fn=lambda x:x):
+def cumsum(elts, init=0, fn=lambda x: x):
"""Return cumulative sum of result of fn on each element in elts."""
sums = []
s = init
@@ -337,21 +344,20 @@ def cumsum(elts, init=0, fn=lambda x:x):
def substitution_offsets(path):
- """This returns offsets for substituting versions and names in the provided path.
- It is a helper for substitute_version().
+ """This returns offsets for substituting versions and names in the
+ provided path. It is a helper for substitute_version().
"""
# Get name and version offsets
try:
ver, vs, vl = parse_version_offset(path)
name, ns, nl = parse_name_offset(path, ver)
- except UndetectableNameError, e:
+ except UndetectableNameError:
return (None, -1, -1, (), ver, vs, vl, (vs,))
- except UndetectableVersionError, e:
+ except UndetectableVersionError:
return (None, -1, -1, (), None, -1, -1, ())
# protect extensions like bz2 from getting inadvertently
# considered versions.
- ext = comp.extension(path)
path = comp.strip_extension(path)
# Construct a case-insensitive regular expression for the package name.
@@ -449,7 +455,7 @@ def color_url(path, **kwargs):
Cyan: The version found by parse_version_offset().
Red: The name found by parse_name_offset().
- Green: Instances of version string substituted by substitute_version().
+ Green: Instances of version string from substitute_version().
Magenta: Instances of the name (protected from substitution).
Optional args:
@@ -469,31 +475,46 @@ def color_url(path, **kwargs):
nerr = verr = 0
out = StringIO()
for i in range(len(path)):
- if i == vs: out.write('@c'); verr += 1
- elif i == ns: out.write('@r'); nerr += 1
+ if i == vs:
+ out.write('@c')
+ verr += 1
+ elif i == ns:
+ out.write('@r')
+ nerr += 1
elif subs:
- if i in voffs: out.write('@g')
- elif i in noffs: out.write('@m')
+ if i in voffs:
+ out.write('@g')
+ elif i in noffs:
+ out.write('@m')
out.write(path[i])
- if i == vs + vl - 1: out.write('@.'); verr += 1
- elif i == ns + nl - 1: out.write('@.'); nerr += 1
+ if i == vs + vl - 1:
+ out.write('@.')
+ verr += 1
+ elif i == ns + nl - 1:
+ out.write('@.')
+ nerr += 1
elif subs:
if i in vends or i in nends:
out.write('@.')
if errors:
- if nerr == 0: out.write(" @r{[no name]}")
- if verr == 0: out.write(" @r{[no version]}")
- if nerr == 1: out.write(" @r{[incomplete name]}")
- if verr == 1: out.write(" @r{[incomplete version]}")
+ if nerr == 0:
+ out.write(" @r{[no name]}")
+ if verr == 0:
+ out.write(" @r{[no version]}")
+ if nerr == 1:
+ out.write(" @r{[incomplete name]}")
+ if verr == 1:
+ out.write(" @r{[incomplete version]}")
return colorize(out.getvalue())
class UrlParseError(spack.error.SpackError):
"""Raised when the URL module can't parse something correctly."""
+
def __init__(self, msg, path):
super(UrlParseError, self).__init__(msg)
self.path = path
@@ -501,6 +522,7 @@ class UrlParseError(spack.error.SpackError):
class UndetectableVersionError(UrlParseError):
"""Raised when we can't parse a version from a string."""
+
def __init__(self, path):
super(UndetectableVersionError, self).__init__(
"Couldn't detect version in: " + path, path)
@@ -508,6 +530,7 @@ class UndetectableVersionError(UrlParseError):
class UndetectableNameError(UrlParseError):
"""Raised when we can't parse a package name from a string."""
+
def __init__(self, path):
super(UndetectableNameError, self).__init__(
"Couldn't parse package name in: " + path, path)
diff --git a/lib/spack/spack/util/compression.py b/lib/spack/spack/util/compression.py
index dc1188eb0f..64554ab2f7 100644
--- a/lib/spack/spack/util/compression.py
+++ b/lib/spack/spack/util/compression.py
@@ -32,7 +32,9 @@ PRE_EXTS = ["tar"]
EXTS = ["gz", "bz2", "xz", "Z", "zip", "tgz"]
# Add PRE_EXTS and EXTS last so that .tar.gz is matched *before* .tar or .gz
-ALLOWED_ARCHIVE_TYPES = [".".join(l) for l in product(PRE_EXTS, EXTS)] + PRE_EXTS + EXTS
+ALLOWED_ARCHIVE_TYPES = [".".join(l) for l in product(
+ PRE_EXTS, EXTS)] + PRE_EXTS + EXTS
+
def allowed_archive(path):
return any(path.endswith(t) for t in ALLOWED_ARCHIVE_TYPES)
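The rewrapped ALLOWED_ARCHIVE_TYPES expression preserves the ordering the comment calls out: compound extensions must precede their parts. A standalone reconstruction:

    from itertools import product

    PRE_EXTS = ["tar"]
    EXTS = ["gz", "bz2", "xz", "Z", "zip", "tgz"]
    types = [".".join(t) for t in product(PRE_EXTS, EXTS)] + PRE_EXTS + EXTS
    assert types[0] == "tar.gz"  # matched before the bare "tar" or "gz"
    assert any("foo.tar.gz".endswith(t) for t in types)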
diff --git a/lib/spack/spack/util/crypto.py b/lib/spack/spack/util/crypto.py
index 1ae9793518..22777fdb68 100644
--- a/lib/spack/spack/util/crypto.py
+++ b/lib/spack/spack/util/crypto.py
@@ -31,7 +31,7 @@ _acceptable_hashes = [
hashlib.sha224,
hashlib.sha256,
hashlib.sha384,
- hashlib.sha512 ]
+ hashlib.sha512]
"""Index for looking up hasher for a digest."""
_size_to_hash = dict((h().digest_size, h) for h in _acceptable_hashes)
@@ -52,7 +52,6 @@ def checksum(hashlib_algo, filename, **kwargs):
return hasher.hexdigest()
-
class Checker(object):
"""A checker checks files against one particular hex digest.
It will automatically determine what hashing algorithm
@@ -74,25 +73,25 @@ class Checker(object):
adjusting the block_size optional arg. By default it's
a 1MB (2**20 bytes) buffer.
"""
+
def __init__(self, hexdigest, **kwargs):
self.block_size = kwargs.get('block_size', 2**20)
self.hexdigest = hexdigest
self.sum = None
bytes = len(hexdigest) / 2
- if not bytes in _size_to_hash:
+ if bytes not in _size_to_hash:
raise ValueError(
- 'Spack knows no hash algorithm for this digest: %s' % hexdigest)
+ 'Spack knows no hash algorithm for this digest: %s'
+ % hexdigest)
self.hash_fun = _size_to_hash[bytes]
-
@property
def hash_name(self):
"""Get the name of the hash function this Checker is using."""
return self.hash_fun().name
-
def check(self, filename):
"""Read the file with the specified name and check its checksum
against self.hexdigest. Return True if they match, False
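Checker picks its hash algorithm from the digest length alone, via the _size_to_hash table built earlier in this file (the _acceptable_hashes list is truncated in the hunk above; md5 and sha1 are assumed to head it). A standalone sketch of the trick:

    import hashlib

    hashes = [hashlib.md5, hashlib.sha1, hashlib.sha224,
              hashlib.sha256, hashlib.sha384, hashlib.sha512]
    size_to_hash = dict((h().digest_size, h) for h in hashes)

    hexdigest = hashlib.sha256(b'hello').hexdigest()
    nbytes = len(hexdigest) // 2  # two hex characters per byte
    assert size_to_hash[nbytes] is hashlib.sha256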
diff --git a/lib/spack/spack/util/debug.py b/lib/spack/spack/util/debug.py
index e8a0595416..cf485a611d 100644
--- a/lib/spack/spack/util/debug.py
+++ b/lib/spack/spack/util/debug.py
@@ -33,10 +33,11 @@ import code
import traceback
import signal
+
def debug_handler(sig, frame):
"""Interrupt running process, and provide a python prompt for
interactive debugging."""
- d = {'_frame':frame} # Allow access to frame object.
+ d = {'_frame': frame} # Allow access to frame object.
d.update(frame.f_globals) # Unless shadowed by global
d.update(frame.f_locals)
@@ -48,5 +49,5 @@ def debug_handler(sig, frame):
def register_interrupt_handler():
- """Register a handler to print a stack trace and enter an interpreter on Ctrl-C"""
+ """Print traceback and enter an interpreter on Ctrl-C"""
signal.signal(signal.SIGINT, debug_handler)
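register_interrupt_handler is the one public entry point here; wiring it up is a single call. A sketch (the sleep stands in for real work):

    import time
    from spack.util.debug import register_interrupt_handler

    register_interrupt_handler()  # Ctrl-C now opens an interactive prompt
    time.sleep(60)                # press Ctrl-C here to try it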
diff --git a/lib/spack/spack/util/executable.py b/lib/spack/spack/util/executable.py
index 38b778fa00..5c27b92df5 100644
--- a/lib/spack/spack/util/executable.py
+++ b/lib/spack/spack/util/executable.py
@@ -226,6 +226,7 @@ def which(name, **kwargs):
class ProcessError(spack.error.SpackError):
+
def __init__(self, msg, long_message=None):
# These are used for detailed debugging information for
# package builds. They're built up gradually as the exception
diff --git a/lib/spack/spack/util/multiproc.py b/lib/spack/spack/util/multiproc.py
index 038cd90121..6a25c45713 100644
--- a/lib/spack/spack/util/multiproc.py
+++ b/lib/spack/spack/util/multiproc.py
@@ -32,18 +32,21 @@ from itertools import izip
__all__ = ['spawn', 'parmap', 'Barrier']
+
def spawn(f):
- def fun(pipe,x):
+ def fun(pipe, x):
pipe.send(f(x))
pipe.close()
return fun
-def parmap(f,X):
- pipe=[Pipe() for x in X]
- proc=[Process(target=spawn(f),args=(c,x)) for x,(p,c) in izip(X,pipe)]
+
+def parmap(f, X):
+ pipe = [Pipe() for x in X]
+ proc = [Process(target=spawn(f), args=(c, x))
+ for x, (p, c) in izip(X, pipe)]
[p.start() for p in proc]
[p.join() for p in proc]
- return [p.recv() for (p,c) in pipe]
+ return [p.recv() for (p, c) in pipe]
class Barrier:
@@ -53,6 +56,7 @@ class Barrier:
See http://greenteapress.com/semaphores/downey08semaphores.pdf, p. 41.
"""
+
def __init__(self, n, timeout=None):
self.n = n
self.to = timeout
@@ -61,7 +65,6 @@ class Barrier:
self.turnstile1 = Semaphore(0)
self.turnstile2 = Semaphore(1)
-
def wait(self):
if not self.mutex.acquire(timeout=self.to):
raise BarrierTimeoutError()
@@ -90,4 +93,5 @@ class Barrier:
self.turnstile2.release()
-class BarrierTimeoutError: pass
+class BarrierTimeoutError:
+ pass
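parmap above fans each element out to its own process over a Pipe and gathers results in input order. A usage sketch (Python 2, matching the izip import in this module):

    from spack.util.multiproc import parmap

    squares = parmap(lambda x: x * x, [1, 2, 3, 4])
    assert squares == [1, 4, 9, 16]  # results return in input order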
diff --git a/lib/spack/spack/util/naming.py b/lib/spack/spack/util/naming.py
index 2d9762942d..9a5cdee411 100644
--- a/lib/spack/spack/util/naming.py
+++ b/lib/spack/spack/util/naming.py
@@ -31,9 +31,15 @@ from StringIO import StringIO
import spack
-__all__ = ['mod_to_class', 'spack_module_to_python_module', 'valid_module_name',
- 'valid_fully_qualified_module_name', 'validate_fully_qualified_module_name',
- 'validate_module_name', 'possible_spack_module_names', 'NamespaceTrie']
+__all__ = [
+ 'mod_to_class',
+ 'spack_module_to_python_module',
+ 'valid_module_name',
+ 'valid_fully_qualified_module_name',
+ 'validate_fully_qualified_module_name',
+ 'validate_module_name',
+ 'possible_spack_module_names',
+ 'NamespaceTrie']
# Valid module names can contain '-' but can't start with it.
_valid_module_re = r'^\w[\w-]*$'
@@ -67,8 +73,8 @@ def mod_to_class(mod_name):
class_name = string.capwords(class_name, '-')
class_name = class_name.replace('-', '')
- # If a class starts with a number, prefix it with Number_ to make it a valid
- # Python class name.
+ # If a class starts with a number, prefix it with Number_ to make it
+ # a valid Python class name.
if re.match(r'^[0-9]', class_name):
class_name = "_%s" % class_name
@@ -126,6 +132,7 @@ def validate_fully_qualified_module_name(mod_name):
class InvalidModuleNameError(spack.error.SpackError):
"""Raised when we encounter a bad module name."""
+
def __init__(self, name):
super(InvalidModuleNameError, self).__init__(
"Invalid module name: " + name)
@@ -134,6 +141,7 @@ class InvalidModuleNameError(spack.error.SpackError):
class InvalidFullyQualifiedModuleNameError(spack.error.SpackError):
"""Raised when we encounter a bad full package name."""
+
def __init__(self, name):
super(InvalidFullyQualifiedModuleNameError, self).__init__(
"Invalid fully qualified package name: " + name)
@@ -141,17 +149,17 @@ class InvalidFullyQualifiedModuleNameError(spack.error.SpackError):
class NamespaceTrie(object):
+
class Element(object):
+
def __init__(self, value):
self.value = value
-
def __init__(self, separator='.'):
self._subspaces = {}
self._value = None
self._sep = separator
-
def __setitem__(self, namespace, value):
first, sep, rest = namespace.partition(self._sep)
@@ -164,7 +172,6 @@ class NamespaceTrie(object):
self._subspaces[first][rest] = value
-
def _get_helper(self, namespace, full_name):
first, sep, rest = namespace.partition(self._sep)
if not first:
@@ -176,13 +183,12 @@ class NamespaceTrie(object):
else:
return self._subspaces[first]._get_helper(rest, full_name)
-
def __getitem__(self, namespace):
return self._get_helper(namespace, namespace)
-
def is_prefix(self, namespace):
- """True if the namespace has a value, or if it's the prefix of one that does."""
+ """True if the namespace has a value, or if it's the prefix of one that
+ does."""
first, sep, rest = namespace.partition(self._sep)
if not first:
return True
@@ -191,7 +197,6 @@ class NamespaceTrie(object):
else:
return self._subspaces[first].is_prefix(rest)
-
def is_leaf(self, namespace):
"""True if this namespace has no children in the trie."""
first, sep, rest = namespace.partition(self._sep)
@@ -202,7 +207,6 @@ class NamespaceTrie(object):
else:
return self._subspaces[first].is_leaf(rest)
-
def has_value(self, namespace):
"""True if there is a value set for the given namespace."""
first, sep, rest = namespace.partition(self._sep)
@@ -213,20 +217,17 @@ class NamespaceTrie(object):
else:
return self._subspaces[first].has_value(rest)
-
def __contains__(self, namespace):
"""Returns whether a value has been set for the namespace."""
return self.has_value(namespace)
-
def _str_helper(self, stream, level=0):
indent = (level * ' ')
for name in sorted(self._subspaces):
stream.write(indent + name + '\n')
if self._value:
stream.write(indent + ' ' + repr(self._value.value))
- stream.write(self._subspaces[name]._str_helper(stream, level+1))
-
+ stream.write(self._subspaces[name]._str_helper(stream, level + 1))
def __str__(self):
stream = StringIO()
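
To make the trie methods above concrete, a short sketch of the intended behavior (the namespace names are hypothetical):

    from spack.util.naming import NamespaceTrie

    trie = NamespaceTrie()
    trie['builtin.mpi'] = 'some value'

    assert trie.is_prefix('builtin')      # prefix of a namespace with a value
    assert not trie.has_value('builtin')  # but no value at this level itself
    assert 'builtin.mpi' in trie          # __contains__ delegates to has_value
    assert trie['builtin.mpi'] == 'some value'
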
diff --git a/lib/spack/spack/util/pattern.py b/lib/spack/spack/util/pattern.py
index 6d4bcb1039..c36445193c 100644
--- a/lib/spack/spack/util/pattern.py
+++ b/lib/spack/spack/util/pattern.py
@@ -28,42 +28,52 @@ import functools
def composite(interface=None, method_list=None, container=list):
- """
- Returns a class decorator that patches a class adding all the methods it needs to be a composite for a given
- interface.
+ """Returns a class decorator that patches a class adding all the methods
+ it needs to be a composite for a given interface.
- :param interface: class exposing the interface to which the composite object must conform. Only non-private and
- non-special methods will be taken into account
+ :param interface: class exposing the interface to which the composite
+ object must conform. Only non-private and non-special methods will be
+ taken into account
:param method_list: names of methods that should be part of the composite
- :param container: container for the composite object (default = list). Must fulfill the MutableSequence contract.
- The composite class will expose the container API to manage object composition
+ :param container: container for the composite object (default = list).
+ Must fulfill the MutableSequence contract. The composite class will expose
+ the container API to manage object composition
:return: class decorator
"""
- # Check if container fulfills the MutableSequence contract and raise an exception if it doesn't
- # The patched class returned by the decorator will inherit from the container class to expose the
- # interface needed to manage objects composition
+ # Check if container fulfills the MutableSequence contract and raise an
+ # exception if it doesn't. The patched class returned by the decorator will
+ # inherit from the container class to expose the interface needed to manage
+    # object composition.
if not issubclass(container, collections.MutableSequence):
raise TypeError("Container must fulfill the MutableSequence contract")
- # Check if at least one of the 'interface' or the 'method_list' arguments are defined
+ # Check if at least one of the 'interface' or the 'method_list' arguments
+ # are defined
if interface is None and method_list is None:
- raise TypeError("Either 'interface' or 'method_list' must be defined on a call to composite")
+ raise TypeError(
+ "Either 'interface' or 'method_list' must be defined on a call "
+ "to composite")
def cls_decorator(cls):
- # Retrieve the base class of the composite. Inspect its methods and decide which ones will be overridden
+ # Retrieve the base class of the composite. Inspect its methods and
+ # decide which ones will be overridden
def no_special_no_private(x):
return inspect.ismethod(x) and not x.__name__.startswith('_')
- # Patch the behavior of each of the methods in the previous list. This is done associating an instance of the
- # descriptor below to any method that needs to be patched.
+ # Patch the behavior of each of the methods in the previous list.
+    # This is done by associating an instance of the descriptor below
+    # with each method that needs to be patched.
class IterateOver(object):
+ """Decorator used to patch methods in a composite.
+
+            It iterates over all the items in the composite instance and,
+            for each of them, calls the attribute with the same name as
+            the patched method.
"""
- Decorator used to patch methods in a composite. It iterates over all the items in the instance containing the
- associated attribute and calls for each of them an attribute with the same name
- """
+
def __init__(self, name, func=None):
self.name = name
self.func = func
@@ -72,8 +82,9 @@ def composite(interface=None, method_list=None, container=list):
def getter(*args, **kwargs):
for item in instance:
getattr(item, self.name)(*args, **kwargs)
- # If we are using this descriptor to wrap a method from an interface, then we must conditionally
- # use the `functools.wraps` decorator to set the appropriate fields.
+ # If we are using this descriptor to wrap a method from an
+ # interface, then we must conditionally use the
+ # `functools.wraps` decorator to set the appropriate fields
if self.func is not None:
getter = functools.wraps(self.func)(getter)
return getter
@@ -81,7 +92,8 @@ def composite(interface=None, method_list=None, container=list):
dictionary_for_type_call = {}
# Construct a dictionary with the methods explicitly passed as name
if method_list is not None:
- # python@2.7: method_list_dict = {name: IterateOver(name) for name in method_list}
+ # python@2.7: method_list_dict = {name: IterateOver(name) for name
+ # in method_list}
method_list_dict = {}
for name in method_list:
method_list_dict[name] = IterateOver(name)
@@ -89,28 +101,42 @@ def composite(interface=None, method_list=None, container=list):
# Construct a dictionary with the methods inspected from the interface
if interface is not None:
##########
- # python@2.7: interface_methods = {name: method for name, method in inspect.getmembers(interface, predicate=no_special_no_private)}
+ # python@2.7: interface_methods = {name: method for name, method in
+ # inspect.getmembers(interface, predicate=no_special_no_private)}
interface_methods = {}
- for name, method in inspect.getmembers(interface, predicate=no_special_no_private):
+ for name, method in inspect.getmembers(
+ interface, predicate=no_special_no_private):
interface_methods[name] = method
##########
- # python@2.7: interface_methods_dict = {name: IterateOver(name, method) for name, method in interface_methods.iteritems()}
+ # python@2.7: interface_methods_dict = {name: IterateOver(name,
+ # method) for name, method in interface_methods.iteritems()}
interface_methods_dict = {}
for name, method in interface_methods.iteritems():
interface_methods_dict[name] = IterateOver(name, method)
##########
dictionary_for_type_call.update(interface_methods_dict)
- # Get the methods that are defined in the scope of the composite class and override any previous definition
+ # Get the methods that are defined in the scope of the composite
+ # class and override any previous definition
##########
- # python@2.7: cls_method = {name: method for name, method in inspect.getmembers(cls, predicate=inspect.ismethod)}
+ # python@2.7: cls_method = {name: method for name, method in
+ # inspect.getmembers(cls, predicate=inspect.ismethod)}
cls_method = {}
- for name, method in inspect.getmembers(cls, predicate=inspect.ismethod):
+ for name, method in inspect.getmembers(
+ cls, predicate=inspect.ismethod):
cls_method[name] = method
##########
dictionary_for_type_call.update(cls_method)
# Generate the new class on the fly and return it
# FIXME : inherit from interface if we start to use ABC classes?
- wrapper_class = type(cls.__name__, (cls, container), dictionary_for_type_call)
+ wrapper_class = type(cls.__name__, (cls, container),
+ dictionary_for_type_call)
return wrapper_class
return cls_decorator
+
+
+class Bunch(object):
+ """Carries a bunch of named attributes (from Alex Martelli bunch)"""
+
+ def __init__(self, **kwargs):
+ self.__dict__.update(kwargs)
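
A minimal sketch of the decorator in its method_list form, plus Bunch (the class and method names here are hypothetical):

    from spack.util.pattern import composite, Bunch

    @composite(method_list=['fetch'])
    class FetchGroup(object):
        pass

    class Fetcher(object):
        def fetch(self):
            print 'fetching'

    group = FetchGroup()  # also a MutableSequence, via the list base class
    group.append(Fetcher())
    group.append(Fetcher())
    group.fetch()         # IterateOver calls fetch() on every item in order

    b = Bunch(name='mpich', version='3.0.4')
    print b.name, b.version
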
diff --git a/lib/spack/spack/util/prefix.py b/lib/spack/spack/util/prefix.py
index e1a0f2958b..985d862269 100644
--- a/lib/spack/spack/util/prefix.py
+++ b/lib/spack/spack/util/prefix.py
@@ -27,6 +27,7 @@ This file contains utilities to help with installing packages.
"""
from llnl.util.filesystem import join_path
+
class Prefix(str):
"""This class represents an installation prefix, but provides useful
attributes for referring to directories inside the prefix.
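
Only the class docstring is visible in this hunk; the attribute set itself (assumed here to include bin and lib, as in the rest of the file) is defined below the shown context. A hedged sketch of the pattern:

    from spack.util.prefix import Prefix

    prefix = Prefix('/opt/spack/mpich-3.0.4')
    # Prefix subclasses str, so it drops into any API expecting a path,
    # while also exposing common subdirectories as attributes.
    print prefix.bin   # assumed attribute: '/opt/spack/mpich-3.0.4/bin'
    print prefix.lib   # assumed attribute: '/opt/spack/mpich-3.0.4/lib'
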
diff --git a/lib/spack/spack/util/spack_yaml.py b/lib/spack/spack/util/spack_yaml.py
index 909f9a57a8..7bcdf2d61f 100644
--- a/lib/spack/spack/util/spack_yaml.py
+++ b/lib/spack/spack/util/spack_yaml.py
@@ -34,7 +34,6 @@
import yaml
from yaml.nodes import *
from yaml.constructor import ConstructorError
-from yaml.representer import SafeRepresenter
from ordereddict_backport import OrderedDict
# Only export load and dump
@@ -42,15 +41,23 @@ __all__ = ['load', 'dump']
# Make new classes so we can add custom attributes.
# Also, use OrderedDict instead of just dict.
+
+
class syaml_dict(OrderedDict):
+
def __repr__(self):
- mappings = ('%r: %r' % (k,v) for k,v in self.items())
+ mappings = ('%r: %r' % (k, v) for k, v in self.items())
return '{%s}' % ', '.join(mappings)
+
+
class syaml_list(list):
__repr__ = list.__repr__
+
+
class syaml_str(str):
__repr__ = str.__repr__
+
def mark(obj, node):
"""Add start and end markers to an object."""
obj._start_mark = node.start_mark
@@ -73,6 +80,7 @@ class OrderedLineLoader(yaml.Loader):
# The standard YAML constructors return empty instances and fill
# in with mappings later. We preserve this behavior.
#
+
def construct_yaml_str(self, node):
value = self.construct_scalar(node)
try:
@@ -83,14 +91,12 @@ class OrderedLineLoader(yaml.Loader):
mark(value, node)
return value
-
def construct_yaml_seq(self, node):
data = syaml_list()
mark(data, node)
yield data
data.extend(self.construct_sequence(node))
-
def construct_yaml_map(self, node):
data = syaml_dict()
mark(data, node)
@@ -104,22 +110,23 @@ class OrderedLineLoader(yaml.Loader):
#
def construct_sequence(self, node, deep=False):
if not isinstance(node, SequenceNode):
- raise ConstructorError(None, None,
- "expected a sequence node, but found %s" % node.id,
- node.start_mark)
- value = syaml_list(self.construct_object(child, deep=deep)
- for child in node.value)
+ raise ConstructorError(
+ None, None,
+ "expected a sequence node, but found %s" % node.id,
+ node.start_mark)
+ value = syaml_list(self.construct_object(child, deep=deep)
+ for child in node.value)
mark(value, node)
return value
-
def construct_mapping(self, node, deep=False):
"""Store mappings as OrderedDicts instead of as regular python
dictionaries to preserve file ordering."""
if not isinstance(node, MappingNode):
- raise ConstructorError(None, None,
- "expected a mapping node, but found %s" % node.id,
- node.start_mark)
+ raise ConstructorError(
+ None, None,
+ "expected a mapping node, but found %s" % node.id,
+ node.start_mark)
mapping = syaml_dict()
for key_node, value_node in node.value:
@@ -127,22 +134,26 @@ class OrderedLineLoader(yaml.Loader):
try:
hash(key)
except TypeError, exc:
- raise ConstructorError("while constructing a mapping", node.start_mark,
- "found unacceptable key (%s)" % exc, key_node.start_mark)
+ raise ConstructorError(
+ "while constructing a mapping", node.start_mark,
+ "found unacceptable key (%s)" % exc, key_node.start_mark)
value = self.construct_object(value_node, deep=deep)
if key in mapping:
- raise ConstructorError("while constructing a mapping", node.start_mark,
- "found already in-use key (%s)" % key, key_node.start_mark)
+ raise ConstructorError(
+ "while constructing a mapping", node.start_mark,
+ "found already in-use key (%s)" % key, key_node.start_mark)
mapping[key] = value
mark(mapping, node)
return mapping
# register above new constructors
-OrderedLineLoader.add_constructor(u'tag:yaml.org,2002:map', OrderedLineLoader.construct_yaml_map)
-OrderedLineLoader.add_constructor(u'tag:yaml.org,2002:seq', OrderedLineLoader.construct_yaml_seq)
-OrderedLineLoader.add_constructor(u'tag:yaml.org,2002:str', OrderedLineLoader.construct_yaml_str)
-
+OrderedLineLoader.add_constructor(
+ u'tag:yaml.org,2002:map', OrderedLineLoader.construct_yaml_map)
+OrderedLineLoader.add_constructor(
+ u'tag:yaml.org,2002:seq', OrderedLineLoader.construct_yaml_seq)
+OrderedLineLoader.add_constructor(
+ u'tag:yaml.org,2002:str', OrderedLineLoader.construct_yaml_str)
class OrderedLineDumper(yaml.Dumper):
@@ -154,6 +165,7 @@ class OrderedLineDumper(yaml.Dumper):
regular Python equivalents, instead of ugly YAML pyobjects.
"""
+
def represent_mapping(self, tag, mapping, flow_style=None):
value = []
node = MappingNode(tag, value, flow_style=flow_style)
@@ -173,7 +185,8 @@ class OrderedLineDumper(yaml.Dumper):
node_value = self.represent_data(item_value)
if not (isinstance(node_key, ScalarNode) and not node_key.style):
best_style = False
- if not (isinstance(node_value, ScalarNode) and not node_value.style):
+ if not (isinstance(node_value, ScalarNode) and
+ not node_value.style):
best_style = False
value.append((node_key, node_value))
if flow_style is None:
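
Given the constructors registered above, the loader behaves roughly like this (a sketch; it assumes the module's exported load() uses OrderedLineLoader, which is what __all__ and the registrations suggest):

    import spack.util.spack_yaml as syaml

    text = """\
    packages:
      mpich:
        version: [3.0.4]
    """
    data = syaml.load(text)

    # Mappings come back as syaml_dict (an OrderedDict), so key order is
    # preserved; every scalar is a syaml_str carrying mark() positions.
    key = data.keys()[0]
    print key, key._start_mark.line   # 'packages' and its source line
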
diff --git a/lib/spack/spack/util/string.py b/lib/spack/spack/util/string.py
index ce017b8126..dae7afbf46 100644
--- a/lib/spack/spack/util/string.py
+++ b/lib/spack/spack/util/string.py
@@ -23,6 +23,7 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+
def comma_list(sequence, article=''):
if type(sequence) != list:
sequence = list(sequence)
@@ -32,7 +33,7 @@ def comma_list(sequence, article=''):
elif len(sequence) == 1:
return sequence[0]
else:
- out = ', '.join(str(s) for s in sequence[:-1])
+ out = ', '.join(str(s) for s in sequence[:-1])
if len(sequence) != 2:
out += ',' # oxford comma
out += ' '
@@ -41,6 +42,7 @@ def comma_list(sequence, article=''):
out += str(sequence[-1])
return out
+
def comma_or(sequence):
return comma_list(sequence, 'or')
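
The behavior of these helpers, traced from the code above:

    from spack.util.string import comma_or

    comma_or(['foo'])                 # 'foo'
    comma_or(['foo', 'bar'])          # 'foo or bar'
    comma_or(['foo', 'bar', 'baz'])   # 'foo, bar, or baz' (oxford comma)
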
diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py
index 47abc507e0..29ed6e0d32 100644
--- a/lib/spack/spack/util/web.py
+++ b/lib/spack/spack/util/web.py
@@ -25,8 +25,7 @@
import re
import os
import sys
-import subprocess
-import urllib2, cookielib
+import urllib2
import urlparse
from multiprocessing import Pool
from HTMLParser import HTMLParser, HTMLParseError
@@ -44,6 +43,7 @@ TIMEOUT = 10
class LinkParser(HTMLParser):
"""This parser just takes an HTML page and strips out the hrefs on the
links. Good enough for a really simple spider. """
+
def __init__(self):
HTMLParser.__init__(self)
self.links = []
@@ -84,7 +84,7 @@ def _spider(args):
req.get_method = lambda: "HEAD"
resp = urllib2.urlopen(req, timeout=TIMEOUT)
- if not "Content-type" in resp.headers:
+ if "Content-type" not in resp.headers:
tty.debug("ignoring page " + url)
return pages, links
@@ -109,7 +109,7 @@ def _spider(args):
while link_parser.links:
raw_link = link_parser.links.pop()
- abs_link = urlparse.urljoin(response_url, raw_link)
+ abs_link = urlparse.urljoin(response_url, raw_link.strip())
links.add(abs_link)
@@ -125,11 +125,11 @@ def _spider(args):
if abs_link in visited:
continue
- # If we're not at max depth, follow links.
- if depth < max_depth:
- subcalls.append((abs_link, visited, root, None,
- depth+1, max_depth, raise_on_error))
- visited.add(abs_link)
+ # If we're not at max depth, follow links.
+ if depth < max_depth:
+ subcalls.append((abs_link, visited, root, None,
+ depth + 1, max_depth, raise_on_error))
+ visited.add(abs_link)
if subcalls:
try:
@@ -142,22 +142,22 @@ def _spider(args):
pool.terminate()
pool.join()
- except urllib2.URLError, e:
+ except urllib2.URLError as e:
tty.debug(e)
if raise_on_error:
raise spack.error.NoNetworkConnectionError(str(e), url)
- except HTMLParseError, e:
+ except HTMLParseError as e:
# This error indicates that Python's HTML parser sucks.
msg = "Got an error parsing HTML."
# Pre-2.7.3 Pythons in particular have rather prickly HTML parsing.
- if sys.version_info[:3] < (2,7,3):
+ if sys.version_info[:3] < (2, 7, 3):
msg += " Use Python 2.7.3 or newer for better HTML parsing."
tty.warn(msg, url, "HTMLParseError: " + str(e))
- except Exception, e:
+ except Exception as e:
# Other types of errors are completely ignored, except in debug mode.
tty.debug("Error in _spider: %s" % e)
@@ -173,7 +173,8 @@ def spider(root_url, **kwargs):
performance over a sequential fetch.
"""
max_depth = kwargs.setdefault('depth', 1)
- pages, links = _spider((root_url, set(), root_url, None, 1, max_depth, False))
+ pages, links = _spider((root_url, set(), root_url, None,
+ 1, max_depth, False))
return pages, links
@@ -235,7 +236,7 @@ def find_versions_of_archive(*archive_urls, **kwargs):
try:
ver = spack.url.parse_version(url)
versions[ver] = url
- except spack.url.UndetectableVersionError as e:
+ except spack.url.UndetectableVersionError:
continue
return versions
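
A hedged sketch of the two entry points touched above (the example.com URLs are placeholders):

    from spack.util.web import spider, find_versions_of_archive

    # Crawl links two levels deep, in parallel, from a download page.
    pages, links = spider('http://example.com/downloads/', depth=2)

    # Scrape a Version -> URL mapping for archives that look like this one.
    versions = find_versions_of_archive(
        'http://example.com/downloads/foo-1.2.3.tar.gz')
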
diff --git a/lib/spack/spack/variant.py b/lib/spack/spack/variant.py
index ad875f5ef5..b2c1a73489 100644
--- a/lib/spack/spack/variant.py
+++ b/lib/spack/spack/variant.py
@@ -29,8 +29,10 @@ currently variants are just flags.
"""
+
class Variant(object):
"""Represents a variant on a build. Can be either on or off."""
+
def __init__(self, default, description):
self.default = default
self.description = str(description)
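
A variant is just a default value plus a description string; a small sketch:

    from spack.variant import Variant

    # Packages keep these in a dict keyed by variant name.
    debug = Variant(False, 'Build with debugging symbols')
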
diff --git a/lib/spack/spack/version.py b/lib/spack/spack/version.py
index 247f6d2362..e1311eb0d9 100644
--- a/lib/spack/spack/version.py
+++ b/lib/spack/spack/version.py
@@ -43,16 +43,19 @@ be called on any of the types::
intersection
concrete
"""
-import os
-import sys
import re
+import numbers
from bisect import bisect_left
from functools import wraps
+
from functools_backport import total_ordering
+__all__ = ['Version', 'VersionRange', 'VersionList', 'ver']
+
# Valid version characters
VALID_VERSION = r'[A-Za-z0-9_.-]'
+
def int_if_int(string):
"""Convert a string to int if possible. Otherwise, return a string."""
try:
@@ -62,10 +65,11 @@ def int_if_int(string):
def coerce_versions(a, b):
- """Convert both a and b to the 'greatest' type between them, in this order:
+ """
+ Convert both a and b to the 'greatest' type between them, in this order:
Version < VersionRange < VersionList
- This is used to simplify comparison operations below so that we're always
- comparing things that are of the same type.
+ This is used to simplify comparison operations below so that we're always
+ comparing things that are of the same type.
"""
order = (Version, VersionRange, VersionList)
ta, tb = type(a), type(b)
@@ -105,6 +109,7 @@ def coerced(method):
@total_ordering
class Version(object):
"""Class to represent versions"""
+
def __init__(self, string):
string = str(string)
@@ -124,6 +129,17 @@ class Version(object):
# last element of separators is ''
self.separators = tuple(re.split(segment_regex, string)[1:-1])
+ @property
+ def dotted(self):
+ return '.'.join(str(x) for x in self.version)
+
+ @property
+ def underscored(self):
+ return '_'.join(str(x) for x in self.version)
+
+ @property
+ def dashed(self):
+ return '-'.join(str(x) for x in self.version)
def up_to(self, index):
"""Return a version string up to the specified component, exclusive.
@@ -131,15 +147,12 @@ class Version(object):
"""
return '.'.join(str(x) for x in self[:index])
-
def lowest(self):
return self
-
def highest(self):
return self
-
@coerced
def satisfies(self, other):
"""A Version 'satisfies' another if it is at least as specific and has a
@@ -147,11 +160,10 @@ class Version(object):
gcc@4.7 so that when a user asks to build with gcc@4.7, we can find
a suitable compiler.
"""
- nself = len(self.version)
+ nself = len(self.version)
nother = len(other.version)
return nother <= nself and self.version[:nother] == other.version
-
def wildcard(self):
"""Create a regex that will match variants of this version string."""
def a_or_n(seg):
@@ -181,28 +193,36 @@ class Version(object):
wc += '(?:[a-z]|alpha|beta)?)?' * (len(segments) - 1)
return wc
-
def __iter__(self):
return iter(self.version)
-
def __getitem__(self, idx):
- return tuple(self.version[idx])
-
+ cls = type(self)
+ if isinstance(idx, numbers.Integral):
+ return self.version[idx]
+ elif isinstance(idx, slice):
+ # Currently len(self.separators) == len(self.version) - 1
+            extended_separators = self.separators + ('',)
+ string_arg = []
+            for token, sep in zip(self.version, extended_separators)[idx]:
+ string_arg.append(str(token))
+ string_arg.append(str(sep))
+ string_arg.pop() # We don't need the last separator
+ string_arg = ''.join(string_arg)
+ return cls(string_arg)
+ message = '{cls.__name__} indices must be integers'
+ raise TypeError(message.format(cls=cls))
def __repr__(self):
- return self.string
-
+ return 'Version(' + repr(self.string) + ')'
def __str__(self):
return self.string
-
@property
def concrete(self):
return self
-
@coerced
def __lt__(self, other):
"""Version comparison is designed for consistency with the way RPM
@@ -218,6 +238,14 @@ class Version(object):
if self.version == other.version:
return False
+ # dev is __gt__ than anything but itself.
+ if other.string == 'develop':
+ return True
+
+ # If lhs is dev then it can't be < than anything
+ if self.string == 'develop':
+ return False
+
for a, b in zip(self.version, other.version):
if a == b:
continue
@@ -235,28 +263,23 @@ class Version(object):
# If the common prefix is equal, the one with more segments is bigger.
return len(self.version) < len(other.version)
-
@coerced
def __eq__(self, other):
return (other is not None and
type(other) == Version and self.version == other.version)
-
def __ne__(self, other):
return not (self == other)
-
def __hash__(self):
return hash(self.version)
-
@coerced
def __contains__(self, other):
if other is None:
return False
return other.version[:len(self.version)] == self.version
-
def is_predecessor(self, other):
"""True if the other version is the immediate predecessor of this one.
That is, NO versions v exist such that:
@@ -269,16 +292,13 @@ class Version(object):
ol = other.version[-1]
return type(sl) == int and type(ol) == int and (ol - sl == 1)
-
def is_successor(self, other):
return other.is_predecessor(self)
-
@coerced
def overlaps(self, other):
return self in other or other in self
-
@coerced
def union(self, other):
if self == other or other in self:
@@ -288,7 +308,6 @@ class Version(object):
else:
return VersionList([self, other])
-
@coerced
def intersection(self, other):
if self == other:
@@ -299,6 +318,7 @@ class Version(object):
@total_ordering
class VersionRange(object):
+
def __init__(self, start, end):
if isinstance(start, basestring):
start = Version(start)
@@ -310,15 +330,12 @@ class VersionRange(object):
if start and end and end < start:
raise ValueError("Invalid Version range: %s" % self)
-
def lowest(self):
return self.start
-
def highest(self):
return self.end
-
@coerced
def __lt__(self, other):
"""Sort VersionRanges lexicographically so that they are ordered first
@@ -331,28 +348,24 @@ class VersionRange(object):
s, o = self, other
if s.start != o.start:
- return s.start is None or (o.start is not None and s.start < o.start)
-
+ return s.start is None or (
+ o.start is not None and s.start < o.start)
return (s.end != o.end and
o.end is None or (s.end is not None and s.end < o.end))
-
@coerced
def __eq__(self, other):
return (other is not None and
type(other) == VersionRange and
self.start == other.start and self.end == other.end)
-
def __ne__(self, other):
return not (self == other)
-
@property
def concrete(self):
return self.start if self.start == self.end else None
-
@coerced
def __contains__(self, other):
if other is None:
@@ -373,57 +386,55 @@ class VersionRange(object):
other.end in self.end)))
return in_upper
-
@coerced
def satisfies(self, other):
- """A VersionRange satisfies another if some version in this range
- would satisfy some version in the other range. To do this it must
- either:
- a) Overlap with the other range
- b) The start of this range satisfies the end of the other range.
-
- This is essentially the same as overlaps(), but overlaps assumes
- that its arguments are specific. That is, 4.7 is interpreted as
- 4.7.0.0.0.0... . This funciton assumes that 4.7 woudl be satisfied
- by 4.7.3.5, etc.
-
- Rationale:
- If a user asks for gcc@4.5:4.7, and a package is only compatible with
- gcc@4.7.3:4.8, then that package should be able to build under the
- constraints. Just using overlaps() would not work here.
-
- Note that we don't need to check whether the end of this range
- would satisfy the start of the other range, because overlaps()
- already covers that case.
-
- Note further that overlaps() is a symmetric operation, while
- satisfies() is not.
+ """
+ A VersionRange satisfies another if some version in this range
+ would satisfy some version in the other range. To do this it must
+ either:
+ a) Overlap with the other range
+ b) The start of this range satisfies the end of the other range.
+
+ This is essentially the same as overlaps(), but overlaps assumes
+ that its arguments are specific. That is, 4.7 is interpreted as
+    4.7.0.0.0.0... . This function assumes that 4.7 would be satisfied
+ by 4.7.3.5, etc.
+
+ Rationale:
+ If a user asks for gcc@4.5:4.7, and a package is only compatible with
+ gcc@4.7.3:4.8, then that package should be able to build under the
+ constraints. Just using overlaps() would not work here.
+
+ Note that we don't need to check whether the end of this range
+ would satisfy the start of the other range, because overlaps()
+ already covers that case.
+
+ Note further that overlaps() is a symmetric operation, while
+ satisfies() is not.
"""
return (self.overlaps(other) or
# if either self.start or other.end are None, then this can't
# satisfy, or overlaps() would've taken care of it.
self.start and other.end and self.start.satisfies(other.end))
-
@coerced
def overlaps(self, other):
- return ((self.start == None or other.end is None or
+ return ((self.start is None or other.end is None or
self.start <= other.end or
other.end in self.start or self.start in other.end) and
- (other.start is None or self.end == None or
+ (other.start is None or self.end is None or
other.start <= self.end or
other.start in self.end or self.end in other.start))
-
@coerced
def union(self, other):
if not self.overlaps(other):
if (self.end is not None and other.start is not None and
- self.end.is_predecessor(other.start)):
+ self.end.is_predecessor(other.start)):
return VersionRange(self.start, other.end)
if (other.end is not None and self.start is not None and
- other.end.is_predecessor(self.start)):
+ other.end.is_predecessor(self.start)):
return VersionRange(other.start, self.end)
return VersionList([self, other])
@@ -442,13 +453,12 @@ class VersionRange(object):
else:
end = self.end
# TODO: See note in intersection() about < and in discrepancy.
- if not other.end in self.end:
+ if other.end not in self.end:
if end in other.end or other.end > self.end:
end = other.end
return VersionRange(start, end)
-
@coerced
def intersection(self, other):
if self.overlaps(other):
@@ -470,7 +480,7 @@ class VersionRange(object):
# 1.6 < 1.6.5 = True (lexicographic)
# Should 1.6 NOT be less than 1.6.5? Hm.
# Here we test (not end in other.end) first to avoid paradox.
- if other.end is not None and not end in other.end:
+ if other.end is not None and end not in other.end:
if other.end < end or other.end in end:
end = other.end
@@ -479,15 +489,12 @@ class VersionRange(object):
else:
return VersionList()
-
def __hash__(self):
return hash((self.start, self.end))
-
def __repr__(self):
return self.__str__()
-
def __str__(self):
out = ""
if self.start:
@@ -501,6 +508,7 @@ class VersionRange(object):
@total_ordering
class VersionList(object):
"""Sorted, non-redundant list of Versions and VersionRanges."""
+
def __init__(self, vlist=None):
self.versions = []
if vlist is not None:
@@ -515,7 +523,6 @@ class VersionList(object):
for v in vlist:
self.add(ver(v))
-
def add(self, version):
if type(version) in (Version, VersionRange):
# This normalizes single-value version ranges.
@@ -524,9 +531,9 @@ class VersionList(object):
i = bisect_left(self, version)
- while i-1 >= 0 and version.overlaps(self[i-1]):
- version = version.union(self[i-1])
- del self.versions[i-1]
+ while i - 1 >= 0 and version.overlaps(self[i - 1]):
+ version = version.union(self[i - 1])
+ del self.versions[i - 1]
i -= 1
while i < len(self) and version.overlaps(self[i]):
@@ -542,7 +549,6 @@ class VersionList(object):
else:
raise TypeError("Can't add %s to VersionList" % type(version))
-
@property
def concrete(self):
if len(self) == 1:
@@ -550,11 +556,9 @@ class VersionList(object):
else:
return None
-
def copy(self):
return VersionList(self)
-
def lowest(self):
"""Get the lowest version in the list."""
if not self:
@@ -562,7 +566,6 @@ class VersionList(object):
else:
return self[0].lowest()
-
def highest(self):
"""Get the highest version in the list."""
if not self:
@@ -570,7 +573,6 @@ class VersionList(object):
else:
return self[-1].highest()
-
@coerced
def overlaps(self, other):
if not other or not self:
@@ -586,14 +588,12 @@ class VersionList(object):
o += 1
return False
-
def to_dict(self):
"""Generate human-readable dict for YAML."""
if self.concrete:
- return { 'version' : str(self[0]) }
+ return {'version': str(self[0])}
else:
- return { 'versions' : [str(v) for v in self] }
-
+ return {'versions': [str(v) for v in self]}
@staticmethod
def from_dict(dictionary):
@@ -605,7 +605,6 @@ class VersionList(object):
else:
raise ValueError("Dict must have 'version' or 'versions' in it.")
-
@coerced
def satisfies(self, other, strict=False):
"""A VersionList satisfies another if some version in the list
@@ -633,20 +632,17 @@ class VersionList(object):
o += 1
return False
-
@coerced
def update(self, other):
for v in other.versions:
self.add(v)
-
@coerced
def union(self, other):
result = self.copy()
result.update(other)
return result
-
@coerced
def intersection(self, other):
# TODO: make this faster. This is O(n^2).
@@ -656,7 +652,6 @@ class VersionList(object):
result.add(s.intersection(o))
return result
-
@coerced
def intersect(self, other):
"""Intersect this spec's list with other.
@@ -678,50 +673,40 @@ class VersionList(object):
if i == 0:
if version not in self[0]:
return False
- elif all(version not in v for v in self[i-1:]):
+ elif all(version not in v for v in self[i - 1:]):
return False
return True
-
def __getitem__(self, index):
return self.versions[index]
-
def __iter__(self):
return iter(self.versions)
-
def __reversed__(self):
return reversed(self.versions)
-
def __len__(self):
return len(self.versions)
-
@coerced
def __eq__(self, other):
return other is not None and self.versions == other.versions
-
def __ne__(self, other):
return not (self == other)
-
@coerced
def __lt__(self, other):
return other is not None and self.versions < other.versions
-
def __hash__(self):
return hash(tuple(self.versions))
-
def __str__(self):
return ",".join(str(v) for v in self.versions)
-
def __repr__(self):
return str(self.versions)
@@ -730,7 +715,7 @@ def _string_to_version(string):
"""Converts a string to a Version, VersionList, or VersionRange.
This is private. Client code should use ver().
"""
- string = string.replace(' ','')
+ string = string.replace(' ', '')
if ',' in string:
return VersionList(string.split(','))
@@ -738,7 +723,7 @@ def _string_to_version(string):
elif ':' in string:
s, e = string.split(':')
start = Version(s) if s else None
- end = Version(e) if e else None
+ end = Version(e) if e else None
return VersionRange(start, end)
else:
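
The new indexing and ordering behavior above can be summarized in a short sketch (grounded in the __getitem__, dotted/underscored/dashed, and 'develop' hunks):

    from spack.version import Version

    v = Version('1.2.3')
    v.dotted          # '1.2.3'
    v.underscored     # '1_2_3'
    v.dashed          # '1-2-3'
    v[0]              # 1, a single component (integer indexing)
    str(v[0:2])       # '1.2' -- slicing builds a new Version, reusing
                      # the original separators between components
    Version('develop') > Version('999.9')   # True: develop outranks all
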
diff --git a/lib/spack/spack/yaml_version_check.py b/lib/spack/spack/yaml_version_check.py
new file mode 100644
index 0000000000..2c5b511d7f
--- /dev/null
+++ b/lib/spack/spack/yaml_version_check.py
@@ -0,0 +1,57 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""Yaml Version Check is a module for ensuring that config file
+formats are compatible with the current version of Spack."""
+import os.path
+import os
+import llnl.util.tty as tty
+import spack.util.spack_yaml as syaml
+import spack.config
+
+
+def check_yaml_versions():
+ check_compiler_yaml_version()
+
+
+def check_compiler_yaml_version():
+ config_scopes = spack.config.config_scopes
+ for scope in config_scopes.values():
+ file_name = os.path.join(scope.path, 'compilers.yaml')
+ data = None
+ if os.path.isfile(file_name):
+ with open(file_name) as f:
+ data = syaml.load(f)
+
+ if data:
+ compilers = data['compilers']
+ if len(compilers) > 0:
+ if (not isinstance(compilers, list) or
+ 'operating_system' not in compilers[0]['compiler']):
+ new_file = os.path.join(scope.path, '_old_compilers.yaml')
+                tty.warn('%s is in an out-of-date compilers format. '
+                         'Moved to %s. Spack will automatically generate '
+                         'a new compilers config file. '
+                         % (file_name, new_file))
+ os.rename(file_name, new_file)
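
A sketch of where this check is intended to run (an assumption: once, early in startup, before anything reads compilers.yaml):

    from spack.yaml_version_check import check_yaml_versions

    # Moves stale compilers.yaml files aside (to _old_compilers.yaml)
    # instead of letting newer code mis-parse them.
    check_yaml_versions()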