summaryrefslogtreecommitdiff
path: root/lib
diff options
context:
space:
mode:
Diffstat (limited to 'lib')
-rw-r--r--lib/spack/docs/basic_usage.rst31
-rw-r--r--lib/spack/docs/build_settings.rst33
-rw-r--r--lib/spack/docs/build_systems/cmakepackage.rst17
-rw-r--r--lib/spack/docs/build_systems/intelpackage.rst6
-rw-r--r--lib/spack/docs/build_systems/sippackage.rst12
-rw-r--r--lib/spack/docs/containers.rst8
-rw-r--r--lib/spack/docs/contribution_guide.rst4
-rw-r--r--lib/spack/docs/environments.rst2
-rw-r--r--lib/spack/docs/getting_started.rst2
-rw-r--r--lib/spack/docs/module_file_support.rst2
-rw-r--r--lib/spack/docs/packaging_guide.rst31
-rw-r--r--lib/spack/docs/pipelines.rst26
-rw-r--r--lib/spack/docs/workflows.rst12
-rwxr-xr-xlib/spack/env/cc2
-rw-r--r--lib/spack/external/__init__.py10
-rw-r--r--lib/spack/external/_pytest/config.py38
-rw-r--r--lib/spack/external/_pytest/vendored_packages/pluggy.py20
-rw-r--r--lib/spack/external/altgraph/__init__.py9
-rw-r--r--lib/spack/external/distro.py1
-rw-r--r--lib/spack/spack/binary_distribution.py280
-rw-r--r--lib/spack/spack/build_systems/autotools.py6
-rw-r--r--lib/spack/spack/build_systems/cmake.py114
-rw-r--r--lib/spack/spack/build_systems/cuda.py80
-rw-r--r--lib/spack/spack/build_systems/sip.py48
-rw-r--r--lib/spack/spack/build_systems/sourceware.py37
-rw-r--r--lib/spack/spack/build_systems/xorg.py37
-rw-r--r--lib/spack/spack/caches.py5
-rw-r--r--lib/spack/spack/ci.py5
-rw-r--r--lib/spack/spack/cmd/__init__.py10
-rw-r--r--lib/spack/spack/cmd/buildcache.py126
-rw-r--r--lib/spack/spack/cmd/checksum.py3
-rw-r--r--lib/spack/spack/cmd/create.py4
-rw-r--r--lib/spack/spack/cmd/debug.py18
-rw-r--r--lib/spack/spack/cmd/dependencies.py22
-rw-r--r--lib/spack/spack/cmd/dependents.py2
-rw-r--r--lib/spack/spack/cmd/load.py15
-rw-r--r--lib/spack/spack/cmd/mirror.py8
-rw-r--r--lib/spack/spack/cmd/python.py9
-rw-r--r--lib/spack/spack/cmd/repo.py25
-rw-r--r--lib/spack/spack/compiler.py8
-rw-r--r--lib/spack/spack/compilers/arm.py16
-rw-r--r--lib/spack/spack/compilers/fj.py4
-rw-r--r--lib/spack/spack/config.py72
-rw-r--r--lib/spack/spack/fetch_strategy.py41
-rw-r--r--lib/spack/spack/mirror.py29
-rw-r--r--lib/spack/spack/package.py50
-rw-r--r--lib/spack/spack/package_prefs.py78
-rw-r--r--lib/spack/spack/patch.py9
-rw-r--r--lib/spack/spack/pkgkit.py2
-rw-r--r--lib/spack/spack/provider_index.py307
-rw-r--r--lib/spack/spack/relocate.py792
-rw-r--r--lib/spack/spack/repo.py52
-rw-r--r--lib/spack/spack/schema/config.py1
-rw-r--r--lib/spack/spack/spec.py2
-rw-r--r--lib/spack/spack/stage.py33
-rw-r--r--lib/spack/spack/test/build_systems.py107
-rw-r--r--lib/spack/spack/test/cmd/buildcache.py15
-rw-r--r--lib/spack/spack/test/cmd/debug.py14
-rw-r--r--lib/spack/spack/test/cmd/dependencies.py4
-rw-r--r--lib/spack/spack/test/cmd/list.py8
-rw-r--r--lib/spack/spack/test/cmd/load.py15
-rw-r--r--lib/spack/spack/test/cmd/mirror.py23
-rw-r--r--lib/spack/spack/test/cmd/python.py7
-rw-r--r--lib/spack/spack/test/cmd/repo.py35
-rw-r--r--lib/spack/spack/test/compilers.py8
-rw-r--r--lib/spack/spack/test/concretize_preferences.py51
-rw-r--r--lib/spack/spack/test/config.py87
-rw-r--r--lib/spack/spack/test/conftest.py20
-rw-r--r--lib/spack/spack/test/mirror.py92
-rw-r--r--lib/spack/spack/test/module_parsing.py30
-rw-r--r--lib/spack/spack/test/package_class.py66
-rw-r--r--lib/spack/spack/test/packages.py21
-rw-r--r--lib/spack/spack/test/packaging.py571
-rw-r--r--lib/spack/spack/test/relocate.py59
-rw-r--r--lib/spack/spack/test/spec_dag.py4
-rw-r--r--lib/spack/spack/test/url_fetch.py19
-rw-r--r--lib/spack/spack/util/compression.py3
-rw-r--r--lib/spack/spack/util/executable.py2
-rw-r--r--lib/spack/spack/util/module_cmd.py9
-rw-r--r--lib/spack/spack/util/web.py2
80 files changed, 2571 insertions, 1317 deletions
diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst
index 56d60a29da..8acebeb0e6 100644
--- a/lib/spack/docs/basic_usage.rst
+++ b/lib/spack/docs/basic_usage.rst
@@ -25,6 +25,14 @@ It is recommended that the following be put in your ``.bashrc`` file:
alias less='less -R'
+If you do not see colorized output when using ``less -R`` it is because color
+is being disabled in the piped output. In this case, tell spack to force
+colorized output.
+
+.. code-block:: console
+
+ $ spack --color always | less -R
+
--------------------------
Listing available packages
--------------------------
@@ -45,7 +53,7 @@ can install:
.. command-output:: spack list
:ellipsis: 10
-There are thosands of them, so we've truncated the output above, but you
+There are thousands of them, so we've truncated the output above, but you
can find a :ref:`full list here <package-list>`.
Packages are listed by name in alphabetical order.
A pattern to match with no wildcards, ``*`` or ``?``,
@@ -267,7 +275,7 @@ the ``spack gc`` ("garbage collector") command, which will uninstall all unneede
-- linux-ubuntu18.04-broadwell / gcc@9.0.1 ----------------------
hdf5@1.10.5 libiconv@1.16 libpciaccess@0.13.5 libszip@2.1.1 libxml2@2.9.9 mpich@3.3.2 openjpeg@2.3.1 xz@5.2.4 zlib@1.2.11
-In the example above Spack went through all the packages in the DB
+In the example above Spack went through all the packages in the package database
and removed everything that is not either:
1. A package installed upon explicit request of the user
@@ -854,7 +862,7 @@ Variants are named options associated with a particular package. They are
optional, as each package must provide default values for each variant it
makes available. Variants can be specified using
a flexible parameter syntax ``name=<value>``. For example,
-``spack install libelf debug=True`` will install libelf build with debug
+``spack install libelf debug=True`` will install libelf built with debug
flags. The names of particular variants available for a package depend on
what was provided by the package author. ``spack info <package>`` will
provide information on what build variants are available.
@@ -917,7 +925,7 @@ contains any spaces. Any of ``cppflags=-O3``, ``cppflags="-O3"``,
``cppflags='-O3'``, and ``cppflags="-O3 -fPIC"`` are acceptable, but
``cppflags=-O3 -fPIC`` is not. Additionally, if the value of the
compiler flags is not the last thing on the line, it must be followed
-by a space. The commmand ``spack install libelf cppflags="-O3"%intel``
+by a space. The command ``spack install libelf cppflags="-O3"%intel``
will be interpreted as an attempt to set ``cppflags="-O3%intel"``.
The six compiler flags are injected in the order of implicit make commands
@@ -1067,13 +1075,13 @@ of failing:
In the snippet above, for instance, the microarchitecture was demoted to ``haswell`` when
compiling with ``gcc@4.8`` since support to optimize for ``broadwell`` starts from ``gcc@4.9:``.
-Finally if Spack has no information to match compiler and target, it will
+Finally, if Spack has no information to match compiler and target, it will
proceed with the installation but avoid injecting any microarchitecture
specific flags.
.. warning::
- Currently Spack doesn't print any warning to the user if it has no information
+ Currently, Spack doesn't print any warning to the user if it has no information
on which optimization flags should be used for a given compiler. This behavior
might change in the future.
@@ -1083,7 +1091,7 @@ specific flags.
Virtual dependencies
--------------------
-The dependence graph for ``mpileaks`` we saw above wasn't *quite*
+The dependency graph for ``mpileaks`` we saw above wasn't *quite*
accurate. ``mpileaks`` uses MPI, which is an interface that has many
different implementations. Above, we showed ``mpileaks`` and
``callpath`` depending on ``mpich``, which is one *particular*
@@ -1226,6 +1234,8 @@ add a version specifier to the spec:
Notice that the package versions that provide insufficient MPI
versions are now filtered out.
+.. _extensions:
+
---------------------------
Extensions & Python support
---------------------------
@@ -1233,8 +1243,7 @@ Extensions & Python support
Spack's installation model assumes that each package will live in its
own install prefix. However, certain packages are typically installed
*within* the directory hierarchy of other packages. For example,
-modules in interpreted languages like `Python
-<https://www.python.org>`_ are typically installed in the
+`Python <https://www.python.org>`_ packages are typically installed in the
``$prefix/lib/python-2.7/site-packages`` directory.
Spack has support for this type of installation as well. In Spack,
@@ -1410,12 +1419,12 @@ packages listed as activated:
py-nose@1.3.4 py-numpy@1.9.1 py-setuptools@11.3.1
Now, when a user runs python, ``numpy`` will be available for import
-*without* the user having to explicitly loaded. ``python@2.7.8`` now
+*without* the user having to explicitly load it. ``python@2.7.8`` now
acts like a system Python installation with ``numpy`` installed inside
of it.
Spack accomplishes this by symbolically linking the *entire* prefix of
-the ``py-numpy`` into the prefix of the ``python`` package. To the
+the ``py-numpy`` package into the prefix of the ``python`` package. To the
python interpreter, it looks like ``numpy`` is installed in the
``site-packages`` directory.
diff --git a/lib/spack/docs/build_settings.rst b/lib/spack/docs/build_settings.rst
index b141f2b717..cfd850af28 100644
--- a/lib/spack/docs/build_settings.rst
+++ b/lib/spack/docs/build_settings.rst
@@ -124,6 +124,39 @@ The ``buildable`` does not need to be paired with external packages.
It could also be used alone to forbid packages that may be
buggy or otherwise undesirable.
+Virtual packages in Spack can also be specified as not buildable, and
+external implementations can be provided. In the example above,
+OpenMPI is configured as not buildable, but Spack will often prefer
+other MPI implementations over the externally available OpenMPI. Spack
+can be configured with every MPI provider not buildable individually,
+but more conveniently:
+
+.. code-block:: yaml
+
+ packages:
+ mpi:
+ buildable: False
+ openmpi:
+ paths:
+ openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64: /opt/openmpi-1.4.3
+ openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug: /opt/openmpi-1.4.3-debug
+ openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64: /opt/openmpi-1.6.5-intel
+
+Implementations can also be listed immediately under the virtual they provide:
+
+.. code-block:: yaml
+
+ packages:
+ mpi:
+ buildable: False
+ openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64: /opt/openmpi-1.4.3
+ openmpi@1.4.3%gcc@4.4.7 arch=linux-debian7-x86_64+debug: /opt/openmpi-1.4.3-debug
+ openmpi@1.6.5%intel@10.1 arch=linux-debian7-x86_64: /opt/openmpi-1.6.5-intel
+ mpich@3.3 %clang@9.0.0 arch=linux-debian7-x86_64: /opt/mpich-3.3-intel
+
+Spack can then use any of the listed external implementations of MPI
+to satisfy a dependency, and will choose depending on the compiler and
+architecture.
.. _concretization-preferences:
diff --git a/lib/spack/docs/build_systems/cmakepackage.rst b/lib/spack/docs/build_systems/cmakepackage.rst
index 0a771edad3..76e89c80b1 100644
--- a/lib/spack/docs/build_systems/cmakepackage.rst
+++ b/lib/spack/docs/build_systems/cmakepackage.rst
@@ -128,17 +128,20 @@ Adding flags to cmake
^^^^^^^^^^^^^^^^^^^^^
To add additional flags to the ``cmake`` call, simply override the
-``cmake_args`` function:
+``cmake_args`` function. The following example defines values for the flags
+``WHATEVER``, ``ENABLE_BROKEN_FEATURE``, ``DETECT_HDF5``, and ``THREADS`` with
+and without the :py:meth:`~.CMakePackage.define` and
+:py:meth:`~.CMakePackage.define_from_variant` helper functions:
.. code-block:: python
def cmake_args(self):
- args = []
-
- if '+hdf5' in self.spec:
- args.append('-DDETECT_HDF5=ON')
- else:
- args.append('-DDETECT_HDF5=OFF')
+ args = [
+ '-DWHATEVER:STRING=somevalue',
+ self.define('ENABLE_BROKEN_FEATURE', False),
+ self.define_from_variant('DETECT_HDF5', 'hdf5'),
+ self.define_from_variant('THREADS'), # True if +threads
+ ]
return args
diff --git a/lib/spack/docs/build_systems/intelpackage.rst b/lib/spack/docs/build_systems/intelpackage.rst
index c14fc34f51..153b41bdc7 100644
--- a/lib/spack/docs/build_systems/intelpackage.rst
+++ b/lib/spack/docs/build_systems/intelpackage.rst
@@ -553,7 +553,7 @@ follow `the next section <intel-install-libs_>`_ instead.
f77: stub
fc: stub
- Replace ``18.0.3`` with the version that you determined in the preceeding
+ Replace ``18.0.3`` with the version that you determined in the preceding
step. The contents under ``paths:`` do not matter yet.
You are right to ask: "Why on earth is that necessary?" [fn8]_.
@@ -696,7 +696,7 @@ follow `the next section <intel-install-libs_>`_ instead.
- /home/$user/spack-stage
Do not duplicate the ``config:`` line if it already is present.
- Adapt the location, which here is the same as in the preceeding example.
+ Adapt the location, which here is the same as in the preceding example.
3. Retry installing the large package.
@@ -965,7 +965,7 @@ a *virtual* ``mkl`` package is declared in Spack.
Likewise, in a
:ref:`MakefilePackage <makefilepackage>`
- or similiar package that does not use AutoTools you may need to provide include
+ or similar package that does not use AutoTools you may need to provide include
and link options for use on command lines or in environment variables.
For example, to generate an option string of the form ``-I<dir>``, use:
diff --git a/lib/spack/docs/build_systems/sippackage.rst b/lib/spack/docs/build_systems/sippackage.rst
index b8c08ec513..ddf9a26ab9 100644
--- a/lib/spack/docs/build_systems/sippackage.rst
+++ b/lib/spack/docs/build_systems/sippackage.rst
@@ -51,10 +51,8 @@ Build system dependencies
``SIPPackage`` requires several dependencies. Python is needed to run
the ``configure.py`` build script, and to run the resulting Python
libraries. Qt is needed to provide the ``qmake`` command. SIP is also
-needed to build the package. SIP is an unusual dependency in that it
-must be installed in the same installation directory as the package,
-so instead of a ``depends_on``, we use a ``resource``. All of these
-dependencies are automatically added via the base class
+needed to build the package. All of these dependencies are automatically
+added via the base class
.. code-block:: python
@@ -62,11 +60,7 @@ dependencies are automatically added via the base class
depends_on('qt', type='build')
- resource(name='sip',
- url='https://www.riverbankcomputing.com/static/Downloads/sip/4.19.18/sip-4.19.18.tar.gz',
- sha256='c0bd863800ed9b15dcad477c4017cdb73fa805c25908b0240564add74d697e1e',
- destination='.')
-
+ depends_on('py-sip', type='build')
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Passing arguments to ``configure.py``
diff --git a/lib/spack/docs/containers.rst b/lib/spack/docs/containers.rst
index bbb21a2e00..2ca25b7207 100644
--- a/lib/spack/docs/containers.rst
+++ b/lib/spack/docs/containers.rst
@@ -44,7 +44,7 @@ Environments:
&& echo " install_tree: /opt/software" \
&& echo " view: /opt/view") > /opt/spack-environment/spack.yaml
- # Install the software, remove unecessary deps
+ # Install the software, remove unnecessary deps
RUN cd /opt/spack-environment && spack install && spack gc -y
# Strip all the binaries
@@ -108,7 +108,7 @@ are currently supported are summarized in the table below:
- ``ubuntu:16.04``
- ``spack/ubuntu-xenial``
* - Ubuntu 18.04
- - ``ubuntu:16.04``
+ - ``ubuntu:18.04``
- ``spack/ubuntu-bionic``
* - CentOS 6
- ``centos:6``
@@ -266,7 +266,7 @@ following ``Dockerfile``:
&& echo " install_tree: /opt/software" \
&& echo " view: /opt/view") > /opt/spack-environment/spack.yaml
- # Install the software, remove unecessary deps
+ # Install the software, remove unnecessary deps
RUN cd /opt/spack-environment && spack install && spack gc -y
# Strip all the binaries
@@ -304,4 +304,4 @@ following ``Dockerfile``:
.. note::
Spack can also produce Singularity definition files to build the image. The
minimum version of Singularity required to build a SIF (Singularity Image Format)
- from them is ``3.5.3``. \ No newline at end of file
+ from them is ``3.5.3``.
diff --git a/lib/spack/docs/contribution_guide.rst b/lib/spack/docs/contribution_guide.rst
index 0b79141ee3..9935ec0c83 100644
--- a/lib/spack/docs/contribution_guide.rst
+++ b/lib/spack/docs/contribution_guide.rst
@@ -385,8 +385,8 @@ coverage. This helps us tell what percentage of lines of code in Spack are
covered by unit tests. Although code covered by unit tests can still contain
bugs, it is much less error prone than code that is not covered by unit tests.
-Codecov provides `browser extensions <https://github.com/codecov/browser-extension>`_
-for Google Chrome, Firefox, and Opera. These extensions integrate with GitHub
+Codecov provides `browser extensions <https://github.com/codecov/sourcegraph-codecov>`_
+for Google Chrome and Firefox. These extensions integrate with GitHub
and allow you to see coverage line-by-line when viewing the Spack repository.
If you are new to Spack, a great way to get started is to write unit tests to
increase coverage!
diff --git a/lib/spack/docs/environments.rst b/lib/spack/docs/environments.rst
index 5ec1ec9032..5d091dee30 100644
--- a/lib/spack/docs/environments.rst
+++ b/lib/spack/docs/environments.rst
@@ -285,7 +285,7 @@ be called using the ``spack -E`` flag to specify the environment.
.. code-block:: console
- $ spack activate myenv
+ $ spack env activate myenv
$ spack add mpileaks
or
diff --git a/lib/spack/docs/getting_started.rst b/lib/spack/docs/getting_started.rst
index 3a9b2d42b2..8de9d27662 100644
--- a/lib/spack/docs/getting_started.rst
+++ b/lib/spack/docs/getting_started.rst
@@ -851,7 +851,7 @@ from websites and from git.
.. warning::
- This workaround should be used ONLY as a last resort! Wihout SSL
+ This workaround should be used ONLY as a last resort! Without SSL
certificate verification, spack and git will download from sites you
wouldn't normally trust. The code you download and run may then be
compromised! While this is not a major issue for archives that will
diff --git a/lib/spack/docs/module_file_support.rst b/lib/spack/docs/module_file_support.rst
index aa7eb57653..01a13cdf3e 100644
--- a/lib/spack/docs/module_file_support.rst
+++ b/lib/spack/docs/module_file_support.rst
@@ -165,8 +165,6 @@ used ``gcc``. You could therefore just type:
To identify just the one built with the Intel compiler.
-.. _extensions:
-
.. _cmd-spack-module-loads:
^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst
index 41d74fcc74..26f843aa0e 100644
--- a/lib/spack/docs/packaging_guide.rst
+++ b/lib/spack/docs/packaging_guide.rst
@@ -2197,7 +2197,7 @@ property to ``True``, e.g.:
extendable = True
...
-To make a package into an extension, simply add simply add an
+To make a package into an extension, simply add an
``extends`` call in the package definition, and pass it the name of an
extendable package:
@@ -2212,6 +2212,10 @@ Now, the ``py-numpy`` package can be used as an argument to ``spack
activate``. When it is activated, all the files in its prefix will be
symbolically linked into the prefix of the python package.
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Adding additional constraints
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
Some packages produce a Python extension, but are only compatible with
Python 3, or with Python 2. In those cases, a ``depends_on()``
declaration should be made in addition to the ``extends()``
@@ -2231,8 +2235,7 @@ variant(s) are selected. This may be accomplished with conditional
.. code-block:: python
class FooLib(Package):
- variant('python', default=True, description= \
- 'Build the Python extension Module')
+ variant('python', default=True, description='Build the Python extension Module')
extends('python', when='+python')
...
@@ -2913,7 +2916,7 @@ discover its dependencies.
If you want to see the environment that a package will build with, or
if you want to run commands in that environment to test them out, you
-can use the :ref:`cmd-spack-env` command, documented
+can use the :ref:`cmd-spack-build-env` command, documented
below.
^^^^^^^^^^^^^^^^^^^^^
@@ -3607,7 +3610,7 @@ the command line.
For most compilers, ``$rpath_flag`` is ``-Wl,-rpath,``. However, NAG
passes its flags to GCC instead of passing them directly to the linker.
Therefore, its ``$rpath_flag`` is doubly wrapped: ``-Wl,-Wl,,-rpath,``.
- ``$rpath_flag`` can be overriden on a compiler specific basis in
+ ``$rpath_flag`` can be overridden on a compiler specific basis in
``lib/spack/spack/compilers/$compiler.py``.
The compiler wrappers also pass the compiler flags specified by the user from
@@ -4332,31 +4335,31 @@ directory, install directory, package directory) and others change to
core spack locations. For example, ``spack cd --module-dir`` will take you to
the main python source directory of your spack install.
-.. _cmd-spack-env:
+.. _cmd-spack-build-env:
-^^^^^^^^^^^^^
-``spack env``
-^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^
+``spack build-env``
+^^^^^^^^^^^^^^^^^^^
-``spack env`` functions much like the standard unix ``env`` command,
-but it takes a spec as an argument. You can use it to see the
+``spack build-env`` functions much like the standard unix ``build-env``
+command, but it takes a spec as an argument. You can use it to see the
environment variables that will be set when a particular build runs,
for example:
.. code-block:: console
- $ spack env mpileaks@1.1%intel
+ $ spack build-env mpileaks@1.1%intel
This will display the entire environment that will be set when the
``mpileaks@1.1%intel`` build runs.
To run commands in a package's build environment, you can simply
-provide them after the spec argument to ``spack env``:
+provide them after the spec argument to ``spack build-env``:
.. code-block:: console
$ spack cd mpileaks@1.1%intel
- $ spack env mpileaks@1.1%intel ./configure
+ $ spack build-env mpileaks@1.1%intel ./configure
This will cd to the build directory and then run ``configure`` in the
package's build environment.
diff --git a/lib/spack/docs/pipelines.rst b/lib/spack/docs/pipelines.rst
index f70b39a16d..0f092d5c5c 100644
--- a/lib/spack/docs/pipelines.rst
+++ b/lib/spack/docs/pipelines.rst
@@ -117,6 +117,27 @@ created has the same name as the current branch being tested, but has ``multi-ci
prepended to the branch name. Once Gitlab CI has full support for dynamically
defined workloads, this command will be deprecated.
+Until this command is no longer needed and can be deprecated, there are
+a few gotchas to note. While you can embed your username and password in the
+`DOWNSTREAM_CI_REPO` url, you may not be able to have Gitlab mask the value, as
+it will likely contain characters that Gitlab cannot currently mask. Another
+option is to set up an SSH token, but for this to work, the associated SSH
+key must be passphrase-less so that it can be provided in an automated manner.
+
+If you attempt to set up an SSH token that does require a passphrase, you may
+see a log message similar to:
+
+```
+fatal: https://<instance-url>/<org>/<project>:<port>/info/refs not valid: is this a git repository?
+```
+
+In this case, you can try a passphrase-less SSH key, or else embed your gitlab
+username and password in the `DOWNSTREAM_CI_REPO` as in the following example:
+
+```
+https://<username>:<password>@<instance-url>/<org>/<project>.git
+```
+
.. _cmd_spack_ci_rebuild:
^^^^^^^^^^^^^^^^^^^^
@@ -132,7 +153,7 @@ A pipeline-enabled spack environment
------------------------------------
Here's an example of a spack environment file that has been enhanced with
-sections desribing a build pipeline:
+sections describing a build pipeline:
.. code-block:: yaml
@@ -436,4 +457,5 @@ DOWNSTREAM_CI_REPO
^^^^^^^^^^^^^^^^^^
Needed until Gitlab CI supports dynamic job generation. Can contain connection
-credentials, and could be the same repository or a different one.
+credentials embedded in the url, and could be the same repository or a different
+one.
diff --git a/lib/spack/docs/workflows.rst b/lib/spack/docs/workflows.rst
index b329a0205c..9ce664b6ca 100644
--- a/lib/spack/docs/workflows.rst
+++ b/lib/spack/docs/workflows.rst
@@ -284,8 +284,10 @@ have some drawbacks:
The ``spack load`` and ``spack module tcl loads`` commands, on the
other hand, are not very smart: if the user-supplied spec matches
more than one installed package, then ``spack module tcl loads`` will
- fail. This may change in the future. For now, the workaround is to
- be more specific on any ``spack load`` commands that fail.
+ fail. This default behavior may change in the future. For now,
+ the workaround is to either be more specific on any failing ``spack load``
+ commands or to use ``spack load --first`` to allow spack to load the
+ first matching spec.
""""""""""""""""""""""
@@ -444,7 +446,7 @@ environment.
A single-prefix filesystem view is a single directory tree that is the
union of the directory hierarchies of a number of installed packages;
-it is similar to the directory hiearchy that might exist under
+it is similar to the directory hierarchy that might exist under
``/usr/local``. The files of the view's installed packages are
brought into the view by symbolic or hard links, referencing the
original Spack installation.
@@ -1237,7 +1239,7 @@ you can also manually set them in your ``.bashrc``.
2. Other package managers like Homebrew will try to install things to the
same directory. If you plan on using Homebrew in conjunction with Spack,
don't symlink things to ``/usr/local``.
- 3. If you are on a shared workstation, or don't have sudo priveleges, you
+ 3. If you are on a shared workstation, or don't have sudo privileges, you
can't do this.
If you still want to do this anyway, there are several ways around SIP.
@@ -1467,7 +1469,7 @@ In order to build and run the image, execute:
SPACK_ROOT=/usr/local \
FORCE_UNSAFE_CONFIGURE=1
- # install minimal spack depedencies
+ # install minimal spack dependencies
RUN apt-get update \
&& apt-get install -y --no-install-recommends \
autoconf \
diff --git a/lib/spack/env/cc b/lib/spack/env/cc
index 73c5759dfe..f2b8bf577f 100755
--- a/lib/spack/env/cc
+++ b/lib/spack/env/cc
@@ -43,7 +43,7 @@ parameters=(
# The compiler input variables are checked for sanity later:
# SPACK_CC, SPACK_CXX, SPACK_F77, SPACK_FC
# The default compiler flags are passed from these variables:
-# SPACK_CFLAGS, SPACK_CXXFLAGS, SPACK_FCFLAGS, SPACK_FFLAGS,
+# SPACK_CFLAGS, SPACK_CXXFLAGS, SPACK_FFLAGS,
# SPACK_LDFLAGS, SPACK_LDLIBS
# Debug env var is optional; set to "TRUE" for debug logging:
# SPACK_DEBUG
diff --git a/lib/spack/external/__init__.py b/lib/spack/external/__init__.py
index 27f00efb09..230ec170b2 100644
--- a/lib/spack/external/__init__.py
+++ b/lib/spack/external/__init__.py
@@ -88,10 +88,11 @@ pytest
* Homepage: https://pypi.python.org/pypi/pytest
* Usage: Testing framework used by Spack.
* Version: 3.2.5 (last version supporting Python 2.6)
-* Note: This package has been slightly modified to improve
- Python 2.6 compatibility. See the following commit if the
- vendored copy ever needs to be updated again:
- https://github.com/spack/spack/pull/6801/commits/ff513c39f2c67ff615de5cbc581dd69a8ec96526
+* Note: This package has been slightly modified:
+ * We improve Python 2.6 compatibility. See:
+ https://github.com/spack/spack/pull/6801.
+ * We have patched pytest not to depend on setuptools. See:
+ https://github.com/spack/spack/pull/15612
ruamel.yaml
------
@@ -125,4 +126,5 @@ altgraph
* Homepage: https://altgraph.readthedocs.io/en/latest/index.html
* Usage: dependency of macholib
* Version: 0.16.1
+
"""
diff --git a/lib/spack/external/_pytest/config.py b/lib/spack/external/_pytest/config.py
index 19835d2c39..513478a972 100644
--- a/lib/spack/external/_pytest/config.py
+++ b/lib/spack/external/_pytest/config.py
@@ -1028,34 +1028,13 @@ class Config(object):
except SystemError:
mode = 'plain'
else:
- self._mark_plugins_for_rewrite(hook)
+ # REMOVED FOR SPACK: This routine imports `pkg_resources` from
+ # `setuptools`, but we do not need it for Spack. We have removed
+ # it from Spack to avoid a dependency on setuptools.
+ # self._mark_plugins_for_rewrite(hook)
+ pass
self._warn_about_missing_assertion(mode)
- def _mark_plugins_for_rewrite(self, hook):
- """
- Given an importhook, mark for rewrite any top-level
- modules or packages in the distribution package for
- all pytest plugins.
- """
- import pkg_resources
- self.pluginmanager.rewrite_hook = hook
-
- # 'RECORD' available for plugins installed normally (pip install)
- # 'SOURCES.txt' available for plugins installed in dev mode (pip install -e)
- # for installed plugins 'SOURCES.txt' returns an empty list, and vice-versa
- # so it shouldn't be an issue
- metadata_files = 'RECORD', 'SOURCES.txt'
-
- package_files = (
- entry.split(',')[0]
- for entrypoint in pkg_resources.iter_entry_points('pytest11')
- for metadata in metadata_files
- for entry in entrypoint.dist._get_metadata(metadata)
- )
-
- for name in _iter_rewritable_modules(package_files):
- hook.mark_rewrite(name)
-
def _warn_about_missing_assertion(self, mode):
try:
assert False
@@ -1081,7 +1060,12 @@ class Config(object):
self._checkversion()
self._consider_importhook(args)
self.pluginmanager.consider_preparse(args)
- self.pluginmanager.load_setuptools_entrypoints('pytest11')
+
+ # REMOVED FOR SPACK: This routine imports `pkg_resources` from
+ # `setuptools`, but we do not need it for Spack. We have removed
+ # it from Spack to avoid a dependency on setuptools.
+ # self.pluginmanager.load_setuptools_entrypoints('pytest11')
+
self.pluginmanager.consider_env()
self.known_args_namespace = ns = self._parser.parse_known_args(args, namespace=self.option.copy())
if self.known_args_namespace.confcutdir is None and self.inifile:
diff --git a/lib/spack/external/_pytest/vendored_packages/pluggy.py b/lib/spack/external/_pytest/vendored_packages/pluggy.py
index aebddad01d..6f26552d73 100644
--- a/lib/spack/external/_pytest/vendored_packages/pluggy.py
+++ b/lib/spack/external/_pytest/vendored_packages/pluggy.py
@@ -497,26 +497,6 @@ class PluginManager(object):
"unknown hook %r in plugin %r" %
(name, hookimpl.plugin))
- def load_setuptools_entrypoints(self, entrypoint_name):
- """ Load modules from querying the specified setuptools entrypoint name.
- Return the number of loaded plugins. """
- from pkg_resources import (iter_entry_points, DistributionNotFound,
- VersionConflict)
- for ep in iter_entry_points(entrypoint_name):
- # is the plugin registered or blocked?
- if self.get_plugin(ep.name) or self.is_blocked(ep.name):
- continue
- try:
- plugin = ep.load()
- except DistributionNotFound:
- continue
- except VersionConflict as e:
- raise PluginValidationError(
- "Plugin %r could not be loaded: %s!" % (ep.name, e))
- self.register(plugin, name=ep.name)
- self._plugin_distinfo.append((plugin, ep.dist))
- return len(self._plugin_distinfo)
-
def list_plugin_distinfo(self):
""" return list of distinfo/plugin tuples for all setuptools registered
plugins. """
diff --git a/lib/spack/external/altgraph/__init__.py b/lib/spack/external/altgraph/__init__.py
index 289c6408d1..ee70a9c91b 100644
--- a/lib/spack/external/altgraph/__init__.py
+++ b/lib/spack/external/altgraph/__init__.py
@@ -139,9 +139,12 @@ To display the graph we can use the GraphViz backend::
@contributor: U{Reka Albert <http://www.phys.psu.edu/~ralbert/>}
'''
-import pkg_resources
-__version__ = pkg_resources.require('altgraph')[0].version
-
+# import pkg_resources
+# __version__ = pkg_resources.require('altgraph')[0].version
+# pkg_resources is not finding the altgraph import despite the fact that it is in sys.path
+# there is no .dist-info or .egg-info for pkg_resources to query the version from
+# so it must be set manually
+__version__ = '0.16.1'
class GraphError(ValueError):
pass
diff --git a/lib/spack/external/distro.py b/lib/spack/external/distro.py
index b63451640a..e3805de75f 100644
--- a/lib/spack/external/distro.py
+++ b/lib/spack/external/distro.py
@@ -64,6 +64,7 @@ NORMALIZED_LSB_ID = {
'enterpriseenterprise': 'oracle', # Oracle Enterprise Linux
'redhatenterpriseworkstation': 'rhel', # RHEL 6, 7 Workstation
'redhatenterpriseserver': 'rhel', # RHEL 6, 7 Server
+ 'redhatenterprisecomputenode': 'rhel', # RHEL 6 ComputeNode
}
#: Translation table for normalizing the distro ID derived from the file name
diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py
index 9991a66965..0fb6feae02 100644
--- a/lib/spack/spack/binary_distribution.py
+++ b/lib/spack/spack/binary_distribution.py
@@ -10,6 +10,9 @@ import tarfile
import shutil
import tempfile
import hashlib
+import glob
+import platform
+
from contextlib import closing
import ruamel.yaml as yaml
@@ -53,7 +56,7 @@ BUILD_CACHE_INDEX_TEMPLATE = '''
BUILD_CACHE_INDEX_ENTRY_TEMPLATE = ' <li><a href="{path}">{path}</a></li>'
-class NoOverwriteException(Exception):
+class NoOverwriteException(spack.error.SpackError):
"""
Raised when a file exists and must be overwritten.
"""
@@ -68,14 +71,18 @@ class NoGpgException(spack.error.SpackError):
"""
Raised when gpg2 is not in PATH
"""
- pass
+
+ def __init__(self, msg):
+ super(NoGpgException, self).__init__(msg)
class NoKeyException(spack.error.SpackError):
"""
Raised when gpg has no default key added.
"""
- pass
+
+ def __init__(self, msg):
+ super(NoKeyException, self).__init__(msg)
class PickKeyException(spack.error.SpackError):
@@ -84,7 +91,7 @@ class PickKeyException(spack.error.SpackError):
"""
def __init__(self, keys):
- err_msg = "Multi keys available for signing\n%s\n" % keys
+ err_msg = "Multiple keys available for signing\n%s\n" % keys
err_msg += "Use spack buildcache create -k <key hash> to pick a key."
super(PickKeyException, self).__init__(err_msg)
@@ -107,7 +114,9 @@ class NewLayoutException(spack.error.SpackError):
"""
Raised if directory layout is different from buildcache.
"""
- pass
+
+ def __init__(self, msg):
+ super(NewLayoutException, self).__init__(msg)
def build_cache_relative_path():
@@ -137,15 +146,21 @@ def read_buildinfo_file(prefix):
return buildinfo
-def write_buildinfo_file(prefix, workdir, rel=False):
+def write_buildinfo_file(spec, workdir, rel=False):
"""
Create a cache file containing information
required for the relocation
"""
+ prefix = spec.prefix
text_to_relocate = []
binary_to_relocate = []
link_to_relocate = []
blacklist = (".spack", "man")
+ prefix_to_hash = dict()
+ prefix_to_hash[str(spec.package.prefix)] = spec.dag_hash()
+ deps = spack.build_environment.get_rpath_deps(spec.package)
+ for d in deps:
+ prefix_to_hash[str(d.prefix)] = d.dag_hash()
# Do this at during tarball creation to save time when tarball unpacked.
# Used by make_package_relative to determine binaries to change.
for root, dirs, files in os.walk(prefix, topdown=True):
@@ -162,8 +177,8 @@ def write_buildinfo_file(prefix, workdir, rel=False):
link_to_relocate.append(rel_path_name)
else:
msg = 'Absolute link %s to %s ' % (path_name, link)
- msg += 'outside of stage %s ' % prefix
- msg += 'cannot be relocated.'
+ msg += 'outside of prefix %s ' % prefix
+ msg += 'should not be relocated.'
tty.warn(msg)
if relocate.needs_binary_relocation(m_type, m_subtype):
@@ -184,6 +199,7 @@ def write_buildinfo_file(prefix, workdir, rel=False):
buildinfo['relocate_textfiles'] = text_to_relocate
buildinfo['relocate_binaries'] = binary_to_relocate
buildinfo['relocate_links'] = link_to_relocate
+ buildinfo['prefix_to_hash'] = prefix_to_hash
filename = buildinfo_file_name(workdir)
with open(filename, 'w') as outfile:
outfile.write(syaml.dump(buildinfo, default_flow_style=True))
@@ -308,7 +324,7 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
tmpdir = tempfile.mkdtemp()
cache_prefix = build_cache_prefix(tmpdir)
- tarfile_name = tarball_name(spec, '.tar.bz2')
+ tarfile_name = tarball_name(spec, '.tar.gz')
tarfile_dir = os.path.join(cache_prefix, tarball_directory_name(spec))
tarfile_path = os.path.join(tarfile_dir, tarfile_name)
spackfile_path = os.path.join(
@@ -356,7 +372,7 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
os.remove(temp_tarfile_path)
# create info for later relocation and create tar
- write_buildinfo_file(spec.prefix, workdir, rel=rel)
+ write_buildinfo_file(spec, workdir, rel)
# optionally make the paths in the binaries relative to each other
# in the spack install tree before creating tarball
@@ -370,15 +386,15 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
tty.die(e)
else:
try:
- make_package_placeholder(workdir, spec, allow_root)
+ check_package_relocatable(workdir, spec, allow_root)
except Exception as e:
shutil.rmtree(workdir)
shutil.rmtree(tarfile_dir)
shutil.rmtree(tmpdir)
tty.die(e)
- # create compressed tarball of the install prefix
- with closing(tarfile.open(tarfile_path, 'w:bz2')) as tar:
+ # create gzip compressed tarball of the install prefix
+ with closing(tarfile.open(tarfile_path, 'w:gz')) as tar:
tar.add(name='%s' % workdir,
arcname='%s' % os.path.basename(spec.prefix))
# remove copy of install directory
@@ -400,6 +416,7 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
buildinfo = {}
buildinfo['relative_prefix'] = os.path.relpath(
spec.prefix, spack.store.layout.root)
+ buildinfo['relative_rpaths'] = rel
spec_dict['buildinfo'] = buildinfo
spec_dict['full_hash'] = spec.full_hash()
@@ -433,6 +450,9 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
web_util.push_to_url(
specfile_path, remote_specfile_path, keep_original=False)
+    tty.msg('Buildcache for "%s" written to \n %s' %
+ (spec, remote_spackfile_path))
+
try:
# create an index.html for the build_cache directory so specs can be
# found
@@ -478,100 +498,149 @@ def make_package_relative(workdir, spec, allow_root):
"""
prefix = spec.prefix
buildinfo = read_buildinfo_file(workdir)
- old_path = buildinfo['buildpath']
+ old_layout_root = buildinfo['buildpath']
orig_path_names = list()
cur_path_names = list()
for filename in buildinfo['relocate_binaries']:
orig_path_names.append(os.path.join(prefix, filename))
cur_path_names.append(os.path.join(workdir, filename))
- if spec.architecture.platform == 'darwin':
+ if (spec.architecture.platform == 'darwin' or
+ spec.architecture.platform == 'test' and
+ platform.system().lower() == 'darwin'):
relocate.make_macho_binaries_relative(cur_path_names, orig_path_names,
- old_path, allow_root)
- else:
+ old_layout_root)
+ if (spec.architecture.platform == 'linux' or
+ spec.architecture.platform == 'test' and
+ platform.system().lower() == 'linux'):
relocate.make_elf_binaries_relative(cur_path_names, orig_path_names,
- old_path, allow_root)
+ old_layout_root)
+ relocate.check_files_relocatable(cur_path_names, allow_root)
orig_path_names = list()
cur_path_names = list()
- for filename in buildinfo.get('relocate_links', []):
- orig_path_names.append(os.path.join(prefix, filename))
- cur_path_names.append(os.path.join(workdir, filename))
+ for linkname in buildinfo.get('relocate_links', []):
+ orig_path_names.append(os.path.join(prefix, linkname))
+ cur_path_names.append(os.path.join(workdir, linkname))
relocate.make_link_relative(cur_path_names, orig_path_names)
-def make_package_placeholder(workdir, spec, allow_root):
+def check_package_relocatable(workdir, spec, allow_root):
"""
Check if package binaries are relocatable.
Change links to placeholder links.
"""
- prefix = spec.prefix
buildinfo = read_buildinfo_file(workdir)
cur_path_names = list()
for filename in buildinfo['relocate_binaries']:
cur_path_names.append(os.path.join(workdir, filename))
relocate.check_files_relocatable(cur_path_names, allow_root)
- cur_path_names = list()
- for filename in buildinfo.get('relocate_links', []):
- cur_path_names.append(os.path.join(workdir, filename))
- relocate.make_link_placeholder(cur_path_names, workdir, prefix)
-
-def relocate_package(workdir, spec, allow_root):
+def relocate_package(spec, allow_root):
"""
Relocate the given package
"""
+ workdir = str(spec.prefix)
buildinfo = read_buildinfo_file(workdir)
- new_path = str(spack.store.layout.root)
- new_prefix = str(spack.paths.prefix)
- old_path = str(buildinfo['buildpath'])
- old_prefix = str(buildinfo.get('spackprefix',
- '/not/in/buildinfo/dictionary'))
- rel = buildinfo.get('relative_rpaths', False)
-
- tty.msg("Relocating package from",
- "%s to %s." % (old_path, new_path))
- path_names = set()
+ new_layout_root = str(spack.store.layout.root)
+ new_prefix = str(spec.prefix)
+ new_rel_prefix = str(os.path.relpath(new_prefix, new_layout_root))
+ new_spack_prefix = str(spack.paths.prefix)
+ old_layout_root = str(buildinfo['buildpath'])
+ old_spack_prefix = str(buildinfo.get('spackprefix'))
+ old_rel_prefix = buildinfo.get('relative_prefix')
+ old_prefix = os.path.join(old_layout_root, old_rel_prefix)
+ rel = buildinfo.get('relative_rpaths')
+ prefix_to_hash = buildinfo.get('prefix_to_hash', None)
+ if (old_rel_prefix != new_rel_prefix and not prefix_to_hash):
+ msg = "Package tarball was created from an install "
+ msg += "prefix with a different directory layout and an older "
+ msg += "buildcache create implementation. It cannot be relocated."
+ raise NewLayoutException(msg)
+ # older buildcaches do not have the prefix_to_hash dictionary
+ # need to set an empty dictionary and add one entry to
+ # prefix_to_prefix to reproduce the old behavior
+ if not prefix_to_hash:
+ prefix_to_hash = dict()
+ hash_to_prefix = dict()
+ hash_to_prefix[spec.format('{hash}')] = str(spec.package.prefix)
+ new_deps = spack.build_environment.get_rpath_deps(spec.package)
+ for d in new_deps:
+ hash_to_prefix[d.format('{hash}')] = str(d.prefix)
+ prefix_to_prefix = dict()
+ for orig_prefix, hash in prefix_to_hash.items():
+ prefix_to_prefix[orig_prefix] = hash_to_prefix.get(hash, None)
+ prefix_to_prefix[old_prefix] = new_prefix
+ prefix_to_prefix[old_layout_root] = new_layout_root
+
+ tty.debug("Relocating package from",
+ "%s to %s." % (old_layout_root, new_layout_root))
+
+ def is_backup_file(file):
+ return file.endswith('~')
+
+ # Text files containing the prefix text
+ text_names = list()
for filename in buildinfo['relocate_textfiles']:
- path_name = os.path.join(workdir, filename)
+ text_name = os.path.join(workdir, filename)
# Don't add backup files generated by filter_file during install step.
- if not path_name.endswith('~'):
- path_names.add(path_name)
- relocate.relocate_text(path_names, oldpath=old_path,
- newpath=new_path, oldprefix=old_prefix,
- newprefix=new_prefix)
- # If the binary files in the package were not edited to use
- # relative RPATHs, then the RPATHs need to be relocated
- if rel:
- if old_path != new_path:
- files_to_relocate = list(filter(
- lambda pathname: not relocate.file_is_relocatable(
- pathname, paths_to_relocate=[old_path, old_prefix]),
- map(lambda filename: os.path.join(workdir, filename),
- buildinfo['relocate_binaries'])))
-
- if len(old_path) < len(new_path) and files_to_relocate:
- tty.debug('Cannot do a binary string replacement with padding '
- 'for package because %s is longer than %s.' %
- (new_path, old_path))
- else:
- for path_name in files_to_relocate:
- relocate.replace_prefix_bin(path_name, old_path, new_path)
- else:
- path_names = set()
- for filename in buildinfo['relocate_binaries']:
- path_name = os.path.join(workdir, filename)
- path_names.add(path_name)
- if spec.architecture.platform == 'darwin':
- relocate.relocate_macho_binaries(path_names, old_path,
- new_path, allow_root)
- else:
- relocate.relocate_elf_binaries(path_names, old_path,
- new_path, allow_root)
- path_names = set()
- for filename in buildinfo.get('relocate_links', []):
- path_name = os.path.join(workdir, filename)
- path_names.add(path_name)
- relocate.relocate_links(path_names, old_path, new_path)
+ if not is_backup_file(text_name):
+ text_names.append(text_name)
+
+# If we are installing back to the same location don't replace anything
+ if old_layout_root != new_layout_root:
+ paths_to_relocate = [old_spack_prefix, old_layout_root]
+ paths_to_relocate.extend(prefix_to_hash.keys())
+ files_to_relocate = list(filter(
+ lambda pathname: not relocate.file_is_relocatable(
+ pathname, paths_to_relocate=paths_to_relocate),
+ map(lambda filename: os.path.join(workdir, filename),
+ buildinfo['relocate_binaries'])))
+ # If the buildcache was not created with relativized rpaths
+ # do the relocation of path in binaries
+ if (spec.architecture.platform == 'darwin' or
+ spec.architecture.platform == 'test' and
+ platform.system().lower() == 'darwin'):
+ relocate.relocate_macho_binaries(files_to_relocate,
+ old_layout_root,
+ new_layout_root,
+ prefix_to_prefix, rel,
+ old_prefix,
+ new_prefix)
+ if (spec.architecture.platform == 'linux' or
+ spec.architecture.platform == 'test' and
+ platform.system().lower() == 'linux'):
+ relocate.relocate_elf_binaries(files_to_relocate,
+ old_layout_root,
+ new_layout_root,
+ prefix_to_prefix, rel,
+ old_prefix,
+ new_prefix)
+ # Relocate links to the new install prefix
+ link_names = [linkname
+ for linkname in buildinfo.get('relocate_links', [])]
+ relocate.relocate_links(link_names,
+ old_layout_root,
+ new_layout_root,
+ old_prefix,
+ new_prefix,
+ prefix_to_prefix)
+
+ # For all buildcaches
+ # relocate the install prefixes in text files including dependencies
+ relocate.relocate_text(text_names,
+ old_layout_root, new_layout_root,
+ old_prefix, new_prefix,
+ old_spack_prefix,
+ new_spack_prefix,
+ prefix_to_prefix)
+
+ # relocate the install prefixes in binary files including dependencies
+ relocate.relocate_text_bin(files_to_relocate,
+ old_layout_root, new_layout_root,
+ old_prefix, new_prefix,
+ old_spack_prefix,
+ new_spack_prefix,
+ prefix_to_prefix)
def extract_tarball(spec, filename, allow_root=False, unsigned=False,
@@ -589,16 +658,16 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
stagepath = os.path.dirname(filename)
spackfile_name = tarball_name(spec, '.spack')
spackfile_path = os.path.join(stagepath, spackfile_name)
- tarfile_name = tarball_name(spec, '.tar.bz2')
+ tarfile_name = tarball_name(spec, '.tar.gz')
tarfile_path = os.path.join(tmpdir, tarfile_name)
specfile_name = tarball_name(spec, '.spec.yaml')
specfile_path = os.path.join(tmpdir, specfile_name)
with closing(tarfile.open(spackfile_path, 'r')) as tar:
tar.extractall(tmpdir)
- # older buildcache tarfiles use gzip compression
+ # some buildcache tarfiles use bzip2 compression
if not os.path.exists(tarfile_path):
- tarfile_name = tarball_name(spec, '.tar.gz')
+ tarfile_name = tarball_name(spec, '.tar.bz2')
tarfile_path = os.path.join(tmpdir, tarfile_name)
if not unsigned:
if os.path.exists('%s.asc' % specfile_path):
@@ -607,7 +676,7 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
Gpg.verify('%s.asc' % specfile_path, specfile_path, suppress)
except Exception as e:
shutil.rmtree(tmpdir)
- tty.die(e)
+ raise e
else:
shutil.rmtree(tmpdir)
raise NoVerifyException(
@@ -636,22 +705,30 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
# if the original relative prefix is in the spec file use it
buildinfo = spec_dict.get('buildinfo', {})
old_relative_prefix = buildinfo.get('relative_prefix', new_relative_prefix)
+ rel = buildinfo.get('relative_rpaths')
# if the original relative prefix and new relative prefix differ the
# directory layout has changed and the buildcache cannot be installed
- if old_relative_prefix != new_relative_prefix:
- shutil.rmtree(tmpdir)
- msg = "Package tarball was created from an install "
- msg += "prefix with a different directory layout.\n"
- msg += "It cannot be relocated."
- raise NewLayoutException(msg)
+ # if it was created with relative rpaths
+ info = 'old relative prefix %s\nnew relative prefix %s\nrelative rpaths %s'
+ tty.debug(info %
+ (old_relative_prefix, new_relative_prefix, rel))
+# if (old_relative_prefix != new_relative_prefix and (rel)):
+# shutil.rmtree(tmpdir)
+# msg = "Package tarball was created from an install "
+# msg += "prefix with a different directory layout. "
+# msg += "It cannot be relocated because it "
+# msg += "uses relative rpaths."
+# raise NewLayoutException(msg)
# extract the tarball in a temp directory
with closing(tarfile.open(tarfile_path, 'r')) as tar:
tar.extractall(path=tmpdir)
- # the base of the install prefix is used when creating the tarball
- # so the pathname should be the same now that the directory layout
- # is confirmed
- workdir = os.path.join(tmpdir, os.path.basename(spec.prefix))
+ # get the parent directory of the file .spack/binary_distribution
+    # this should be the directory unpacked from the tarball whose
+ # name is unknown because the prefix naming is unknown
+ bindist_file = glob.glob('%s/*/.spack/binary_distribution' % tmpdir)[0]
+ workdir = re.sub('/.spack/binary_distribution$', '', bindist_file)
+ tty.debug('workdir %s' % workdir)
# install_tree copies hardlinks
# create a temporary tarfile from prefix and exract it to workdir
# tarfile preserves hardlinks
@@ -669,10 +746,10 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
os.remove(specfile_path)
try:
- relocate_package(spec.prefix, spec, allow_root)
+ relocate_package(spec, allow_root)
except Exception as e:
shutil.rmtree(spec.prefix)
- tty.die(e)
+ raise e
else:
manifest_file = os.path.join(spec.prefix,
spack.store.layout.metadata_dir,
@@ -682,6 +759,8 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
tty.warn('No manifest file in tarball for spec %s' % spec_id)
finally:
shutil.rmtree(tmpdir)
+ if os.path.exists(filename):
+ os.remove(filename)
# Internal cache for downloaded specs
@@ -729,7 +808,7 @@ def get_spec(spec=None, force=False):
tty.debug("No Spack mirrors are currently configured")
return {}
- if spec in _cached_specs:
+ if _cached_specs and spec in _cached_specs:
return _cached_specs
for mirror in spack.mirror.MirrorCollection().values():
@@ -799,6 +878,7 @@ def get_specs(force=False, allarch=False):
def get_keys(install=False, trust=False, force=False):
"""
Get pgp public keys available on mirror
+ with suffix .key or .pub
"""
if not spack.mirror.MirrorCollection():
tty.die("Please add a spack mirror to allow " +
@@ -813,18 +893,20 @@ def get_keys(install=False, trust=False, force=False):
mirror_dir = url_util.local_file_path(fetch_url_build_cache)
if mirror_dir:
tty.msg("Finding public keys in %s" % mirror_dir)
- files = os.listdir(mirror_dir)
+ files = os.listdir(str(mirror_dir))
for file in files:
- if re.search(r'\.key', file):
+ if re.search(r'\.key', file) or re.search(r'\.pub', file):
link = url_util.join(fetch_url_build_cache, file)
keys.add(link)
else:
tty.msg("Finding public keys at %s" %
url_util.format(fetch_url_build_cache))
- p, links = web_util.spider(fetch_url_build_cache, depth=1)
+ # For s3 mirror need to request index.html directly
+ p, links = web_util.spider(
+ url_util.join(fetch_url_build_cache, 'index.html'))
for link in links:
- if re.search(r'\.key', link):
+ if re.search(r'\.key', link) or re.search(r'\.pub', link):
keys.add(link)
for link in keys:
diff --git a/lib/spack/spack/build_systems/autotools.py b/lib/spack/spack/build_systems/autotools.py
index c21b8dad71..5b4f223d41 100644
--- a/lib/spack/spack/build_systems/autotools.py
+++ b/lib/spack/spack/build_systems/autotools.py
@@ -263,6 +263,12 @@ class AutotoolsPackage(PackageBase):
if values:
values_str = '{0}={1}'.format(flag.upper(), ' '.join(values))
self.configure_flag_args.append(values_str)
+ # Spack's fflags are meant for both F77 and FC, therefore we
+        # additionally set FCFLAGS if required.
+ values = flags.get('fflags', None)
+ if values:
+ values_str = 'FCFLAGS={0}'.format(' '.join(values))
+ self.configure_flag_args.append(values_str)
def configure(self, spec, prefix):
"""Runs configure with the arguments specified in
diff --git a/lib/spack/spack/build_systems/cmake.py b/lib/spack/spack/build_systems/cmake.py
index 14f33e94e6..d7da957a9d 100644
--- a/lib/spack/spack/build_systems/cmake.py
+++ b/lib/spack/spack/build_systems/cmake.py
@@ -147,33 +147,129 @@ class CMakePackage(PackageBase):
except KeyError:
build_type = 'RelWithDebInfo'
+ define = CMakePackage.define
args = [
'-G', generator,
- '-DCMAKE_INSTALL_PREFIX:PATH={0}'.format(pkg.prefix),
- '-DCMAKE_BUILD_TYPE:STRING={0}'.format(build_type),
+ define('CMAKE_INSTALL_PREFIX', pkg.prefix),
+ define('CMAKE_BUILD_TYPE', build_type),
]
if primary_generator == 'Unix Makefiles':
- args.append('-DCMAKE_VERBOSE_MAKEFILE:BOOL=ON')
+ args.append(define('CMAKE_VERBOSE_MAKEFILE', True))
if platform.mac_ver()[0]:
args.extend([
- '-DCMAKE_FIND_FRAMEWORK:STRING=LAST',
- '-DCMAKE_FIND_APPBUNDLE:STRING=LAST'
+ define('CMAKE_FIND_FRAMEWORK', "LAST"),
+ define('CMAKE_FIND_APPBUNDLE', "LAST"),
])
# Set up CMake rpath
- args.append('-DCMAKE_INSTALL_RPATH_USE_LINK_PATH:BOOL=FALSE')
- rpaths = ';'.join(spack.build_environment.get_rpaths(pkg))
- args.append('-DCMAKE_INSTALL_RPATH:STRING={0}'.format(rpaths))
+ args.extend([
+ define('CMAKE_INSTALL_RPATH_USE_LINK_PATH', False),
+ define('CMAKE_INSTALL_RPATH',
+ spack.build_environment.get_rpaths(pkg)),
+ ])
# CMake's find_package() looks in CMAKE_PREFIX_PATH first, help CMake
# to find immediate link dependencies in right places:
deps = [d.prefix for d in
pkg.spec.dependencies(deptype=('build', 'link'))]
deps = filter_system_paths(deps)
- args.append('-DCMAKE_PREFIX_PATH:STRING={0}'.format(';'.join(deps)))
+ args.append(define('CMAKE_PREFIX_PATH', deps))
return args
+ @staticmethod
+ def define(cmake_var, value):
+ """Return a CMake command line argument that defines a variable.
+
+ The resulting argument will convert boolean values to OFF/ON
+ and lists/tuples to CMake semicolon-separated string lists. All other
+ values will be interpreted as strings.
+
+ Examples:
+
+ .. code-block:: python
+
+ [define('BUILD_SHARED_LIBS', True),
+ define('CMAKE_CXX_STANDARD', 14),
+ define('swr', ['avx', 'avx2'])]
+
+ will generate the following configuration options:
+
+ .. code-block:: console
+
+ ["-DBUILD_SHARED_LIBS:BOOL=ON",
+ "-DCMAKE_CXX_STANDARD:STRING=14",
+             "-DSWR:STRING=avx;avx2"]
+
+ """
+ # Create a list of pairs. Each pair includes a configuration
+ # option and whether or not that option is activated
+ if isinstance(value, bool):
+ kind = 'BOOL'
+ value = "ON" if value else "OFF"
+ else:
+ kind = 'STRING'
+ if isinstance(value, (list, tuple)):
+ value = ";".join(str(v) for v in value)
+ else:
+ value = str(value)
+
+ return "".join(["-D", cmake_var, ":", kind, "=", value])
+
+ def define_from_variant(self, cmake_var, variant=None):
+ """Return a CMake command line argument from the given variant's value.
+
+ The optional ``variant`` argument defaults to the lower-case transform
+ of ``cmake_var``.
+
+ This utility function is similar to
+ :py:meth:`~.AutotoolsPackage.with_or_without`.
+
+ Examples:
+
+ Given a package with:
+
+ .. code-block:: python
+
+ variant('cxxstd', default='11', values=('11', '14'),
+ multi=False, description='')
+ variant('shared', default=True, description='')
+ variant('swr', values=any_combination_of('avx', 'avx2'),
+ description='')
+
+ calling this function like:
+
+ .. code-block:: python
+
+ [define_from_variant('BUILD_SHARED_LIBS', 'shared'),
+ define_from_variant('CMAKE_CXX_STANDARD', 'cxxstd'),
+ define_from_variant('SWR')]
+
+ will generate the following configuration options:
+
+ .. code-block:: console
+
+ ["-DBUILD_SHARED_LIBS:BOOL=ON",
+ "-DCMAKE_CXX_STANDARD:STRING=14",
+             "-DSWR:STRING=avx;avx2"]
+
+ for ``<spec-name> cxxstd=14 +shared swr=avx,avx2``
+ """
+
+ if variant is None:
+ variant = cmake_var.lower()
+
+ if variant not in self.variants:
+ raise KeyError(
+ '"{0}" is not a variant of "{1}"'.format(variant, self.name))
+
+ value = self.spec.variants[variant].value
+ if isinstance(value, (tuple, list)):
+ # Sort multi-valued variants for reproducibility
+ value = sorted(value)
+
+ return self.define(cmake_var, value)
+
def flags_to_build_system_args(self, flags):
"""Produces a list of all command line arguments to pass the specified
compiler flags to cmake. Note CMAKE does not have a cppflags option,
diff --git a/lib/spack/spack/build_systems/cuda.py b/lib/spack/spack/build_systems/cuda.py
index fc96cffe60..cbee710049 100644
--- a/lib/spack/spack/build_systems/cuda.py
+++ b/lib/spack/spack/build_systems/cuda.py
@@ -13,39 +13,65 @@ class CudaPackage(PackageBase):
"""Auxiliary class which contains CUDA variant, dependencies and conflicts
and is meant to unify and facilitate its usage.
"""
+ maintainers = ['ax3l', 'svenevs']
- # FIXME: keep cuda and cuda_arch separate to make usage easier untill
+ # https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#gpu-feature-list
+ # https://developer.nvidia.com/cuda-gpus
+ # https://en.wikipedia.org/wiki/CUDA#GPUs_supported
+ cuda_arch_values = [
+ '10', '11', '12', '13',
+ '20', '21',
+ '30', '32', '35', '37',
+ '50', '52', '53',
+ '60', '61', '62',
+ '70', '72', '75',
+ ]
+
+ # FIXME: keep cuda and cuda_arch separate to make usage easier until
# Spack has depends_on(cuda, when='cuda_arch!=None') or alike
variant('cuda', default=False,
description='Build with CUDA')
- # see http://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#gpu-feature-list
- # https://developer.nvidia.com/cuda-gpus
+
variant('cuda_arch',
description='CUDA architecture',
- values=spack.variant.any_combination_of(
- '20', '30', '32', '35', '50', '52', '53', '60', '61',
- '62', '70', '72', '75'
- ))
+ values=spack.variant.any_combination_of(*cuda_arch_values))
- # see http://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#nvcc-examples
- # and http://llvm.org/docs/CompileCudaWithLLVM.html#compiling-cuda-code
+ # https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#nvcc-examples
+ # https://llvm.org/docs/CompileCudaWithLLVM.html#compiling-cuda-code
@staticmethod
def cuda_flags(arch_list):
return [('--generate-code arch=compute_{0},code=sm_{0} '
'--generate-code arch=compute_{0},code=compute_{0}').format(s)
for s in arch_list]
- depends_on("cuda@7:", when='+cuda')
+ depends_on('cuda', when='+cuda')
# CUDA version vs Architecture
- depends_on("cuda@8:", when='cuda_arch=60')
- depends_on("cuda@8:", when='cuda_arch=61')
- depends_on("cuda@8:", when='cuda_arch=62')
- depends_on("cuda@9:", when='cuda_arch=70')
- depends_on("cuda@9:", when='cuda_arch=72')
- depends_on("cuda@10:", when='cuda_arch=75')
+ # https://en.wikipedia.org/wiki/CUDA#GPUs_supported
+ depends_on('cuda@:6.0', when='cuda_arch=10')
+ depends_on('cuda@:6.5', when='cuda_arch=11')
+ depends_on('cuda@2.1:6.5', when='cuda_arch=12')
+ depends_on('cuda@2.1:6.5', when='cuda_arch=13')
+
+ depends_on('cuda@3.0:8.0', when='cuda_arch=20')
+ depends_on('cuda@3.2:8.0', when='cuda_arch=21')
+
+ depends_on('cuda@5.0:10.2', when='cuda_arch=30')
+ depends_on('cuda@5.0:10.2', when='cuda_arch=32')
+ depends_on('cuda@5.0:10.2', when='cuda_arch=35')
+ depends_on('cuda@6.5:10.2', when='cuda_arch=37')
+
+ depends_on('cuda@6.0:', when='cuda_arch=50')
+ depends_on('cuda@6.5:', when='cuda_arch=52')
+ depends_on('cuda@6.5:', when='cuda_arch=53')
+
+ depends_on('cuda@8.0:', when='cuda_arch=60')
+ depends_on('cuda@8.0:', when='cuda_arch=61')
+ depends_on('cuda@8.0:', when='cuda_arch=62')
- depends_on('cuda@:8', when='cuda_arch=20')
+ depends_on('cuda@9.0:', when='cuda_arch=70')
+ depends_on('cuda@9.0:', when='cuda_arch=72')
+ depends_on('cuda@10.0:', when='cuda_arch=75')
# There are at least three cases to be aware of for compiler conflicts
# 1. Linux x86_64
@@ -114,12 +140,12 @@ class CudaPackage(PackageBase):
conflicts('%intel@16.0:', when='+cuda ^cuda@:8.0.43')
conflicts('%intel@17.0:', when='+cuda ^cuda@:8.0.60')
conflicts('%intel@18.0:', when='+cuda ^cuda@:9.9')
- conflicts('%intel@19.0:', when='+cuda ^cuda@:10.2.89')
+ conflicts('%intel@19.0:', when='+cuda ^cuda@:10.0')
# XL is mostly relevant for ppc64le Linux
conflicts('%xl@:12,14:', when='+cuda ^cuda@:9.1')
conflicts('%xl@:12,14:15,17:', when='+cuda ^cuda@9.2')
- conflicts('%xl@17:', when='+cuda ^cuda@10.0.130:10.2.89')
+ conflicts('%xl@17:', when='+cuda ^cuda@:10.2.89')
# Mac OS X
# platform = ' platform=darwin'
@@ -130,18 +156,8 @@ class CudaPackage(PackageBase):
# `clang-apple@x.y.z as a possible fix.
# Compiler conflicts will be eventual taken from here:
# https://docs.nvidia.com/cuda/cuda-installation-guide-mac-os-x/index.html#abstract
+ conflicts('platform=darwin', when='+cuda ^cuda@11.0:')
# Make sure cuda_arch can not be used without +cuda
- conflicts('~cuda', when='cuda_arch=20')
- conflicts('~cuda', when='cuda_arch=30')
- conflicts('~cuda', when='cuda_arch=32')
- conflicts('~cuda', when='cuda_arch=35')
- conflicts('~cuda', when='cuda_arch=50')
- conflicts('~cuda', when='cuda_arch=52')
- conflicts('~cuda', when='cuda_arch=53')
- conflicts('~cuda', when='cuda_arch=60')
- conflicts('~cuda', when='cuda_arch=61')
- conflicts('~cuda', when='cuda_arch=62')
- conflicts('~cuda', when='cuda_arch=70')
- conflicts('~cuda', when='cuda_arch=72')
- conflicts('~cuda', when='cuda_arch=75')
+ for value in cuda_arch_values:
+ conflicts('~cuda', when='cuda_arch=' + value)
diff --git a/lib/spack/spack/build_systems/sip.py b/lib/spack/spack/build_systems/sip.py
index 314f91d5d2..f814ef1837 100644
--- a/lib/spack/spack/build_systems/sip.py
+++ b/lib/spack/spack/build_systems/sip.py
@@ -5,9 +5,10 @@
import inspect
-from llnl.util.filesystem import working_dir
-from spack.directives import depends_on, extends, resource
-from spack.package import PackageBase, run_before, run_after
+from llnl.util.filesystem import working_dir, join_path
+from spack.directives import depends_on, extends
+from spack.package import PackageBase, run_after
+import os
class SIPPackage(PackageBase):
@@ -40,33 +41,12 @@ class SIPPackage(PackageBase):
extends('python')
depends_on('qt')
-
- resource(name='sip',
- url='https://www.riverbankcomputing.com/static/Downloads/sip/4.19.18/sip-4.19.18.tar.gz',
- sha256='c0bd863800ed9b15dcad477c4017cdb73fa805c25908b0240564add74d697e1e',
- destination='.')
+ depends_on('py-sip')
def python(self, *args, **kwargs):
"""The python ``Executable``."""
inspect.getmodule(self).python(*args, **kwargs)
- @run_before('configure')
- def install_sip(self):
- args = [
- '--sip-module={0}'.format(self.sip_module),
- '--bindir={0}'.format(self.prefix.bin),
- '--destdir={0}'.format(inspect.getmodule(self).site_packages_dir),
- '--incdir={0}'.format(inspect.getmodule(self).python_include_dir),
- '--sipdir={0}'.format(self.prefix.share.sip),
- '--stubsdir={0}'.format(inspect.getmodule(self).site_packages_dir),
- ]
-
- with working_dir('sip-4.19.18'):
- self.python('configure.py', *args)
-
- inspect.getmodule(self).make()
- inspect.getmodule(self).make('install')
-
def configure_file(self):
"""Returns the name of the configure file to use."""
return 'configure.py'
@@ -77,12 +57,15 @@ class SIPPackage(PackageBase):
args = self.configure_args()
+ python_include_dir = 'python' + str(spec['python'].version.up_to(2))
+
args.extend([
'--verbose',
'--confirm-license',
'--qmake', spec['qt'].prefix.bin.qmake,
- '--sip', prefix.bin.sip,
- '--sip-incdir', inspect.getmodule(self).python_include_dir,
+ '--sip', spec['py-sip'].prefix.bin.sip,
+ '--sip-incdir', join_path(spec['py-sip'].prefix.include,
+ python_include_dir),
'--bindir', prefix.bin,
'--destdir', inspect.getmodule(self).site_packages_dir,
])
@@ -131,3 +114,14 @@ class SIPPackage(PackageBase):
# Check that self.prefix is there after installation
run_after('install')(PackageBase.sanity_check_prefix)
+
+ @run_after('install')
+ def extend_path_setup(self):
+ # See github issue #14121 and PR #15297
+ module = self.spec['py-sip'].variants['module'].value
+ if module != 'sip':
+ module = module.split('.')[0]
+ with working_dir(inspect.getmodule(self).site_packages_dir):
+ with open(os.path.join(module, '__init__.py'), 'a') as f:
+ f.write('from pkgutil import extend_path\n')
+ f.write('__path__ = extend_path(__path__, __name__)\n')
diff --git a/lib/spack/spack/build_systems/sourceware.py b/lib/spack/spack/build_systems/sourceware.py
new file mode 100644
index 0000000000..b779b530dc
--- /dev/null
+++ b/lib/spack/spack/build_systems/sourceware.py
@@ -0,0 +1,37 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import spack.util.url
+import spack.package
+
+
+class SourcewarePackage(spack.package.PackageBase):
+ """Mixin that takes care of setting url and mirrors for Sourceware.org
+ packages."""
+ #: Path of the package in a Sourceware mirror
+ sourceware_mirror_path = None
+
+ #: List of Sourceware mirrors used by Spack
+ base_mirrors = [
+ 'https://sourceware.org/pub/',
+ 'https://mirrors.kernel.org/sourceware/',
+ 'https://ftp.gwdg.de/pub/linux/sources.redhat.com/'
+ ]
+
+ @property
+ def urls(self):
+ self._ensure_sourceware_mirror_path_is_set_or_raise()
+ return [
+ spack.util.url.join(m, self.sourceware_mirror_path,
+ resolve_href=True)
+ for m in self.base_mirrors
+ ]
+
+ def _ensure_sourceware_mirror_path_is_set_or_raise(self):
+ if self.sourceware_mirror_path is None:
+ cls_name = type(self).__name__
+ msg = ('{0} must define a `sourceware_mirror_path` attribute'
+ ' [none defined]')
+ raise AttributeError(msg.format(cls_name))
diff --git a/lib/spack/spack/build_systems/xorg.py b/lib/spack/spack/build_systems/xorg.py
new file mode 100644
index 0000000000..815bb7c26b
--- /dev/null
+++ b/lib/spack/spack/build_systems/xorg.py
@@ -0,0 +1,37 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import spack.util.url
+import spack.package
+
+
+class XorgPackage(spack.package.PackageBase):
+ """Mixin that takes care of setting url and mirrors for x.org
+ packages."""
+    #: Path of the package in an x.org mirror
+ xorg_mirror_path = None
+
+ #: List of x.org mirrors used by Spack
+ base_mirrors = [
+ 'https://www.x.org/archive/individual/',
+ 'https://mirrors.ircam.fr/pub/x.org/individual/',
+ 'http://xorg.mirrors.pair.com/individual/'
+ ]
+
+ @property
+ def urls(self):
+ self._ensure_xorg_mirror_path_is_set_or_raise()
+ return [
+ spack.util.url.join(m, self.xorg_mirror_path,
+ resolve_href=True)
+ for m in self.base_mirrors
+ ]
+
+ def _ensure_xorg_mirror_path_is_set_or_raise(self):
+ if self.xorg_mirror_path is None:
+ cls_name = type(self).__name__
+ msg = ('{0} must define a `xorg_mirror_path` attribute'
+ ' [none defined]')
+ raise AttributeError(msg.format(cls_name))
diff --git a/lib/spack/spack/caches.py b/lib/spack/spack/caches.py
index 98fa8d5795..49624c06b2 100644
--- a/lib/spack/spack/caches.py
+++ b/lib/spack/spack/caches.py
@@ -50,8 +50,9 @@ def _fetch_cache():
class MirrorCache(object):
- def __init__(self, root):
+ def __init__(self, root, skip_unstable_versions):
self.root = os.path.abspath(root)
+ self.skip_unstable_versions = skip_unstable_versions
def store(self, fetcher, relative_dest):
"""Fetch and relocate the fetcher's target into our mirror cache."""
@@ -84,5 +85,3 @@ class MirrorCache(object):
#: Spack's local cache for downloaded source archives
fetch_cache = llnl.util.lang.Singleton(_fetch_cache)
-
-mirror_cache = None
diff --git a/lib/spack/spack/ci.py b/lib/spack/spack/ci.py
index ed06524073..cbdfccb8bf 100644
--- a/lib/spack/spack/ci.py
+++ b/lib/spack/spack/ci.py
@@ -947,8 +947,9 @@ def read_cdashid_from_mirror(spec, mirror_url):
def push_mirror_contents(env, spec, yaml_path, mirror_url, build_id):
if mirror_url:
tty.debug('Creating buildcache')
- buildcache._createtarball(env, yaml_path, None, mirror_url, None,
- True, True, False, False, True, False)
+ buildcache._createtarball(env, yaml_path, None, True, False,
+ mirror_url, None, True, False, False, True,
+ False)
if build_id:
tty.debug('Writing cdashid ({0}) to remote mirror: {1}'.format(
build_id, mirror_url))
diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py
index 2a75a87b54..83e12004a1 100644
--- a/lib/spack/spack/cmd/__init__.py
+++ b/lib/spack/spack/cmd/__init__.py
@@ -177,7 +177,7 @@ def elide_list(line_list, max_num=10):
return line_list
-def disambiguate_spec(spec, env, local=False, installed=True):
+def disambiguate_spec(spec, env, local=False, installed=True, first=False):
"""Given a spec, figure out which installed package it refers to.
Arguments:
@@ -190,10 +190,11 @@ def disambiguate_spec(spec, env, local=False, installed=True):
database query. See ``spack.database.Database._query`` for details.
"""
hashes = env.all_hashes() if env else None
- return disambiguate_spec_from_hashes(spec, hashes, local, installed)
+ return disambiguate_spec_from_hashes(spec, hashes, local, installed, first)
-def disambiguate_spec_from_hashes(spec, hashes, local=False, installed=True):
+def disambiguate_spec_from_hashes(spec, hashes, local=False,
+ installed=True, first=False):
"""Given a spec and a list of hashes, get concrete spec the spec refers to.
Arguments:
@@ -213,6 +214,9 @@ def disambiguate_spec_from_hashes(spec, hashes, local=False, installed=True):
if not matching_specs:
tty.die("Spec '%s' matches no installed packages." % spec)
+ elif first:
+ return matching_specs[0]
+
elif len(matching_specs) > 1:
format_string = '{name}{@version}{%compiler}{arch=architecture}'
args = ["%s matches multiple packages." % spec,
diff --git a/lib/spack/spack/cmd/buildcache.py b/lib/spack/spack/cmd/buildcache.py
index 392984f852..35e735cdf1 100644
--- a/lib/spack/spack/cmd/buildcache.py
+++ b/lib/spack/spack/cmd/buildcache.py
@@ -52,16 +52,35 @@ def setup_parser(subparser):
create.add_argument('-k', '--key', metavar='key',
type=str, default=None,
help="Key for signing.")
- create.add_argument('-d', '--directory', metavar='directory',
- type=str, default='.',
- help="directory in which to save the tarballs.")
+ output = create.add_mutually_exclusive_group(required=True)
+ output.add_argument('-d', '--directory',
+ metavar='directory',
+ type=str,
+ help="local directory where " +
+ "buildcaches will be written.")
+ output.add_argument('-m', '--mirror-name',
+ metavar='mirror-name',
+ type=str,
+ help="name of the mirror where " +
+ "buildcaches will be written.")
+ output.add_argument('--mirror-url',
+ metavar='mirror-url',
+ type=str,
+ help="URL of the mirror where " +
+ "buildcaches will be written.")
create.add_argument('--no-rebuild-index', action='store_true',
default=False, help="skip rebuilding index after " +
"building package(s)")
create.add_argument('-y', '--spec-yaml', default=None,
help='Create buildcache entry for spec from yaml file')
- create.add_argument('--no-deps', action='store_true', default='false',
- help='Create buildcache entry wo/ dependencies')
+ create.add_argument('--only', default='package,dependencies',
+ dest='things_to_install',
+ choices=['package', 'dependencies'],
                        help=('Select the buildcache mode. The default is to'
+ ' build a cache for the package along with all'
+ ' its dependencies. Alternatively, one can'
+ ' decide to build a cache for only the package'
+ ' or only the dependencies'))
arguments.add_common_arguments(create, ['specs'])
create.set_defaults(func=createtarball)
@@ -76,6 +95,10 @@ def setup_parser(subparser):
install.add_argument('-u', '--unsigned', action='store_true',
help="install unsigned buildcache" +
" tarballs for testing")
+ install.add_argument('-o', '--otherarch', action='store_true',
+ help="install specs from other architectures" +
+ " instead of default platform and OS")
+
arguments.add_common_arguments(install, ['specs'])
install.set_defaults(func=installtarball)
@@ -252,7 +275,8 @@ def find_matching_specs(pkgs, allow_multiple_matches=False, env=None):
return specs_from_cli
-def match_downloaded_specs(pkgs, allow_multiple_matches=False, force=False):
+def match_downloaded_specs(pkgs, allow_multiple_matches=False, force=False,
+ other_arch=False):
"""Returns a list of specs matching the not necessarily
concretized specs given from cli
@@ -266,7 +290,7 @@ def match_downloaded_specs(pkgs, allow_multiple_matches=False, force=False):
# List of specs that match expressions given via command line
specs_from_cli = []
has_errors = False
- allarch = False
+ allarch = other_arch
specs = bindist.get_specs(force, allarch)
for pkg in pkgs:
matches = []
@@ -299,8 +323,9 @@ def match_downloaded_specs(pkgs, allow_multiple_matches=False, force=False):
return specs_from_cli
-def _createtarball(env, spec_yaml, packages, directory, key, no_deps, force,
- rel, unsigned, allow_root, no_rebuild_index):
+def _createtarball(env, spec_yaml, packages, add_spec, add_deps,
+ output_location, key, force, rel, unsigned, allow_root,
+ no_rebuild_index):
if spec_yaml:
packages = set()
with open(spec_yaml, 'r') as fd:
@@ -320,13 +345,12 @@ def _createtarball(env, spec_yaml, packages, directory, key, no_deps, force,
pkgs = set(packages)
specs = set()
- outdir = '.'
- if directory:
- outdir = directory
-
- mirror = spack.mirror.MirrorCollection().lookup(outdir)
+ mirror = spack.mirror.MirrorCollection().lookup(output_location)
outdir = url_util.format(mirror.push_url)
+ msg = 'Buildcache files will be output to %s/build_cache' % outdir
+ tty.msg(msg)
+
signkey = None
if key:
signkey = key
@@ -342,14 +366,23 @@ def _createtarball(env, spec_yaml, packages, directory, key, no_deps, force,
tty.debug('skipping external or virtual spec %s' %
match.format())
else:
- tty.debug('adding matching spec %s' % match.format())
- specs.add(match)
- if no_deps is True:
+ if add_spec:
+ tty.debug('adding matching spec %s' % match.format())
+ specs.add(match)
+ else:
+ tty.debug('skipping matching spec %s' % match.format())
+
+ if not add_deps:
continue
+
tty.debug('recursing dependencies')
for d, node in match.traverse(order='post',
depth=True,
deptype=('link', 'run')):
+ # skip root, since it's handled above
+ if d == 0:
+ continue
+
if node.external or node.virtual:
tty.debug('skipping external or virtual dependency %s' %
node.format())
@@ -360,14 +393,10 @@ def _createtarball(env, spec_yaml, packages, directory, key, no_deps, force,
tty.debug('writing tarballs to %s/build_cache' % outdir)
for spec in specs:
- tty.msg('creating binary cache file for package %s ' % spec.format())
- try:
- bindist.build_tarball(spec, outdir, force, rel,
- unsigned, allow_root, signkey,
- not no_rebuild_index)
- except Exception as e:
- tty.warn('%s' % e)
- pass
+ tty.debug('creating binary cache file for package %s ' % spec.format())
+ bindist.build_tarball(spec, outdir, force, rel,
+ unsigned, allow_root, signkey,
+ not no_rebuild_index)
def createtarball(args):
@@ -376,9 +405,47 @@ def createtarball(args):
# restrict matching to current environment if one is active
env = ev.get_env(args, 'buildcache create')
- _createtarball(env, args.spec_yaml, args.specs, args.directory,
- args.key, args.no_deps, args.force, args.rel, args.unsigned,
- args.allow_root, args.no_rebuild_index)
+ output_location = None
+ if args.directory:
+ output_location = args.directory
+
+ # User meant to provide a path to a local directory.
+ # Ensure that they did not accidentally pass a URL.
+ scheme = url_util.parse(output_location, scheme='<missing>').scheme
+ if scheme != '<missing>':
+ raise ValueError(
+ '"--directory" expected a local path; got a URL, instead')
+
+ # User meant to provide a path to a local directory.
+ # Ensure that the mirror lookup does not mistake it for a named mirror.
+ output_location = 'file://' + output_location
+
+ elif args.mirror_name:
+ output_location = args.mirror_name
+
+ # User meant to provide the name of a preconfigured mirror.
+ # Ensure that the mirror lookup actually returns a named mirror.
+ result = spack.mirror.MirrorCollection().lookup(output_location)
+ if result.name == "<unnamed>":
+ raise ValueError(
+ 'no configured mirror named "{name}"'.format(
+ name=output_location))
+
+ elif args.mirror_url:
+ output_location = args.mirror_url
+
+ # User meant to provide a URL for an anonymous mirror.
+ # Ensure that they actually provided a URL.
+ scheme = url_util.parse(output_location, scheme='<missing>').scheme
+ if scheme == '<missing>':
+ raise ValueError(
+ '"{url}" is not a valid URL'.format(url=output_location))
+ add_spec = ('package' in args.things_to_install)
+ add_deps = ('dependencies' in args.things_to_install)
+
+ _createtarball(env, args.spec_yaml, args.specs, add_spec, add_deps,
+ output_location, args.key, args.force, args.rel,
+ args.unsigned, args.allow_root, args.no_rebuild_index)
def installtarball(args):
@@ -387,7 +454,8 @@ def installtarball(args):
tty.die("build cache file installation requires" +
" at least one package spec argument")
pkgs = set(args.specs)
- matches = match_downloaded_specs(pkgs, args.multiple, args.force)
+ matches = match_downloaded_specs(pkgs, args.multiple, args.force,
+ args.otherarch)
for match in matches:
install_tarball(match, args)
diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py
index 343915868c..eaeaf5337f 100644
--- a/lib/spack/spack/cmd/checksum.py
+++ b/lib/spack/spack/cmd/checksum.py
@@ -56,7 +56,8 @@ def checksum(parser, args):
tty.die("Could not find any versions for {0}".format(pkg.name))
version_lines = spack.stage.get_checksums_for_versions(
- url_dict, pkg.name, keep_stage=args.keep_stage)
+ url_dict, pkg.name, keep_stage=args.keep_stage,
+ fetch_options=pkg.fetch_options)
print()
print(version_lines)
diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py
index f9b7a382ea..304b531b49 100644
--- a/lib/spack/spack/cmd/create.py
+++ b/lib/spack/spack/cmd/create.py
@@ -245,7 +245,9 @@ class PythonPackageTemplate(PackageTemplate):
base_class_name = 'PythonPackage'
dependencies = """\
- # FIXME: Add dependencies if required.
+ # FIXME: Add dependencies if required. Only add the python dependency
+ # if you need specific versions. A generic python dependency is
+    # added implicitly by the PythonPackage class.
# depends_on('python@2.X:2.Y,3.Z:', type=('build', 'run'))
# depends_on('py-setuptools', type='build')
# depends_on('py-foo', type=('build', 'run'))"""
diff --git a/lib/spack/spack/cmd/debug.py b/lib/spack/spack/cmd/debug.py
index 4fc39251af..074e95209a 100644
--- a/lib/spack/spack/cmd/debug.py
+++ b/lib/spack/spack/cmd/debug.py
@@ -3,7 +3,10 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+from __future__ import print_function
+
import os
+import platform
import re
from datetime import datetime
from glob import glob
@@ -11,7 +14,9 @@ from glob import glob
import llnl.util.tty as tty
from llnl.util.filesystem import working_dir
+import spack.architecture as architecture
import spack.paths
+from spack.main import get_version
from spack.util.executable import which
description = "debugging commands for troubleshooting Spack"
@@ -23,6 +28,7 @@ def setup_parser(subparser):
sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='debug_command')
sp.add_parser('create-db-tarball',
help="create a tarball of Spack's installation metadata")
+ sp.add_parser('report', help='print information useful for bug reports')
def _debug_tarball_suffix():
@@ -78,6 +84,16 @@ def create_db_tarball(args):
tty.msg('Created %s' % tarball_name)
+def report(args):
+ print('* **Spack:**', get_version())
+ print('* **Python:**', platform.python_version())
+ print('* **Platform:**', architecture.Arch(
+ architecture.platform(), 'frontend', 'frontend'))
+
+
def debug(parser, args):
- action = {'create-db-tarball': create_db_tarball}
+ action = {
+ 'create-db-tarball': create_db_tarball,
+ 'report': report,
+ }
action[args.debug_command](args)
diff --git a/lib/spack/spack/cmd/dependencies.py b/lib/spack/spack/cmd/dependencies.py
index e65e050bfa..7f390341ef 100644
--- a/lib/spack/spack/cmd/dependencies.py
+++ b/lib/spack/spack/cmd/dependencies.py
@@ -9,6 +9,7 @@ from llnl.util.tty.colify import colify
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
+import spack.package
import spack.repo
import spack.store
@@ -52,22 +53,15 @@ def dependencies(parser, args):
else:
spec = specs[0]
-
- if not spec.virtual:
- packages = [spec.package]
- else:
- packages = [
- spack.repo.get(s.name)
- for s in spack.repo.path.providers_for(spec)]
-
- dependencies = set()
- for pkg in packages:
- possible = pkg.possible_dependencies(
- args.transitive, args.expand_virtuals, deptype=args.deptype)
- dependencies.update(possible)
+ dependencies = spack.package.possible_dependencies(
+ spec,
+ transitive=args.transitive,
+ expand_virtuals=args.expand_virtuals,
+ deptype=args.deptype
+ )
if spec.name in dependencies:
- dependencies.remove(spec.name)
+ del dependencies[spec.name]
if dependencies:
colify(sorted(dependencies))
diff --git a/lib/spack/spack/cmd/dependents.py b/lib/spack/spack/cmd/dependents.py
index e60733f589..89fd15ffda 100644
--- a/lib/spack/spack/cmd/dependents.py
+++ b/lib/spack/spack/cmd/dependents.py
@@ -30,7 +30,7 @@ def setup_parser(subparser):
def inverted_dependencies():
"""Iterate through all packages and return a dictionary mapping package
- names to possible dependnecies.
+ names to possible dependencies.
Virtual packages are included as sources, so that you can query
dependents of, e.g., `mpi`, but virtuals are not included as
diff --git a/lib/spack/spack/cmd/load.py b/lib/spack/spack/cmd/load.py
index 09f3fd31ee..3ef485941f 100644
--- a/lib/spack/spack/cmd/load.py
+++ b/lib/spack/spack/cmd/load.py
@@ -34,6 +34,14 @@ def setup_parser(subparser):
help="print csh commands to load the package")
subparser.add_argument(
+ '--first',
+ action='store_true',
+ default=False,
+ dest='load_first',
+ help="load the first match if multiple packages match the spec"
+ )
+
+ subparser.add_argument(
'--only',
default='package,dependencies',
dest='things_to_load',
@@ -47,10 +55,11 @@ the dependencies"""
def load(parser, args):
env = ev.get_env(args, 'load')
- specs = [spack.cmd.disambiguate_spec(spec, env)
+ specs = [spack.cmd.disambiguate_spec(spec, env, first=args.load_first)
for spec in spack.cmd.parse_specs(args.specs)]
if not args.shell:
+ specs_string = ' '.join(args.specs)
msg = [
"This command works best with Spack's shell support",
""
@@ -58,8 +67,8 @@ def load(parser, args):
'Or, if you want to use `spack load` without initializing',
'shell support, you can run one of these:',
'',
- ' eval `spack load --sh %s` # for bash/sh' % args.specs,
- ' eval `spack load --csh %s` # for csh/tcsh' % args.specs,
+ ' eval `spack load --sh %s` # for bash/sh' % specs_string,
+ ' eval `spack load --csh %s` # for csh/tcsh' % specs_string,
]
tty.msg(*msg)
return 1
diff --git a/lib/spack/spack/cmd/mirror.py b/lib/spack/spack/cmd/mirror.py
index 5206927895..1473550a56 100644
--- a/lib/spack/spack/cmd/mirror.py
+++ b/lib/spack/spack/cmd/mirror.py
@@ -45,7 +45,10 @@ def setup_parser(subparser):
" (this requires significant time and space)")
create_parser.add_argument(
'-f', '--file', help="file with specs of packages to put in mirror")
-
+ create_parser.add_argument(
+ '--skip-unstable-versions', action='store_true',
+ help="don't cache versions unless they identify a stable (unchanging)"
+ " source code")
create_parser.add_argument(
'-D', '--dependencies', action='store_true',
help="also fetch all dependencies")
@@ -308,7 +311,8 @@ def mirror_create(args):
existed = web_util.url_exists(directory)
# Actually do the work to create the mirror
- present, mirrored, error = spack.mirror.create(directory, mirror_specs)
+ present, mirrored, error = spack.mirror.create(
+ directory, mirror_specs, args.skip_unstable_versions)
p, m, e = len(present), len(mirrored), len(error)
verb = "updated" if existed else "created"
diff --git a/lib/spack/spack/cmd/python.py b/lib/spack/spack/cmd/python.py
index 2f2290aad8..a91f3663fb 100644
--- a/lib/spack/spack/cmd/python.py
+++ b/lib/spack/spack/cmd/python.py
@@ -3,6 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+from __future__ import print_function
+
import os
import sys
import code
@@ -21,6 +23,9 @@ level = "long"
def setup_parser(subparser):
subparser.add_argument(
+ '-V', '--version', action='store_true',
+ help='print the Python version number and exit')
+ subparser.add_argument(
'-c', dest='python_command', help='command to execute')
subparser.add_argument(
'-m', dest='module', action='store',
@@ -31,6 +36,10 @@ def setup_parser(subparser):
def python(parser, args, unknown_args):
+ if args.version:
+ print('Python', platform.python_version())
+ return
+
if args.module:
sys.argv = ['spack-python'] + unknown_args + args.python_args
runpy.run_module(args.module, run_name="__main__", alter_sys=True)
diff --git a/lib/spack/spack/cmd/repo.py b/lib/spack/spack/cmd/repo.py
index 83acf796a2..f271790994 100644
--- a/lib/spack/spack/cmd/repo.py
+++ b/lib/spack/spack/cmd/repo.py
@@ -8,10 +8,9 @@ from __future__ import print_function
import os
import llnl.util.tty as tty
-
-import spack.spec
import spack.config
-from spack.repo import Repo, create_repo, canonicalize_path, RepoError
+import spack.repo
+import spack.util.path
description = "manage package source repositories"
section = "config"
@@ -61,7 +60,9 @@ def setup_parser(subparser):
def repo_create(args):
"""Create a new package repository."""
- full_path, namespace = create_repo(args.directory, args.namespace)
+ full_path, namespace = spack.repo.create_repo(
+ args.directory, args.namespace
+ )
tty.msg("Created repo with namespace '%s'." % namespace)
tty.msg("To register it with spack, run this command:",
'spack repo add %s' % full_path)
@@ -72,7 +73,7 @@ def repo_add(args):
path = args.path
# real_path is absolute and handles substitution.
- canon_path = canonicalize_path(path)
+ canon_path = spack.util.path.canonicalize_path(path)
# check if the path exists
if not os.path.exists(canon_path):
@@ -83,7 +84,7 @@ def repo_add(args):
tty.die("Not a Spack repository: %s" % path)
# Make sure it's actually a spack repository by constructing it.
- repo = Repo(canon_path)
+ repo = spack.repo.Repo(canon_path)
# If that succeeds, finally add it to the configuration.
repos = spack.config.get('repos', scope=args.scope)
@@ -104,9 +105,9 @@ def repo_remove(args):
namespace_or_path = args.namespace_or_path
# If the argument is a path, remove that repository from config.
- canon_path = canonicalize_path(namespace_or_path)
+ canon_path = spack.util.path.canonicalize_path(namespace_or_path)
for repo_path in repos:
- repo_canon_path = canonicalize_path(repo_path)
+ repo_canon_path = spack.util.path.canonicalize_path(repo_path)
if canon_path == repo_canon_path:
repos.remove(repo_path)
spack.config.set('repos', repos, args.scope)
@@ -116,14 +117,14 @@ def repo_remove(args):
# If it is a namespace, remove corresponding repo
for path in repos:
try:
- repo = Repo(path)
+ repo = spack.repo.Repo(path)
if repo.namespace == namespace_or_path:
repos.remove(path)
spack.config.set('repos', repos, args.scope)
tty.msg("Removed repository %s with namespace '%s'."
% (repo.root, repo.namespace))
return
- except RepoError:
+ except spack.repo.RepoError:
continue
tty.die("No repository with path or namespace: %s"
@@ -136,8 +137,8 @@ def repo_list(args):
repos = []
for r in roots:
try:
- repos.append(Repo(r))
- except RepoError:
+ repos.append(spack.repo.Repo(r))
+ except spack.repo.RepoError:
continue
msg = "%d package repositor" % len(repos)
diff --git a/lib/spack/spack/compiler.py b/lib/spack/spack/compiler.py
index 36a2704eac..9fc7aa51a2 100644
--- a/lib/spack/spack/compiler.py
+++ b/lib/spack/spack/compiler.py
@@ -250,13 +250,13 @@ class Compiler(object):
PrgEnv_compiler = None
def __init__(self, cspec, operating_system, target,
- paths, modules=[], alias=None, environment=None,
+ paths, modules=None, alias=None, environment=None,
extra_rpaths=None, enable_implicit_rpaths=None,
**kwargs):
self.spec = cspec
self.operating_system = str(operating_system)
self.target = target
- self.modules = modules
+ self.modules = modules or []
self.alias = alias
self.extra_rpaths = extra_rpaths
self.enable_implicit_rpaths = enable_implicit_rpaths
@@ -317,6 +317,10 @@ class Compiler(object):
first_compiler = next((c for c in paths if c), None)
if not first_compiler:
return []
+ if not cls.verbose_flag():
+ # In this case there is no mechanism to learn what link directories
+ # are used by the compiler
+ return []
try:
tmpdir = tempfile.mkdtemp(prefix='spack-implicit-link-info')
diff --git a/lib/spack/spack/compilers/arm.py b/lib/spack/spack/compilers/arm.py
index ffce1e2b01..ca17ff42e8 100644
--- a/lib/spack/spack/compilers/arm.py
+++ b/lib/spack/spack/compilers/arm.py
@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import spack.compiler
+import re
class Arm(spack.compiler.Compiler):
@@ -35,7 +36,20 @@ class Arm(spack.compiler.Compiler):
# InstalledDir:
# /opt/arm/arm-hpc-compiler-19.0_Generic-AArch64_RHEL-7_aarch64-linux/bin
version_argument = '--version'
- version_regex = r'Arm C\/C\+\+\/Fortran Compiler version ([^ )]+)'
+ version_regex = r'Arm C\/C\+\+\/Fortran Compiler version ([\d\.]+) '\
+ r'\(build number (\d+)\) '
+
+ @classmethod
+ def extract_version_from_output(cls, output):
+ """Extracts the version from compiler's output."""
+ match = re.search(cls.version_regex, output)
+ temp = 'unknown'
+ if match:
+ if match.group(1).count('.') == 1:
+ temp = match.group(1) + ".0." + match.group(2)
+ else:
+ temp = match.group(1) + "." + match.group(2)
+ return temp
@classmethod
def verbose_flag(cls):
diff --git a/lib/spack/spack/compilers/fj.py b/lib/spack/spack/compilers/fj.py
index 083fe35b0d..1f7d52f026 100644
--- a/lib/spack/spack/compilers/fj.py
+++ b/lib/spack/spack/compilers/fj.py
@@ -61,7 +61,3 @@ class Fj(spack.compiler.Compiler):
@property
def pic_flag(self):
return "-KPIC"
-
- def setup_custom_environment(self, pkg, env):
- env.append_flags('fcc_ENV', '-Nclang')
- env.append_flags('FCC_ENV', '-Nclang')
diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py
index b1c0ad73c7..445d62d2ab 100644
--- a/lib/spack/spack/config.py
+++ b/lib/spack/spack/config.py
@@ -97,6 +97,7 @@ configuration_paths = (
config_defaults = {
'config': {
'debug': False,
+ 'connect_timeout': 10,
'verify_ssl': True,
'checksum': True,
'dirty': False,
@@ -279,6 +280,7 @@ class InternalConfigScope(ConfigScope):
self.sections = syaml.syaml_dict()
if data:
+ data = InternalConfigScope._process_dict_keyname_overrides(data)
for section in data:
dsec = data[section]
validate({section: dsec}, section_schemas[section])
@@ -305,6 +307,25 @@ class InternalConfigScope(ConfigScope):
def __repr__(self):
return '<InternalConfigScope: %s>' % self.name
+ @staticmethod
+ def _process_dict_keyname_overrides(data):
+ """Turn a trailing `:' in a key name into an override attribute."""
+ result = {}
+ for sk, sv in iteritems(data):
+ if sk.endswith(':'):
+ key = syaml.syaml_str(sk[:-1])
+ key.override = True
+ else:
+ key = sk
+
+ if isinstance(sv, dict):
+ result[key]\
+ = InternalConfigScope._process_dict_keyname_overrides(sv)
+ else:
+ result[key] = copy.copy(sv)
+
+ return result
+
class Configuration(object):
"""A full Spack configuration, from a hierarchy of config files.
@@ -504,14 +525,14 @@ class Configuration(object):
Accepts the path syntax described in ``get()``.
"""
- section, _, rest = path.partition(':')
+ parts = _process_config_path(path)
+ section = parts.pop(0)
- if not rest:
+ if not parts:
self.update_config(section, value, scope=scope)
else:
section_data = self.get_config(section, scope=scope)
- parts = rest.split(':')
data = section_data
while len(parts) > 1:
key = parts.pop(0)
@@ -611,7 +632,7 @@ def _config():
"""Singleton Configuration instance.
This constructs one instance associated with this module and returns
- it. It is bundled inside a function so that configuratoin can be
+ it. It is bundled inside a function so that configuration can be
initialized lazily.
Return:
@@ -762,17 +783,12 @@ def _merge_yaml(dest, source):
Config file authors can optionally end any attribute in a dict
with `::` instead of `:`, and the key will override that of the
parent instead of merging.
-
"""
def they_are(t):
return isinstance(dest, t) and isinstance(source, t)
- # If both are None, handle specially and return None.
- if source is None and dest is None:
- return None
-
# If source is None, overwrite with source.
- elif source is None:
+ if source is None:
return None
# Source list is prepended (for precedence)
@@ -798,8 +814,9 @@ def _merge_yaml(dest, source):
# to copy mark information on source keys to dest.
key_marks[sk] = sk
- # ensure that keys are marked in the destination. the key_marks dict
- # ensures we can get the actual source key objects from dest keys
+ # ensure that keys are marked in the destination. The
+ # key_marks dict ensures we can get the actual source key
+ # objects from dest keys
for dk in list(dest.keys()):
if dk in key_marks and syaml.markable(dk):
syaml.mark(dk, key_marks[dk])
@@ -811,9 +828,34 @@ def _merge_yaml(dest, source):
return dest
- # In any other case, overwrite with a copy of the source value.
- else:
- return copy.copy(source)
+ # If we reach here source and dest are either different types or are
+ # not both lists or dicts: replace with source.
+ return copy.copy(source)
+
+
+#
+# Process a path argument to config.set() that may contain overrides ('::' or
+# trailing ':')
+#
+def _process_config_path(path):
+ result = []
+ if path.startswith(':'):
+ raise syaml.SpackYAMLError("Illegal leading `:' in path `{0}'".
+ format(path), '')
+ seen_override_in_path = False
+ while path:
+ front, sep, path = path.partition(':')
+ if (sep and not path) or path.startswith(':'):
+ if seen_override_in_path:
+ raise syaml.SpackYAMLError("Meaningless second override"
+ " indicator `::' in path `{0}'".
+ format(path), '')
+ path = path.lstrip(':')
+ front = syaml.syaml_str(front)
+ front.override = True
+ seen_override_in_path = True
+ result.append(front)
+ return result
#
diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py
index a01bc143aa..d7613ae58a 100644
--- a/lib/spack/spack/fetch_strategy.py
+++ b/lib/spack/spack/fetch_strategy.py
@@ -256,7 +256,7 @@ class URLFetchStrategy(FetchStrategy):
self.digest = kwargs[h]
self.expand_archive = kwargs.get('expand', True)
- self.extra_curl_options = kwargs.get('curl_options', [])
+ self.extra_options = kwargs.get('fetch_options', {})
self._curl = None
self.extension = kwargs.get('extension', None)
@@ -325,8 +325,6 @@ class URLFetchStrategy(FetchStrategy):
'-D',
'-', # print out HTML headers
'-L', # resolve 3xx redirects
- # Timeout if can't establish a connection after 10 sec.
- '--connect-timeout', '10',
url,
]
@@ -338,7 +336,22 @@ class URLFetchStrategy(FetchStrategy):
else:
curl_args.append('-sS') # just errors when not.
- curl_args += self.extra_curl_options
+ connect_timeout = spack.config.get('config:connect_timeout')
+
+ if self.extra_options:
+ cookie = self.extra_options.get('cookie')
+ if cookie:
+ curl_args.append('-j') # junk cookies
+ curl_args.append('-b') # specify cookie
+ curl_args.append(cookie)
+
+ timeout = self.extra_options.get('timeout')
+ if timeout:
+ connect_timeout = max(connect_timeout, int(timeout))
+
+ if connect_timeout > 0:
+ # Timeout if can't establish a connection after n sec.
+ curl_args.extend(['--connect-timeout', str(connect_timeout)])
# Run curl but grab the mime type from the http headers
curl = self.curl
@@ -1148,6 +1161,15 @@ class S3FetchStrategy(URLFetchStrategy):
raise FailedDownloadError(self.url)
+def stable_target(fetcher):
+ """Returns whether the fetcher target is expected to have a stable
+ checksum. This is only true if the target is a preexisting archive
+ file."""
+ if isinstance(fetcher, URLFetchStrategy) and fetcher.cachable:
+ return True
+ return False
+
+
def from_url(url):
"""Given a URL, find an appropriate fetch strategy for it.
Currently just gives you a URLFetchStrategy that uses curl.
@@ -1225,7 +1247,8 @@ def _check_version_attributes(fetcher, pkg, version):
def _extrapolate(pkg, version):
"""Create a fetcher from an extrapolated URL for this version."""
try:
- return URLFetchStrategy(pkg.url_for_version(version))
+ return URLFetchStrategy(pkg.url_for_version(version),
+ fetch_options=pkg.fetch_options)
except spack.package.NoURLError:
msg = ("Can't extrapolate a URL for version %s "
"because package %s defines no URLs")
@@ -1245,6 +1268,7 @@ def _from_merged_attrs(fetcher, pkg, version):
url = getattr(pkg, fetcher.url_attr)
attrs = {fetcher.url_attr: url}
+ attrs['fetch_options'] = pkg.fetch_options
attrs.update(pkg.versions[version])
return fetcher(**attrs)
@@ -1267,8 +1291,10 @@ def for_package_version(pkg, version):
if version not in pkg.versions:
return _extrapolate(pkg, version)
+ # Set package args first so version args can override them
+ args = {'fetch_options': pkg.fetch_options}
# Grab a dict of args out of the package version dict
- args = pkg.versions[version]
+ args.update(pkg.versions[version])
# If the version specifies a `url_attr` directly, use that.
for fetcher in all_strategies:
@@ -1348,7 +1374,8 @@ def from_list_url(pkg):
args.get('checksum'))
# construct a fetcher
- return URLFetchStrategy(url_from_list, checksum)
+ return URLFetchStrategy(url_from_list, checksum,
+ fetch_options=pkg.fetch_options)
except KeyError as e:
tty.debug(e)
tty.msg("Cannot find version %s in url_list" % pkg.version)
diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py
index 3f6b2b6a0e..045ca5ffec 100644
--- a/lib/spack/spack/mirror.py
+++ b/lib/spack/spack/mirror.py
@@ -401,7 +401,7 @@ def get_matching_versions(specs, num_versions=1):
return matching
-def create(path, specs):
+def create(path, specs, skip_unstable_versions=False):
"""Create a directory to be used as a spack mirror, and fill it with
package archives.
@@ -409,6 +409,9 @@ def create(path, specs):
path: Path to create a mirror directory hierarchy in.
specs: Any package versions matching these specs will be added \
to the mirror.
+ skip_unstable_versions: if true, this skips adding resources when
+ they do not have a stable archive checksum (as determined by
+ ``fetch_strategy.stable_target``)
Return Value:
Returns a tuple of lists: (present, mirrored, error)
@@ -440,16 +443,14 @@ def create(path, specs):
raise MirrorError(
"Cannot create directory '%s':" % mirror_root, str(e))
- mirror_cache = spack.caches.MirrorCache(mirror_root)
+ mirror_cache = spack.caches.MirrorCache(
+ mirror_root, skip_unstable_versions=skip_unstable_versions)
mirror_stats = MirrorStats()
- try:
- spack.caches.mirror_cache = mirror_cache
- # Iterate through packages and download all safe tarballs for each
- for spec in specs:
- mirror_stats.next_spec(spec)
- add_single_spec(spec, mirror_root, mirror_stats)
- finally:
- spack.caches.mirror_cache = None
+
+ # Iterate through packages and download all safe tarballs for each
+ for spec in specs:
+ mirror_stats.next_spec(spec)
+ _add_single_spec(spec, mirror_cache, mirror_stats)
return mirror_stats.stats()
@@ -495,7 +496,7 @@ class MirrorStats(object):
self.errors.add(self.current_spec)
-def add_single_spec(spec, mirror_root, mirror_stats):
+def _add_single_spec(spec, mirror, mirror_stats):
tty.msg("Adding package {pkg} to mirror".format(
pkg=spec.format("{name}{@version}")
))
@@ -503,10 +504,10 @@ def add_single_spec(spec, mirror_root, mirror_stats):
while num_retries > 0:
try:
with spec.package.stage as pkg_stage:
- pkg_stage.cache_mirror(mirror_stats)
+ pkg_stage.cache_mirror(mirror, mirror_stats)
for patch in spec.package.all_patches():
- if patch.cache():
- patch.cache().cache_mirror(mirror_stats)
+ if patch.stage:
+ patch.stage.cache_mirror(mirror, mirror_stats)
patch.clean()
exception = None
break
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index b2d841f145..b8ded0364b 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -477,6 +477,9 @@ class PackageBase(with_metaclass(PackageMeta, PackageViewMixin, object)):
#: This is currently only used by package sanity tests.
manual_download = False
+ #: Set of additional options used when fetching package versions.
+ fetch_options = {}
+
#
# Set default licensing information
#
@@ -602,11 +605,10 @@ class PackageBase(with_metaclass(PackageMeta, PackageViewMixin, object)):
"""
deptype = spack.dependency.canonical_deptype(deptype)
- if visited is None:
- visited = {cls.name: set()}
+ visited = {} if visited is None else visited
+ missing = {} if missing is None else missing
- if missing is None:
- missing = {cls.name: set()}
+ visited.setdefault(cls.name, set())
for name, conditions in cls.dependencies.items():
# check whether this dependency could be of the type asked for
@@ -621,6 +623,7 @@ class PackageBase(with_metaclass(PackageMeta, PackageViewMixin, object)):
providers = spack.repo.path.providers_for(name)
dep_names = [spec.name for spec in providers]
else:
+ visited.setdefault(cls.name, set()).add(name)
visited.setdefault(name, set())
continue
else:
@@ -1033,6 +1036,14 @@ class PackageBase(with_metaclass(PackageMeta, PackageViewMixin, object)):
)
@property
+ def virtuals_provided(self):
+ """
+        Virtual packages whose constraints are satisfied by this package's spec.
+ """
+ return [vspec for vspec, constraints in self.provided.items()
+ if any(self.spec.satisfies(c) for c in constraints)]
+
+ @property
def installed(self):
"""Installation status of a package.
@@ -1135,8 +1146,8 @@ class PackageBase(with_metaclass(PackageMeta, PackageViewMixin, object)):
for patch in self.spec.patches:
patch.fetch()
- if patch.cache():
- patch.cache().cache_local()
+ if patch.stage:
+ patch.stage.cache_local()
def do_stage(self, mirror_only=False):
"""Unpacks and expands the fetched tarball."""
@@ -2151,26 +2162,27 @@ def possible_dependencies(*pkg_or_spec, **kwargs):
See ``PackageBase.possible_dependencies`` for details.
"""
- transitive = kwargs.get('transitive', True)
- expand_virtuals = kwargs.get('expand_virtuals', True)
- deptype = kwargs.get('deptype', 'all')
- missing = kwargs.get('missing')
-
packages = []
for pos in pkg_or_spec:
if isinstance(pos, PackageMeta):
- pkg = pos
- elif isinstance(pos, spack.spec.Spec):
- pkg = pos.package
- else:
- pkg = spack.spec.Spec(pos).package
+ packages.append(pos)
+ continue
+
+ if not isinstance(pos, spack.spec.Spec):
+ pos = spack.spec.Spec(pos)
- packages.append(pkg)
+ if spack.repo.path.is_virtual(pos.name):
+ packages.extend(
+ p.package_class
+ for p in spack.repo.path.providers_for(pos.name)
+ )
+ continue
+ else:
+ packages.append(pos.package_class)
visited = {}
for pkg in packages:
- pkg.possible_dependencies(
- transitive, expand_virtuals, deptype, visited, missing)
+ pkg.possible_dependencies(visited=visited, **kwargs)
return visited
diff --git a/lib/spack/spack/package_prefs.py b/lib/spack/spack/package_prefs.py
index 2801a6d123..0158c7063a 100644
--- a/lib/spack/spack/package_prefs.py
+++ b/lib/spack/spack/package_prefs.py
@@ -6,7 +6,6 @@
import stat
from six import string_types
-from six import iteritems
import spack.repo
import spack.error
@@ -23,27 +22,6 @@ def _spec_type(component):
return _lesser_spec_types.get(component, spack.spec.Spec)
-def get_packages_config():
- """Wrapper around get_packages_config() to validate semantics."""
- config = spack.config.get('packages')
-
- # Get a list of virtuals from packages.yaml. Note that because we
- # check spack.repo, this collects virtuals that are actually provided
- # by sometihng, not just packages/names that don't exist.
- # So, this won't include, e.g., 'all'.
- virtuals = [(pkg_name, pkg_name._start_mark) for pkg_name in config
- if spack.repo.path.is_virtual(pkg_name)]
-
- # die if there are virtuals in `packages.py`
- if virtuals:
- errors = ["%s: %s" % (line_info, name) for name, line_info in virtuals]
- raise VirtualInPackagesYAMLError(
- "packages.yaml entries cannot be virtual packages:",
- '\n'.join(errors))
-
- return config
-
-
class PackagePrefs(object):
"""Defines the sort order for a set of specs.
@@ -116,7 +94,7 @@ class PackagePrefs(object):
pkglist.append('all')
for pkg in pkglist:
- pkg_entry = get_packages_config().get(pkg)
+ pkg_entry = spack.config.get('packages').get(pkg)
if not pkg_entry:
continue
@@ -160,7 +138,8 @@ class PackagePrefs(object):
def preferred_variants(cls, pkg_name):
"""Return a VariantMap of preferred variants/values for a spec."""
for pkg in (pkg_name, 'all'):
- variants = get_packages_config().get(pkg, {}).get('variants', '')
+ variants = spack.config.get('packages').get(pkg, {}).get(
+ 'variants', '')
if variants:
break
@@ -181,33 +160,29 @@ def spec_externals(spec):
# break circular import.
from spack.util.module_cmd import get_path_from_module # NOQA: ignore=F401
- allpkgs = get_packages_config()
- name = spec.name
+ allpkgs = spack.config.get('packages')
+ names = set([spec.name])
+ names |= set(vspec.name for vspec in spec.package.virtuals_provided)
external_specs = []
- pkg_paths = allpkgs.get(name, {}).get('paths', None)
- pkg_modules = allpkgs.get(name, {}).get('modules', None)
- if (not pkg_paths) and (not pkg_modules):
- return []
-
- for external_spec, path in iteritems(pkg_paths):
- if not path:
- # skip entries without paths (avoid creating extra Specs)
+ for name in names:
+ pkg_config = allpkgs.get(name, {})
+ pkg_paths = pkg_config.get('paths', {})
+ pkg_modules = pkg_config.get('modules', {})
+ if (not pkg_paths) and (not pkg_modules):
continue
- external_spec = spack.spec.Spec(external_spec,
- external_path=canonicalize_path(path))
- if external_spec.satisfies(spec):
- external_specs.append(external_spec)
-
- for external_spec, module in iteritems(pkg_modules):
- if not module:
- continue
+ for external_spec, path in pkg_paths.items():
+ external_spec = spack.spec.Spec(
+ external_spec, external_path=canonicalize_path(path))
+ if external_spec.satisfies(spec):
+ external_specs.append(external_spec)
- external_spec = spack.spec.Spec(
- external_spec, external_module=module)
- if external_spec.satisfies(spec):
- external_specs.append(external_spec)
+ for external_spec, module in pkg_modules.items():
+ external_spec = spack.spec.Spec(
+ external_spec, external_module=module)
+ if external_spec.satisfies(spec):
+ external_specs.append(external_spec)
# defensively copy returned specs
return [s.copy() for s in external_specs]
@@ -215,12 +190,11 @@ def spec_externals(spec):
def is_spec_buildable(spec):
"""Return true if the spec pkgspec is configured as buildable"""
- allpkgs = get_packages_config()
- if spec.name not in allpkgs:
- return True
- if 'buildable' not in allpkgs[spec.name]:
- return True
- return allpkgs[spec.name]['buildable']
+ allpkgs = spack.config.get('packages')
+ do_not_build = [name for name, entry in allpkgs.items()
+ if not entry.get('buildable', True)]
+ return not (spec.name in do_not_build or
+ any(spec.package.provides(name) for name in do_not_build))
def get_package_dir_permissions(spec):
diff --git a/lib/spack/spack/patch.py b/lib/spack/spack/patch.py
index bcb45387a8..3a839c5b0f 100644
--- a/lib/spack/spack/patch.py
+++ b/lib/spack/spack/patch.py
@@ -85,7 +85,8 @@ class Patch(object):
apply_patch(stage, self.path, self.level, self.working_dir)
- def cache(self):
+ @property
+ def stage(self):
return None
def to_dict(self):
@@ -248,9 +249,6 @@ class UrlPatch(Patch):
self._stage.create()
return self._stage
- def cache(self):
- return self.stage
-
def clean(self):
self.stage.destroy()
@@ -348,7 +346,8 @@ class PatchCache(object):
sha_index = self.index.get(sha256)
if not sha_index:
raise NoSuchPatchError(
- "Couldn't find patch with sha256: %s" % sha256)
+ "Couldn't find patch for package %s with sha256: %s"
+ % (pkg.fullname, sha256))
patch_dict = sha_index.get(pkg.fullname)
if not patch_dict:
diff --git a/lib/spack/spack/pkgkit.py b/lib/spack/spack/pkgkit.py
index c304fb4fca..5c9c01ff9f 100644
--- a/lib/spack/spack/pkgkit.py
+++ b/lib/spack/spack/pkgkit.py
@@ -31,6 +31,8 @@ from spack.build_systems.intel import IntelPackage
from spack.build_systems.meson import MesonPackage
from spack.build_systems.sip import SIPPackage
from spack.build_systems.gnu import GNUMirrorPackage
+from spack.build_systems.sourceware import SourcewarePackage
+from spack.build_systems.xorg import XorgPackage
from spack.mixins import filter_compiler_wrappers
diff --git a/lib/spack/spack/provider_index.py b/lib/spack/spack/provider_index.py
index 9bf4af8911..326f6aa8f1 100644
--- a/lib/spack/spack/provider_index.py
+++ b/lib/spack/spack/provider_index.py
@@ -2,54 +2,147 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+"""Classes and functions to manage providers of virtual dependencies"""
+import itertools
-"""
-The ``virtual`` module contains utility classes for virtual dependencies.
-"""
-
-from itertools import product as iproduct
-from six import iteritems
-from pprint import pformat
-
+import six
import spack.error
import spack.util.spack_json as sjson
-class ProviderIndex(object):
- """This is a dict of dicts used for finding providers of particular
- virtual dependencies. The dict of dicts looks like:
+def _cross_provider_maps(lmap, rmap):
+ """Return a dictionary that combines constraint requests from both input.
- { vpkg name :
- { full vpkg spec : set(packages providing spec) } }
+ Args:
+ lmap: main provider map
+ rmap: provider map with additional constraints
+ """
+ # TODO: this is pretty darned nasty, and inefficient, but there
+ # TODO: are not that many vdeps in most specs.
+ result = {}
+ for lspec, rspec in itertools.product(lmap, rmap):
+ try:
+ constrained = lspec.constrained(rspec)
+ except spack.error.UnsatisfiableSpecError:
+ continue
+
+ # lp and rp are left and right provider specs.
+ for lp_spec, rp_spec in itertools.product(lmap[lspec], rmap[rspec]):
+ if lp_spec.name == rp_spec.name:
+ try:
+ const = lp_spec.constrained(rp_spec, deps=False)
+ result.setdefault(constrained, set()).add(const)
+ except spack.error.UnsatisfiableSpecError:
+ continue
+ return result
+
+
+class _IndexBase(object):
+ #: This is a dict of dicts used for finding providers of particular
+ #: virtual dependencies. The dict of dicts looks like:
+ #:
+ #: { vpkg name :
+ #: { full vpkg spec : set(packages providing spec) } }
+ #:
+ #: Callers can use this to first find which packages provide a vpkg,
+ #: then find a matching full spec. e.g., in this scenario:
+ #:
+ #: { 'mpi' :
+ #: { mpi@:1.1 : set([mpich]),
+ #: mpi@:2.3 : set([mpich2@1.9:]) } }
+ #:
+ #: Calling providers_for(spec) will find specs that provide a
+    #:  matching implementation of MPI. Derived classes need to construct
+ #: this attribute according to the semantics above.
+ providers = None
+
+ def providers_for(self, virtual_spec):
+ """Return a list of specs of all packages that provide virtual
+ packages with the supplied spec.
+
+ Args:
+ virtual_spec: virtual spec to be provided
+ """
+ result = set()
+ # Allow string names to be passed as input, as well as specs
+ if isinstance(virtual_spec, six.string_types):
+ virtual_spec = spack.spec.Spec(virtual_spec)
- Callers can use this to first find which packages provide a vpkg,
- then find a matching full spec. e.g., in this scenario:
+ # Add all the providers that satisfy the vpkg spec.
+ if virtual_spec.name in self.providers:
+ for p_spec, spec_set in self.providers[virtual_spec.name].items():
+ if p_spec.satisfies(virtual_spec, deps=False):
+ result.update(spec_set)
- { 'mpi' :
- { mpi@:1.1 : set([mpich]),
- mpi@:2.3 : set([mpich2@1.9:]) } }
+ # Return providers in order. Defensively copy.
+ return sorted(s.copy() for s in result)
- Calling providers_for(spec) will find specs that provide a
- matching implementation of MPI.
+ def __contains__(self, name):
+ return name in self.providers
- """
+ def satisfies(self, other):
+ """Determine if the providers of virtual specs are compatible.
- def __init__(self, specs=None, restrict=False):
- """Create a new ProviderIndex.
+ Args:
+ other: another provider index
+
+ Returns:
+ True if the providers are compatible, False otherwise.
+ """
+ common = set(self.providers) & set(other.providers)
+ if not common:
+ return True
+
+ # This ensures that some provider in other COULD satisfy the
+ # vpkg constraints on self.
+ result = {}
+ for name in common:
+ crossed = _cross_provider_maps(
+ self.providers[name], other.providers[name]
+ )
+ if crossed:
+ result[name] = crossed
+
+ return all(c in result for c in common)
+
+ def __eq__(self, other):
+ return self.providers == other.providers
- Optional arguments:
+ def _transform(self, transform_fun, out_mapping_type=dict):
+ """Transform this provider index dictionary and return it.
+
+ Args:
+ transform_fun: transform_fun takes a (vpkg, pset) mapping and runs
+ it on each pair in nested dicts.
+ out_mapping_type: type to be used internally on the
+ transformed (vpkg, pset)
+
+ Returns:
+ Transformed mapping
+ """
+ return _transform(self.providers, transform_fun, out_mapping_type)
+
+ def __str__(self):
+ return str(self.providers)
- specs
- List (or sequence) of specs. If provided, will call
- `update` on this ProviderIndex with each spec in the list.
+ def __repr__(self):
+ return repr(self.providers)
- restrict
- "restricts" values to the verbatim input specs; do not
- pre-apply package's constraints.
- TODO: rename this. It is intended to keep things as broad
- as possible without overly restricting results, so it is
- not the best name.
+class ProviderIndex(_IndexBase):
+ def __init__(self, specs=None, restrict=False):
+ """Provider index based on a single mapping of providers.
+
+ Args:
+ specs (list of specs): if provided, will call update on each
+ single spec to initialize this provider index.
+
+ restrict: "restricts" values to the verbatim input specs; do not
+ pre-apply package's constraints.
+
+ TODO: rename this. It is intended to keep things as broad
+ TODO: as possible without overly restricting results, so it is
+ TODO: not the best name.
"""
if specs is None:
specs = []
@@ -67,6 +160,11 @@ class ProviderIndex(object):
self.update(spec)
def update(self, spec):
+ """Update the provider index with additional virtual specs.
+
+ Args:
+ spec: spec potentially providing additional virtual specs
+ """
if not isinstance(spec, spack.spec.Spec):
spec = spack.spec.Spec(spec)
@@ -74,10 +172,10 @@ class ProviderIndex(object):
# Empty specs do not have a package
return
- assert(not spec.virtual)
+ assert not spec.virtual, "cannot update an index using a virtual spec"
pkg_provided = spec.package_class.provided
- for provided_spec, provider_specs in iteritems(pkg_provided):
+ for provided_spec, provider_specs in six.iteritems(pkg_provided):
for provider_spec in provider_specs:
# TODO: fix this comment.
# We want satisfaction other than flags
@@ -110,94 +208,24 @@ class ProviderIndex(object):
constrained.constrain(provider_spec)
provider_map[provided_spec].add(constrained)
- def providers_for(self, *vpkg_specs):
- """Gives specs of all packages that provide virtual packages
- with the supplied specs."""
- providers = set()
- for vspec in vpkg_specs:
- # Allow string names to be passed as input, as well as specs
- if type(vspec) == str:
- vspec = spack.spec.Spec(vspec)
-
- # Add all the providers that satisfy the vpkg spec.
- if vspec.name in self.providers:
- for p_spec, spec_set in self.providers[vspec.name].items():
- if p_spec.satisfies(vspec, deps=False):
- providers.update(spec_set)
-
- # Return providers in order. Defensively copy.
- return sorted(s.copy() for s in providers)
-
- # TODO: this is pretty darned nasty, and inefficient, but there
- # are not that many vdeps in most specs.
- def _cross_provider_maps(self, lmap, rmap):
- result = {}
- for lspec, rspec in iproduct(lmap, rmap):
- try:
- constrained = lspec.constrained(rspec)
- except spack.error.UnsatisfiableSpecError:
- continue
-
- # lp and rp are left and right provider specs.
- for lp_spec, rp_spec in iproduct(lmap[lspec], rmap[rspec]):
- if lp_spec.name == rp_spec.name:
- try:
- const = lp_spec.constrained(rp_spec, deps=False)
- result.setdefault(constrained, set()).add(const)
- except spack.error.UnsatisfiableSpecError:
- continue
- return result
-
- def __contains__(self, name):
- """Whether a particular vpkg name is in the index."""
- return name in self.providers
-
- def satisfies(self, other):
- """Check that providers of virtual specs are compatible."""
- common = set(self.providers) & set(other.providers)
- if not common:
- return True
-
- # This ensures that some provider in other COULD satisfy the
- # vpkg constraints on self.
- result = {}
- for name in common:
- crossed = self._cross_provider_maps(self.providers[name],
- other.providers[name])
- if crossed:
- result[name] = crossed
-
- return all(c in result for c in common)
-
def to_json(self, stream=None):
+ """Dump a JSON representation of this object.
+
+ Args:
+ stream: stream where to dump
+ """
provider_list = self._transform(
lambda vpkg, pset: [
vpkg.to_node_dict(), [p.to_node_dict() for p in pset]], list)
sjson.dump({'provider_index': {'providers': provider_list}}, stream)
- @staticmethod
- def from_json(stream):
- data = sjson.load(stream)
-
- if not isinstance(data, dict):
- raise ProviderIndexError("JSON ProviderIndex data was not a dict.")
-
- if 'provider_index' not in data:
- raise ProviderIndexError(
- "YAML ProviderIndex does not start with 'provider_index'")
-
- index = ProviderIndex()
- providers = data['provider_index']['providers']
- index.providers = _transform(
- providers,
- lambda vpkg, plist: (
- spack.spec.Spec.from_node_dict(vpkg),
- set(spack.spec.Spec.from_node_dict(p) for p in plist)))
- return index
-
def merge(self, other):
- """Merge `other` ProviderIndex into this one."""
+ """Merge another provider index into this one.
+
+ Args:
+ other (ProviderIndex): provider index to be merged
+ """
other = other.copy() # defensive copy.
for pkg in other.providers:
@@ -236,40 +264,61 @@ class ProviderIndex(object):
del self.providers[pkg]
def copy(self):
- """Deep copy of this ProviderIndex."""
+ """Return a deep copy of this index."""
clone = ProviderIndex()
clone.providers = self._transform(
lambda vpkg, pset: (vpkg, set((p.copy() for p in pset))))
return clone
- def __eq__(self, other):
- return self.providers == other.providers
+ @staticmethod
+ def from_json(stream):
+ """Construct a provider index from its JSON representation.
- def _transform(self, transform_fun, out_mapping_type=dict):
- return _transform(self.providers, transform_fun, out_mapping_type)
+ Args:
+ stream: stream where to read from the JSON data
+ """
+ data = sjson.load(stream)
- def __str__(self):
- return pformat(
- _transform(self.providers,
- lambda k, v: (k, list(v))))
+ if not isinstance(data, dict):
+ raise ProviderIndexError("JSON ProviderIndex data was not a dict.")
+
+ if 'provider_index' not in data:
+ raise ProviderIndexError(
+ "YAML ProviderIndex does not start with 'provider_index'")
+
+ index = ProviderIndex()
+ providers = data['provider_index']['providers']
+ index.providers = _transform(
+ providers,
+ lambda vpkg, plist: (
+ spack.spec.Spec.from_node_dict(vpkg),
+ set(spack.spec.Spec.from_node_dict(p) for p in plist)))
+ return index
def _transform(providers, transform_fun, out_mapping_type=dict):
"""Syntactic sugar for transforming a providers dict.
- transform_fun takes a (vpkg, pset) mapping and runs it on each
- pair in nested dicts.
+ Args:
+ providers: provider dictionary
+ transform_fun: transform_fun takes a (vpkg, pset) mapping and runs
+ it on each pair in nested dicts.
+ out_mapping_type: type to be used internally on the
+ transformed (vpkg, pset)
+ Returns:
+ Transformed mapping
"""
def mapiter(mappings):
if isinstance(mappings, dict):
- return iteritems(mappings)
+ return six.iteritems(mappings)
else:
return iter(mappings)
return dict(
- (name, out_mapping_type([
- transform_fun(vpkg, pset) for vpkg, pset in mapiter(mappings)]))
+ (name, out_mapping_type(
+ [transform_fun(vpkg, pset) for vpkg, pset in mapiter(mappings)]
+ ))
for name, mappings in providers.items())
diff --git a/lib/spack/spack/relocate.py b/lib/spack/spack/relocate.py
index 56fc993b5f..9f8669f3d4 100644
--- a/lib/spack/spack/relocate.py
+++ b/lib/spack/spack/relocate.py
@@ -2,84 +2,99 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-
import os
+import platform
import re
import shutil
-import platform
-import spack.repo
-import spack.cmd
+
import llnl.util.lang
-from spack.util.executable import Executable, ProcessError
import llnl.util.tty as tty
+import macholib.MachO
+import macholib.mach_o
+import spack.cmd
+import spack.repo
+import spack.spec
+import spack.util.executable as executable
-class InstallRootStringException(spack.error.SpackError):
- """
- Raised when the relocated binary still has the install root string.
- """
-
+class InstallRootStringError(spack.error.SpackError):
def __init__(self, file_path, root_path):
- super(InstallRootStringException, self).__init__(
+ """Signal that the relocated binary still has the original
+ Spack's store root string
+
+ Args:
+ file_path (str): path of the binary
+ root_path (str): original Spack's store root string
+ """
+ super(InstallRootStringError, self).__init__(
"\n %s \ncontains string\n %s \n"
"after replacing it in rpaths.\n"
"Package should not be relocated.\n Use -a to override." %
(file_path, root_path))
-class BinaryStringReplacementException(spack.error.SpackError):
- """
- Raised when the size of the file changes after binary path substitution.
- """
-
+class BinaryStringReplacementError(spack.error.SpackError):
def __init__(self, file_path, old_len, new_len):
- super(BinaryStringReplacementException, self).__init__(
+ """The size of the file changed after binary path substitution
+
+ Args:
+ file_path (str): file with changing size
+            old_len (int): original length of the file
+            new_len (int): length of the file after substitution
+ """
+ super(BinaryStringReplacementError, self).__init__(
"Doing a binary string replacement in %s failed.\n"
"The size of the file changed from %s to %s\n"
"when it should have remanined the same." %
(file_path, old_len, new_len))
-class MissingMacholibException(spack.error.SpackError):
- """
- Raised when the size of the file changes after binary path substitution.
- """
+class BinaryTextReplaceError(spack.error.SpackError):
+ def __init__(self, old_path, new_path):
+ """Raised when the new install path is longer than the
+ old one, so binary text replacement cannot occur.
- def __init__(self, error):
- super(MissingMacholibException, self).__init__(
- "%s\n"
- "Python package macholib needs to be avaiable to list\n"
- "and modify a mach-o binary's rpaths, deps and id.\n"
- "Use virtualenv with pip install macholib or\n"
- "use spack to install the py-macholib package\n"
- "spack install py-macholib\n"
- "spack activate py-macholib\n"
- "spack load python\n"
- % error)
+ Args:
+ old_path (str): original path to be substituted
+ new_path (str): candidate path for substitution
+ """
+ msg = "New path longer than old path: binary text"
+ msg += " replacement not possible."
+ err_msg = "The new path %s" % new_path
+ err_msg += " is longer than the old path %s.\n" % old_path
+ err_msg += "Text replacement in binaries will not work.\n"
+ err_msg += "Create buildcache from an install path "
+ err_msg += "longer than new path."
+ super(BinaryTextReplaceError, self).__init__(msg, err_msg)
-def get_patchelf():
- """
- Builds and installs spack patchelf package on linux platforms
- using the first concretized spec.
- Returns the full patchelf binary path.
+
+def _patchelf():
+ """Return the full path to the patchelf binary, if available, else None.
+
+    First search the current PATH for patchelf. If not found, check
+    whether the default patchelf spec is installed; if not, install it.
+
+ Return None on Darwin or if patchelf cannot be found.
"""
- # as we may need patchelf, find out where it is
+ # Check if patchelf is already in the PATH
patchelf = spack.util.executable.which('patchelf')
if patchelf is not None:
return patchelf.path
- else:
- if str(spack.architecture.platform()) == 'test':
- return None
- if str(spack.architecture.platform()) == 'darwin':
- return None
- patchelf_spec = spack.cmd.parse_specs("patchelf", concretize=True)[0]
- patchelf = spack.repo.get(patchelf_spec)
- if not patchelf.installed:
- patchelf.do_install(use_cache=False)
- patchelf_executable = os.path.join(patchelf.prefix.bin, "patchelf")
- return patchelf_executable
+
+ # Check if patchelf spec is installed
+ spec = spack.spec.Spec('patchelf').concretized()
+ exe_path = os.path.join(spec.prefix.bin, "patchelf")
+ if spec.package.installed and os.path.exists(exe_path):
+ return exe_path
+
+ # Skip darwin
+ if str(spack.architecture.platform()) == 'darwin':
+ return None
+
+ # Install the spec and return its path
+ spec.package.do_install()
+ return exe_path if os.path.exists(exe_path) else None
def get_existing_elf_rpaths(path_name):
@@ -90,38 +105,58 @@ def get_existing_elf_rpaths(path_name):
# if we're relocating patchelf itself, use it
- if path_name[-13:] == "/bin/patchelf":
- patchelf = Executable(path_name)
+ if path_name.endswith("/bin/patchelf"):
+ patchelf = executable.Executable(path_name)
else:
- patchelf = Executable(get_patchelf())
+ patchelf = executable.Executable(_patchelf())
+ rpaths = list()
try:
output = patchelf('--print-rpath', '%s' %
path_name, output=str, error=str)
- return output.rstrip('\n').split(':')
- except ProcessError as e:
- tty.debug('patchelf --print-rpath produced an error on %s' %
- path_name, e)
- return []
- return
+ rpaths = output.rstrip('\n').split(':')
+ except executable.ProcessError as e:
+ msg = 'patchelf --print-rpath %s produced an error %s' % (path_name, e)
+ tty.warn(msg)
+ return rpaths
-def get_relative_rpaths(path_name, orig_dir, orig_rpaths):
+def get_relative_elf_rpaths(path_name, orig_layout_root, orig_rpaths):
"""
- Replaces orig_dir with relative path from dirname(path_name) if an rpath
- in orig_rpaths contains orig_path. Prefixes $ORIGIN
+ Replaces orig rpath with relative path from dirname(path_name) if an rpath
+ in orig_rpaths contains orig_layout_root. Prefixes $ORIGIN
to relative paths and returns replacement rpaths.
"""
rel_rpaths = []
for rpath in orig_rpaths:
- if re.match(orig_dir, rpath):
+ if re.match(orig_layout_root, rpath):
rel = os.path.relpath(rpath, start=os.path.dirname(path_name))
- rel_rpaths.append('$ORIGIN/%s' % rel)
+ rel_rpaths.append(os.path.join('$ORIGIN', '%s' % rel))
else:
rel_rpaths.append(rpath)
return rel_rpaths
+def get_normalized_elf_rpaths(orig_path_name, rel_rpaths):
+ """
+ Normalize the relative rpaths with respect to the original path name
+ of the file. If the rpath starts with $ORIGIN replace $ORIGIN with the
+ dirname of the original path name and then normalize the rpath.
+    A list of normalized rpaths is returned.
+ """
+ norm_rpaths = list()
+ for rpath in rel_rpaths:
+ if rpath.startswith('$ORIGIN'):
+ sub = re.sub(re.escape('$ORIGIN'),
+ os.path.dirname(orig_path_name),
+ rpath)
+ norm = os.path.normpath(sub)
+ norm_rpaths.append(norm)
+ else:
+ norm_rpaths.append(rpath)
+ return norm_rpaths
+
+
def set_placeholder(dirname):
"""
return string of @'s with same length
@@ -129,183 +164,157 @@ def set_placeholder(dirname):
return '@' * len(dirname)
-def get_placeholder_rpaths(path_name, orig_rpaths):
+def macho_make_paths_relative(path_name, old_layout_root,
+ rpaths, deps, idpath):
"""
- Replaces original layout root dir with a placeholder string in all rpaths.
+ Return a dictionary mapping the original rpaths to the relativized rpaths.
+ This dictionary is used to replace paths in mach-o binaries.
+ Replace old_dir with relative path from dirname of path name
+ in rpaths and deps; idpath is replaced with @rpath/libname.
"""
- rel_rpaths = []
- orig_dir = spack.store.layout.root
- for rpath in orig_rpaths:
- if re.match(orig_dir, rpath):
- placeholder = set_placeholder(orig_dir)
- rel = re.sub(orig_dir, placeholder, rpath)
- rel_rpaths.append('%s' % rel)
- else:
- rel_rpaths.append(rpath)
- return rel_rpaths
-
-
-def macho_get_paths(path_name):
- """
- Examines the output of otool -l path_name for these three fields:
- LC_ID_DYLIB, LC_LOAD_DYLIB, LC_RPATH and parses out the rpaths,
- dependiencies and library id.
- Returns these values.
- """
- otool = Executable('otool')
- output = otool("-l", path_name, output=str, err=str)
- last_cmd = None
- idpath = None
- rpaths = []
- deps = []
- for line in output.split('\n'):
- match = re.search('( *[a-zA-Z]+ )(.*)', line)
- if match:
- lhs = match.group(1).lstrip().rstrip()
- rhs = match.group(2)
- match2 = re.search(r'(.*) \(.*\)', rhs)
- if match2:
- rhs = match2.group(1)
- if lhs == 'cmd':
- last_cmd = rhs
- if lhs == 'path' and last_cmd == 'LC_RPATH':
- rpaths.append(rhs)
- if lhs == 'name' and last_cmd == 'LC_ID_DYLIB':
- idpath = rhs
- if lhs == 'name' and last_cmd == 'LC_LOAD_DYLIB':
- deps.append(rhs)
- return rpaths, deps, idpath
-
-
-def macho_make_paths_relative(path_name, old_dir, rpaths, deps, idpath):
- """
- Replace old_dir with relative path from dirname(path_name)
- in rpaths and deps; idpaths are replaced with @rpath/libname as needed;
- replacement are returned.
- """
- new_idpath = None
+ paths_to_paths = dict()
if idpath:
- new_idpath = '@rpath/%s' % os.path.basename(idpath)
- new_rpaths = list()
- new_deps = list()
+ paths_to_paths[idpath] = os.path.join(
+ '@rpath', '%s' % os.path.basename(idpath))
for rpath in rpaths:
- if re.match(old_dir, rpath):
+ if re.match(old_layout_root, rpath):
rel = os.path.relpath(rpath, start=os.path.dirname(path_name))
- new_rpaths.append('@loader_path/%s' % rel)
+ paths_to_paths[rpath] = os.path.join('@loader_path', '%s' % rel)
else:
- new_rpaths.append(rpath)
+ paths_to_paths[rpath] = rpath
for dep in deps:
- if re.match(old_dir, dep):
+ if re.match(old_layout_root, dep):
rel = os.path.relpath(dep, start=os.path.dirname(path_name))
- new_deps.append('@loader_path/%s' % rel)
+ paths_to_paths[dep] = os.path.join('@loader_path', '%s' % rel)
else:
- new_deps.append(dep)
- return (new_rpaths, new_deps, new_idpath)
+ paths_to_paths[dep] = dep
+ return paths_to_paths
-def macho_make_paths_placeholder(rpaths, deps, idpath):
+def macho_make_paths_normal(orig_path_name, rpaths, deps, idpath):
"""
- Replace old_dir with a placeholder of the same length
- in rpaths and deps and idpaths is needed.
- replacement are returned.
+ Return a dictionary mapping the relativized rpaths to the original rpaths.
+ This dictionary is used to replace paths in mach-o binaries.
+    Replace '@loader_path' with the dirname of the original path name
+ in rpaths and deps; idpath is replaced with the original path name
"""
- new_idpath = None
- old_dir = spack.store.layout.root
- placeholder = set_placeholder(old_dir)
+ rel_to_orig = dict()
if idpath:
- new_idpath = re.sub(old_dir, placeholder, idpath)
- new_rpaths = list()
- new_deps = list()
+ rel_to_orig[idpath] = orig_path_name
+
for rpath in rpaths:
- if re.match(old_dir, rpath):
- ph = re.sub(old_dir, placeholder, rpath)
- new_rpaths.append('%s' % ph)
+ if re.match('@loader_path', rpath):
+ norm = os.path.normpath(re.sub(re.escape('@loader_path'),
+ os.path.dirname(orig_path_name),
+ rpath))
+ rel_to_orig[rpath] = norm
else:
- new_rpaths.append(rpath)
+ rel_to_orig[rpath] = rpath
for dep in deps:
- if re.match(old_dir, dep):
- ph = re.sub(old_dir, placeholder, dep)
- new_deps.append('%s' % ph)
+ if re.match('@loader_path', dep):
+ norm = os.path.normpath(re.sub(re.escape('@loader_path'),
+ os.path.dirname(orig_path_name),
+ dep))
+ rel_to_orig[dep] = norm
else:
- new_deps.append(dep)
- return (new_rpaths, new_deps, new_idpath)
-
+ rel_to_orig[dep] = dep
+ return rel_to_orig
+
+
+def macho_find_paths(orig_rpaths, deps, idpath,
+ old_layout_root, prefix_to_prefix):
+ """
+ Inputs
+ original rpaths from mach-o binaries
+ dependency libraries for mach-o binaries
+ id path of mach-o libraries
+ old install directory layout root
+ prefix_to_prefix dictionary which maps prefixes in the old directory layout
+ to directories in the new directory layout
+ Output
+ paths_to_paths dictionary which maps all of the old paths to new paths
+ """
+ paths_to_paths = dict()
+ for orig_rpath in orig_rpaths:
+ if orig_rpath.startswith(old_layout_root):
+ for old_prefix, new_prefix in prefix_to_prefix.items():
+ if orig_rpath.startswith(old_prefix):
+ new_rpath = re.sub(re.escape(old_prefix),
+ new_prefix, orig_rpath)
+ paths_to_paths[orig_rpath] = new_rpath
+ else:
+ paths_to_paths[orig_rpath] = orig_rpath
-def macho_replace_paths(old_dir, new_dir, rpaths, deps, idpath):
- """
- Replace old_dir with new_dir in rpaths, deps and idpath
- and return replacements
- """
- new_idpath = None
if idpath:
- new_idpath = idpath.replace(old_dir, new_dir)
- new_rpaths = list()
- new_deps = list()
- for rpath in rpaths:
- new_rpath = rpath.replace(old_dir, new_dir)
- new_rpaths.append(new_rpath)
+ for old_prefix, new_prefix in prefix_to_prefix.items():
+ if idpath.startswith(old_prefix):
+ paths_to_paths[idpath] = re.sub(
+ re.escape(old_prefix), new_prefix, idpath)
for dep in deps:
- new_dep = dep.replace(old_dir, new_dir)
- new_deps.append(new_dep)
- return new_rpaths, new_deps, new_idpath
+ for old_prefix, new_prefix in prefix_to_prefix.items():
+ if dep.startswith(old_prefix):
+ paths_to_paths[dep] = re.sub(
+ re.escape(old_prefix), new_prefix, dep)
+ if dep.startswith('@'):
+ paths_to_paths[dep] = dep
+
+ return paths_to_paths
def modify_macho_object(cur_path, rpaths, deps, idpath,
- new_rpaths, new_deps, new_idpath):
+ paths_to_paths):
"""
- Modify MachO binary path_name by replacing old_dir with new_dir
- or the relative path to spack install root.
- The old install dir in LC_ID_DYLIB is replaced with the new install dir
- using install_name_tool -id newid binary
- The old install dir in LC_LOAD_DYLIB is replaced with the new install dir
- using install_name_tool -change old new binary
- The old install dir in LC_RPATH is replaced with the new install dir using
- install_name_tool -rpath old new binary
+ This function is used to make machO buildcaches on macOS by
+ replacing old paths with new paths using install_name_tool
+ Inputs:
+ mach-o binary to be modified
+ original rpaths
+ original dependency paths
+ original id path if a mach-o library
+ dictionary mapping paths in old install layout to new install layout
"""
# avoid error message for libgcc_s
if 'libgcc_' in cur_path:
return
- install_name_tool = Executable('install_name_tool')
- if new_idpath and not idpath == new_idpath:
- install_name_tool('-id', new_idpath, str(cur_path))
-
- if len(deps) == len(new_deps):
- for orig, new in zip(deps, new_deps):
- if not orig == new:
- install_name_tool('-change', orig, new, str(cur_path))
-
- if len(rpaths) == len(new_rpaths):
- for orig, new in zip(rpaths, new_rpaths):
- if not orig == new:
- install_name_tool('-rpath', orig, new, str(cur_path))
+ install_name_tool = executable.Executable('install_name_tool')
+ if idpath:
+ new_idpath = paths_to_paths.get(idpath, None)
+ if new_idpath and not idpath == new_idpath:
+ install_name_tool('-id', new_idpath, str(cur_path))
+ for dep in deps:
+ new_dep = paths_to_paths.get(dep)
+ if new_dep and dep != new_dep:
+ install_name_tool('-change', dep, new_dep, str(cur_path))
+
+ for orig_rpath in rpaths:
+ new_rpath = paths_to_paths.get(orig_rpath)
+ if new_rpath and not orig_rpath == new_rpath:
+ install_name_tool('-rpath', orig_rpath, new_rpath, str(cur_path))
return
-def modify_object_macholib(cur_path, old_dir, new_dir):
+def modify_object_macholib(cur_path, paths_to_paths):
"""
- Modify MachO binary path_name by replacing old_dir with new_dir
- or the relative path to spack install root.
- The old install dir in LC_ID_DYLIB is replaced with the new install dir
- using py-macholib
- The old install dir in LC_LOAD_DYLIB is replaced with the new install dir
- using py-macholib
- The old install dir in LC_RPATH is replaced with the new install dir using
- using py-macholib
+    This function is used when installing machO buildcaches on Linux by
+    rewriting mach-o loader commands for dependency library paths of
+    mach-o binaries and the id path for mach-o libraries.
+    Rewriting of rpaths is handled by replace_prefix_bin.
+ Inputs
+ mach-o binary to be modified
+ dictionary mapping paths in old install layout to new install layout
"""
- if cur_path.endswith('.o'):
- return
- try:
- from macholib.MachO import MachO
- except ImportError as e:
- raise MissingMacholibException(e)
- def match_func(cpath):
- rpath = cpath.replace(old_dir, new_dir)
- return rpath
+ dll = macholib.MachO.MachO(cur_path)
+
+ changedict = paths_to_paths
+
+ def changefunc(path):
+ npath = changedict.get(path, None)
+ return npath
+
+ dll.rewriteLoadCommands(changefunc)
- dll = MachO(cur_path)
- dll.rewriteLoadCommands(match_func)
try:
f = open(dll.filename, 'rb+')
for header in dll.headers:
@@ -320,14 +329,32 @@ def modify_object_macholib(cur_path, old_dir, new_dir):
return
-def strings_contains_installroot(path_name, root_dir):
+def macholib_get_paths(cur_path):
"""
- Check if the file contain the install root string.
+ Get rpaths, dependencies and id of mach-o objects
+ using python macholib package
"""
- strings = Executable('strings')
- output = strings('%s' % path_name,
- output=str, err=str)
- return (root_dir in output or spack.paths.prefix in output)
+ dll = macholib.MachO.MachO(cur_path)
+
+ ident = None
+ rpaths = list()
+ deps = list()
+ for header in dll.headers:
+ rpaths = [data.rstrip(b'\0').decode('utf-8')
+ for load_command, dylib_command, data in header.commands if
+ load_command.cmd == macholib.mach_o.LC_RPATH]
+ deps = [data.rstrip(b'\0').decode('utf-8')
+ for load_command, dylib_command, data in header.commands if
+ load_command.cmd == macholib.mach_o.LC_LOAD_DYLIB]
+ idents = [data.rstrip(b'\0').decode('utf-8')
+ for load_command, dylib_command, data in header.commands if
+ load_command.cmd == macholib.mach_o.LC_ID_DYLIB]
+ if len(idents) == 1:
+ ident = idents[0]
+ tty.debug('ident: %s' % ident)
+ tty.debug('deps: %s' % deps)
+ tty.debug('rpaths: %s' % rpaths)
+ return (rpaths, deps, ident)
def modify_elf_object(path_name, new_rpaths):
@@ -338,21 +365,23 @@ def modify_elf_object(path_name, new_rpaths):
new_joined = ':'.join(new_rpaths)
# if we're relocating patchelf itself, use it
+ bak_path = path_name + ".bak"
if path_name[-13:] == "/bin/patchelf":
- bak_path = path_name + ".bak"
shutil.copy(path_name, bak_path)
- patchelf = Executable(bak_path)
+ patchelf = executable.Executable(bak_path)
else:
- patchelf = Executable(get_patchelf())
+ patchelf = executable.Executable(_patchelf())
try:
patchelf('--force-rpath', '--set-rpath', '%s' % new_joined,
'%s' % path_name, output=str, error=str)
- except ProcessError as e:
- tty.die('patchelf --set-rpath %s failed' %
- path_name, e)
- pass
+ except executable.ProcessError as e:
+ msg = 'patchelf --force-rpath --set-rpath %s failed with error %s' % (
+ path_name, e)
+ tty.warn(msg)
+ if os.path.exists(bak_path):
+ os.remove(bak_path)
def needs_binary_relocation(m_type, m_subtype):
@@ -412,18 +441,19 @@ def replace_prefix_bin(path_name, old_dir, new_dir):
if padding < 0:
return data
return match.group().replace(old_dir.encode('utf-8'),
- new_dir.encode('utf-8')) + b'\0' * padding
+ os.sep.encode('utf-8') * padding +
+ new_dir.encode('utf-8'))
with open(path_name, 'rb+') as f:
data = f.read()
f.seek(0)
original_data_len = len(data)
- pat = re.compile(old_dir.encode('utf-8') + b'([^\0]*?)\0')
+ pat = re.compile(old_dir.encode('utf-8'))
if not pat.search(data):
return
ndata = pat.sub(replace, data)
if not len(ndata) == original_data_len:
- raise BinaryStringReplacementException(
+ raise BinaryStringReplacementError(
path_name, original_data_len, len(ndata))
f.write(ndata)
f.truncate()
@@ -446,95 +476,148 @@ def replace_prefix_nullterm(path_name, old_dir, new_dir):
return data
return match.group().replace(old_dir.encode('utf-8'),
new_dir.encode('utf-8')) + b'\0' * padding
+
+ if len(new_dir) > len(old_dir):
+ raise BinaryTextReplaceError(old_dir, new_dir)
+
with open(path_name, 'rb+') as f:
data = f.read()
f.seek(0)
original_data_len = len(data)
- pat = re.compile(old_dir.encode('utf-8') + b'([^\0]*?)\0')
+ pat = re.compile(re.escape(old_dir).encode('utf-8') + b'([^\0]*?)\0')
if not pat.search(data):
return
ndata = pat.sub(replace, data)
if not len(ndata) == original_data_len:
- raise BinaryStringReplacementException(
+ raise BinaryStringReplacementError(
path_name, original_data_len, len(ndata))
f.write(ndata)
f.truncate()
-def relocate_macho_binaries(path_names, old_dir, new_dir, allow_root):
+def relocate_macho_binaries(path_names, old_layout_root, new_layout_root,
+ prefix_to_prefix, rel, old_prefix, new_prefix):
"""
- Change old_dir to new_dir in LC_RPATH of mach-o files (on macOS)
- Change old_dir to new_dir in LC_ID and LC_DEP of mach-o files
- Account for the case where old_dir is now a placeholder
+    Use macholib python package to get the rpaths, dependent libraries
+ and library identity for libraries from the MachO object. Modify them
+ with the replacement paths queried from the dictionary mapping old layout
+ prefixes to hashes and the dictionary mapping hashes to the new layout
+ prefixes.
"""
- placeholder = set_placeholder(old_dir)
+
for path_name in path_names:
+ # Corner case where macho object file ended up in the path name list
if path_name.endswith('.o'):
continue
- if new_dir == old_dir:
- continue
- if platform.system().lower() == 'darwin':
- rpaths, deps, idpath = macho_get_paths(path_name)
- # one pass to replace placeholder
- (n_rpaths,
- n_deps,
- n_idpath) = macho_replace_paths(placeholder,
- new_dir,
- rpaths,
- deps,
- idpath)
- # another pass to replace old_dir
- (new_rpaths,
- new_deps,
- new_idpath) = macho_replace_paths(old_dir,
- new_dir,
- n_rpaths,
- n_deps,
- n_idpath)
- modify_macho_object(path_name,
- rpaths, deps, idpath,
- new_rpaths, new_deps, new_idpath)
+ if rel:
+ # get the relativized paths
+ rpaths, deps, idpath = macholib_get_paths(path_name)
+ # get the file path name in the original prefix
+ orig_path_name = re.sub(re.escape(new_prefix), old_prefix,
+ path_name)
+ # get the mapping of the relativized paths to the original
+ # normalized paths
+ rel_to_orig = macho_make_paths_normal(orig_path_name,
+ rpaths, deps,
+ idpath)
+ # replace the relativized paths with normalized paths
+ if platform.system().lower() == 'darwin':
+ modify_macho_object(path_name, rpaths, deps,
+ idpath, rel_to_orig)
+ else:
+ modify_object_macholib(path_name,
+ rel_to_orig)
+ # get the normalized paths in the mach-o binary
+ rpaths, deps, idpath = macholib_get_paths(path_name)
+ # get the mapping of paths in old prefix to path in new prefix
+ paths_to_paths = macho_find_paths(rpaths, deps, idpath,
+ old_layout_root,
+ prefix_to_prefix)
+ # replace the old paths with new paths
+ if platform.system().lower() == 'darwin':
+ modify_macho_object(path_name, rpaths, deps,
+ idpath, paths_to_paths)
+ else:
+ modify_object_macholib(path_name,
+ paths_to_paths)
+ # get the new normalized path in the mach-o binary
+ rpaths, deps, idpath = macholib_get_paths(path_name)
+ # get the mapping of paths to relative paths in the new prefix
+ paths_to_paths = macho_make_paths_relative(path_name,
+ new_layout_root,
+ rpaths, deps, idpath)
+ # replace the new paths with relativized paths in the new prefix
+ if platform.system().lower() == 'darwin':
+ modify_macho_object(path_name, rpaths, deps,
+ idpath, paths_to_paths)
+ else:
+ modify_object_macholib(path_name,
+ paths_to_paths)
else:
- modify_object_macholib(path_name, placeholder, new_dir)
- modify_object_macholib(path_name, old_dir, new_dir)
- if len(new_dir) <= len(old_dir):
- replace_prefix_nullterm(path_name, old_dir, new_dir)
+ # get the paths in the old prefix
+ rpaths, deps, idpath = macholib_get_paths(path_name)
+            # get the mapping of paths in the old prefix to the new prefix
+ paths_to_paths = macho_find_paths(rpaths, deps, idpath,
+ old_layout_root,
+ prefix_to_prefix)
+ # replace the old paths with new paths
+ if platform.system().lower() == 'darwin':
+ modify_macho_object(path_name, rpaths, deps,
+ idpath, paths_to_paths)
+ else:
+ modify_object_macholib(path_name,
+ paths_to_paths)
+
+
+def elf_find_paths(orig_rpaths, old_layout_root, prefix_to_prefix):
+ new_rpaths = list()
+ for orig_rpath in orig_rpaths:
+ if orig_rpath.startswith(old_layout_root):
+ for old_prefix, new_prefix in prefix_to_prefix.items():
+ if orig_rpath.startswith(old_prefix):
+ new_rpaths.append(re.sub(re.escape(old_prefix),
+ new_prefix, orig_rpath))
else:
- tty.warn('Cannot do a binary string replacement'
- ' with padding for %s'
- ' because %s is longer than %s' %
- (path_name, new_dir, old_dir))
+ new_rpaths.append(orig_rpath)
+ return new_rpaths
-def relocate_elf_binaries(path_names, old_dir, new_dir, allow_root):
+def relocate_elf_binaries(path_names, old_layout_root, new_layout_root,
+ prefix_to_prefix, rel, old_prefix, new_prefix):
"""
- Change old_dir to new_dir in RPATHs of elf binaries
- Account for the case where old_dir is now a placeholder
+ Use patchelf to get the original rpaths and then replace them with
+ rpaths in the new directory layout.
+ New rpaths are determined from a dictionary mapping the prefixes in the
+ old directory layout to the prefixes in the new directory layout if the
+ rpath was in the old layout root, i.e. system paths are not replaced.
"""
- placeholder = set_placeholder(old_dir)
for path_name in path_names:
orig_rpaths = get_existing_elf_rpaths(path_name)
- if orig_rpaths:
- # one pass to replace placeholder
- n_rpaths = substitute_rpath(orig_rpaths,
- placeholder, new_dir)
- # one pass to replace old_dir
- new_rpaths = substitute_rpath(n_rpaths,
- old_dir, new_dir)
+ new_rpaths = list()
+ if rel:
+ # get the file path in the old_prefix
+ orig_path_name = re.sub(re.escape(new_prefix), old_prefix,
+ path_name)
+ # get the normalized rpaths in the old prefix using the file path
+ # in the orig prefix
+ orig_norm_rpaths = get_normalized_elf_rpaths(orig_path_name,
+ orig_rpaths)
+            # get the normalized rpaths in the new prefix
+ norm_rpaths = elf_find_paths(orig_norm_rpaths, old_layout_root,
+ prefix_to_prefix)
+ # get the relativized rpaths in the new prefix
+ new_rpaths = get_relative_elf_rpaths(path_name, new_layout_root,
+ norm_rpaths)
+ modify_elf_object(path_name, new_rpaths)
+ else:
+ new_rpaths = elf_find_paths(orig_rpaths, old_layout_root,
+ prefix_to_prefix)
modify_elf_object(path_name, new_rpaths)
- if not new_dir == old_dir:
- if len(new_dir) <= len(old_dir):
- replace_prefix_bin(path_name, old_dir, new_dir)
- else:
- tty.warn('Cannot do a binary string replacement'
- ' with padding for %s'
- ' because %s is longer than %s.' %
- (path_name, new_dir, old_dir))
def make_link_relative(cur_path_names, orig_path_names):
"""
- Change absolute links to be relative.
+ Change absolute links to relative links.
"""
for cur_path, orig_path in zip(cur_path_names, orig_path_names):
target = os.readlink(orig_path)
@@ -544,8 +627,8 @@ def make_link_relative(cur_path_names, orig_path_names):
os.symlink(relative_target, cur_path)
-def make_macho_binaries_relative(cur_path_names, orig_path_names, old_dir,
- allow_root):
+def make_macho_binaries_relative(cur_path_names, orig_path_names,
+ old_layout_root):
"""
Replace old RPATHs with paths relative to old_dir in binary files
"""
@@ -554,33 +637,26 @@ def make_macho_binaries_relative(cur_path_names, orig_path_names, old_dir,
deps = set()
idpath = None
if platform.system().lower() == 'darwin':
- (rpaths, deps, idpath) = macho_get_paths(cur_path)
- (new_rpaths,
- new_deps,
- new_idpath) = macho_make_paths_relative(orig_path, old_dir,
- rpaths, deps, idpath)
+ (rpaths, deps, idpath) = macholib_get_paths(cur_path)
+ paths_to_paths = macho_make_paths_relative(orig_path,
+ old_layout_root,
+ rpaths, deps, idpath)
modify_macho_object(cur_path,
rpaths, deps, idpath,
- new_rpaths, new_deps, new_idpath)
- if (not allow_root and
- not file_is_relocatable(cur_path)):
- raise InstallRootStringException(cur_path, old_dir)
+ paths_to_paths)
-def make_elf_binaries_relative(cur_path_names, orig_path_names, old_dir,
- allow_root):
+def make_elf_binaries_relative(cur_path_names, orig_path_names,
+ old_layout_root):
"""
Replace old RPATHs with paths relative to old_dir in binary files
"""
for cur_path, orig_path in zip(cur_path_names, orig_path_names):
orig_rpaths = get_existing_elf_rpaths(cur_path)
if orig_rpaths:
- new_rpaths = get_relative_rpaths(orig_path, old_dir,
- orig_rpaths)
+ new_rpaths = get_relative_elf_rpaths(orig_path, old_layout_root,
+ orig_rpaths)
modify_elf_object(cur_path, new_rpaths)
- if (not allow_root and
- not file_is_relocatable(cur_path)):
- raise InstallRootStringException(cur_path, old_dir)
def check_files_relocatable(cur_path_names, allow_root):
@@ -590,67 +666,77 @@ def check_files_relocatable(cur_path_names, allow_root):
for cur_path in cur_path_names:
if (not allow_root and
not file_is_relocatable(cur_path)):
- raise InstallRootStringException(
+ raise InstallRootStringError(
cur_path, spack.store.layout.root)
-def make_link_placeholder(cur_path_names, cur_dir, old_dir):
- """
- Replace old install path with placeholder in absolute links.
-
- Links in ``cur_path_names`` must link to absolute paths.
- """
- for cur_path in cur_path_names:
- placeholder = set_placeholder(spack.store.layout.root)
- placeholder_prefix = old_dir.replace(spack.store.layout.root,
- placeholder)
- cur_src = os.readlink(cur_path)
- rel_src = os.path.relpath(cur_src, cur_dir)
- new_src = os.path.join(placeholder_prefix, rel_src)
-
- os.unlink(cur_path)
- os.symlink(new_src, cur_path)
-
-
-def relocate_links(path_names, old_dir, new_dir):
- """
- Replace old path with new path in link sources.
-
- Links in ``path_names`` must link to absolute paths or placeholders.
- """
- placeholder = set_placeholder(old_dir)
- for path_name in path_names:
- old_src = os.readlink(path_name)
- # replace either placeholder or old_dir
- new_src = old_src.replace(placeholder, new_dir, 1)
- new_src = new_src.replace(old_dir, new_dir, 1)
-
- os.unlink(path_name)
- os.symlink(new_src, path_name)
-
+def relocate_links(linknames, old_layout_root, new_layout_root,
+ old_install_prefix, new_install_prefix, prefix_to_prefix):
+ """
+ The symbolic links in filenames are absolute links or placeholder links.
+ The old link target is read and the placeholder is replaced by the old
+ layout root. If the old link target is in the old install prefix, the new
+    link target is created by replacing the old install prefix with the new
+ install prefix.
+ """
+ placeholder = set_placeholder(old_layout_root)
+ link_names = [os.path.join(new_install_prefix, linkname)
+ for linkname in linknames]
+ for link_name in link_names:
+ link_target = os.readlink(link_name)
+ link_target = re.sub(placeholder, old_layout_root, link_target)
+ if link_target.startswith(old_install_prefix):
+ new_link_target = re.sub(
+ old_install_prefix, new_install_prefix, link_target)
+ os.unlink(link_name)
+ os.symlink(new_link_target, link_name)
+ if (os.path.isabs(link_target) and
+ not link_target.startswith(new_install_prefix)):
+ msg = 'Link target %s' % link_target
+ msg += ' for symbolic link %s is outside' % link_name
+ msg += ' of the newinstall prefix %s.\n' % new_install_prefix
+ tty.warn(msg)
+
+
+def relocate_text(path_names, old_layout_root, new_layout_root,
+ old_install_prefix, new_install_prefix,
+ old_spack_prefix, new_spack_prefix,
+ prefix_to_prefix):
+ """
+ Replace old paths with new paths in text files
+    including the path to the spack sbang script
+ """
+ sbangre = '#!/bin/bash %s/bin/sbang' % old_spack_prefix
+ sbangnew = '#!/bin/bash %s/bin/sbang' % new_spack_prefix
-def relocate_text(path_names, oldpath, newpath, oldprefix, newprefix):
- """
- Replace old path with new path in text files
- including the path the the spack sbang script.
- """
- sbangre = '#!/bin/bash %s/bin/sbang' % oldprefix
- sbangnew = '#!/bin/bash %s/bin/sbang' % newprefix
for path_name in path_names:
- replace_prefix_text(path_name, oldpath, newpath)
+ replace_prefix_text(path_name, old_install_prefix, new_install_prefix)
+ for orig_dep_prefix, new_dep_prefix in prefix_to_prefix.items():
+ replace_prefix_text(path_name, orig_dep_prefix, new_dep_prefix)
+ replace_prefix_text(path_name, old_layout_root, new_layout_root)
replace_prefix_text(path_name, sbangre, sbangnew)
- replace_prefix_text(path_name, oldprefix, newprefix)
-def substitute_rpath(orig_rpath, topdir, new_root_path):
- """
- Replace topdir with new_root_path RPATH list orig_rpath
- """
- new_rpaths = []
- for path in orig_rpath:
- new_rpath = path.replace(topdir, new_root_path)
- new_rpaths.append(new_rpath)
- return new_rpaths
+def relocate_text_bin(path_names, old_layout_root, new_layout_root,
+ old_install_prefix, new_install_prefix,
+ old_spack_prefix, new_spack_prefix,
+ prefix_to_prefix):
+ """
+ Replace null terminated path strings hard coded into binaries.
+    Raise an exception when the new path is longer than the old path
+ because this breaks the binary.
+ """
+ if len(new_install_prefix) <= len(old_install_prefix):
+ for path_name in path_names:
+ for old_dep_prefix, new_dep_prefix in prefix_to_prefix.items():
+ if len(new_dep_prefix) <= len(old_dep_prefix):
+ replace_prefix_bin(
+ path_name, old_dep_prefix, new_dep_prefix)
+ replace_prefix_bin(path_name, old_spack_prefix, new_spack_prefix)
+ else:
+ if len(path_names) > 0:
+ raise BinaryTextReplaceError(
+ old_install_prefix, new_install_prefix)
def is_relocatable(spec):
@@ -713,7 +799,7 @@ def file_is_relocatable(file, paths_to_relocate=None):
if not os.path.isabs(file):
raise ValueError('{0} is not an absolute path'.format(file))
- strings = Executable('strings')
+ strings = executable.Executable('strings')
# Remove the RPATHS from the strings in the executable
set_of_strings = set(strings(file, output=str).split())
@@ -728,7 +814,7 @@ def file_is_relocatable(file, paths_to_relocate=None):
set_of_strings.discard(rpaths)
if platform.system().lower() == 'darwin':
if m_subtype == 'x-mach-binary':
- rpaths, deps, idpath = macho_get_paths(file)
+ rpaths, deps, idpath = macholib_get_paths(file)
set_of_strings.discard(set(rpaths))
set_of_strings.discard(set(deps))
if idpath is not None:
@@ -775,9 +861,11 @@ def mime_type(file):
Returns:
Tuple containing the MIME type and subtype
"""
- file_cmd = Executable('file')
+ file_cmd = executable.Executable('file')
output = file_cmd('-b', '-h', '--mime-type', file, output=str, error=str)
tty.debug('[MIME_TYPE] {0} -> {1}'.format(file, output.strip()))
+ # In corner cases the output does not contain a subtype prefixed with a /
+ # In those cases add the / so the tuple can be formed.
if '/' not in output:
output += '/'
split_by_slash = output.strip().split('/')
diff --git a/lib/spack/spack/repo.py b/lib/spack/spack/repo.py
index 8e3dae5d47..764f1f9168 100644
--- a/lib/spack/spack/repo.py
+++ b/lib/spack/spack/repo.py
@@ -16,22 +16,20 @@ import shutil
import stat
import sys
import traceback
-
-from six import string_types, add_metaclass
+import types
try:
from collections.abc import Mapping # novm
except ImportError:
from collections import Mapping
-from types import ModuleType
+import six
import ruamel.yaml as yaml
import llnl.util.lang
import llnl.util.tty as tty
-from llnl.util.filesystem import mkdirp, install
-
+import llnl.util.filesystem as fs
import spack.config
import spack.caches
import spack.error
@@ -39,11 +37,9 @@ import spack.patch
import spack.spec
import spack.util.spack_json as sjson
import spack.util.imp as simp
-from spack.provider_index import ProviderIndex
-from spack.util.path import canonicalize_path
-from spack.util.naming import NamespaceTrie, valid_module_name
-from spack.util.naming import mod_to_class, possible_spack_module_names
-
+import spack.provider_index
+import spack.util.path
+import spack.util.naming as nm
#: Super-namespace for all packages.
#: Package modules are imported as spack.pkg.<namespace>.<pkg-name>.
@@ -95,7 +91,7 @@ def autospec(function):
return converter
-class SpackNamespace(ModuleType):
+class SpackNamespace(types.ModuleType):
""" Allow lazy loading of modules."""
def __init__(self, namespace):
@@ -151,7 +147,7 @@ class FastPackageChecker(Mapping):
pkg_dir = os.path.join(self.packages_path, pkg_name)
# Warn about invalid names that look like packages.
- if not valid_module_name(pkg_name):
+ if not nm.valid_module_name(pkg_name):
if not pkg_name.startswith('.'):
tty.warn('Skipping package at {0}. "{1}" is not '
'a valid Spack module name.'.format(
@@ -244,10 +240,11 @@ class TagIndex(Mapping):
# Add it again under the appropriate tags
for tag in getattr(package, 'tags', []):
+ tag = tag.lower()
self._tag_dict[tag].append(package.name)
-@add_metaclass(abc.ABCMeta)
+@six.add_metaclass(abc.ABCMeta)
class Indexer(object):
"""Adaptor for indexes that need to be generated when repos are updated."""
@@ -305,10 +302,10 @@ class TagIndexer(Indexer):
class ProviderIndexer(Indexer):
"""Lifecycle methods for virtual package providers."""
def _create(self):
- return ProviderIndex()
+ return spack.provider_index.ProviderIndex()
def read(self, stream):
- self.index = ProviderIndex.from_json(stream)
+ self.index = spack.provider_index.ProviderIndex.from_json(stream)
def update(self, pkg_fullname):
self.index.remove_provider(pkg_fullname)
@@ -447,7 +444,7 @@ class RepoPath(object):
def __init__(self, *repos):
self.repos = []
- self.by_namespace = NamespaceTrie()
+ self.by_namespace = nm.NamespaceTrie()
self._all_package_names = None
self._provider_index = None
@@ -456,7 +453,7 @@ class RepoPath(object):
# Add each repo to this path.
for repo in repos:
try:
- if isinstance(repo, string_types):
+ if isinstance(repo, six.string_types):
repo = Repo(repo)
self.put_last(repo)
except RepoError as e:
@@ -544,7 +541,7 @@ class RepoPath(object):
def provider_index(self):
"""Merged ProviderIndex from all Repos in the RepoPath."""
if self._provider_index is None:
- self._provider_index = ProviderIndex()
+ self._provider_index = spack.provider_index.ProviderIndex()
for repo in reversed(self.repos):
self._provider_index.merge(repo.provider_index)
@@ -707,7 +704,7 @@ class Repo(object):
"""
# Root directory, containing _repo.yaml and package dirs
# Allow roots to by spack-relative by starting with '$spack'
- self.root = canonicalize_path(root)
+ self.root = spack.util.path.canonicalize_path(root)
# check and raise BadRepoError on fail.
def check(condition, msg):
@@ -803,7 +800,7 @@ class Repo(object):
if import_name in self:
return import_name
- options = possible_spack_module_names(import_name)
+ options = nm.possible_spack_module_names(import_name)
options.remove(import_name)
for name in options:
if name in self:
@@ -921,18 +918,18 @@ class Repo(object):
% (self.namespace, spec.fullname))
# Install patch files needed by the package.
- mkdirp(path)
+ fs.mkdirp(path)
for patch in itertools.chain.from_iterable(
spec.package.patches.values()):
if patch.path:
if os.path.exists(patch.path):
- install(patch.path, path)
+ fs.install(patch.path, path)
else:
tty.warn("Patch file did not exist: %s" % patch.path)
# Install the package.py file itself.
- install(self.filename_for_package_name(spec.name), path)
+ fs.install(self.filename_for_package_name(spec.name), path)
def purge(self):
"""Clear entire package instance cache."""
@@ -1006,6 +1003,7 @@ class Repo(object):
index = self.tag_index
for t in tags:
+ t = t.lower()
v &= set(index[t])
return sorted(v)
@@ -1082,7 +1080,7 @@ class Repo(object):
raise InvalidNamespaceError('Invalid namespace for %s repo: %s'
% (self.namespace, namespace))
- class_name = mod_to_class(pkg_name)
+ class_name = nm.mod_to_class(pkg_name)
module = self._get_pkg_module(pkg_name)
cls = getattr(module, class_name)
@@ -1107,7 +1105,7 @@ def create_repo(root, namespace=None):
If the namespace is not provided, use basename of root.
Return the canonicalized path and namespace of the created repository.
"""
- root = canonicalize_path(root)
+ root = spack.util.path.canonicalize_path(root)
if not namespace:
namespace = os.path.basename(root)
@@ -1141,7 +1139,7 @@ def create_repo(root, namespace=None):
config_path = os.path.join(root, repo_config_name)
packages_path = os.path.join(root, packages_dir_name)
- mkdirp(packages_path)
+ fs.mkdirp(packages_path)
with open(config_path, 'w') as config:
config.write("repo:\n")
config.write(" namespace: '%s'\n" % namespace)
@@ -1163,7 +1161,7 @@ def create_repo(root, namespace=None):
def create_or_construct(path, namespace=None):
"""Create a repository, or just return a Repo if it already exists."""
if not os.path.exists(path):
- mkdirp(path)
+ fs.mkdirp(path)
create_repo(path, namespace)
return Repo(path)
diff --git a/lib/spack/spack/schema/config.py b/lib/spack/spack/schema/config.py
index 1378698825..a05af2f438 100644
--- a/lib/spack/spack/schema/config.py
+++ b/lib/spack/spack/schema/config.py
@@ -55,6 +55,7 @@ properties = {
},
'source_cache': {'type': 'string'},
'misc_cache': {'type': 'string'},
+ 'connect_timeout': {'type': 'integer', 'minimum': 0},
'verify_ssl': {'type': 'boolean'},
'suppress_gpg_warnings': {'type': 'boolean'},
'install_missing_compilers': {'type': 'boolean'},
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index 718b5ef14d..c6fe2da762 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -3120,7 +3120,7 @@ class Spec(object):
A copy of this spec.
Examples:
- Deep copy with dependnecies::
+ Deep copy with dependencies::
spec.copy()
spec.copy(deps=True)
diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py
index b445638228..54d370a50d 100644
--- a/lib/spack/spack/stage.py
+++ b/lib/spack/spack/stage.py
@@ -493,8 +493,14 @@ class Stage(object):
spack.caches.fetch_cache.store(
self.fetcher, self.mirror_paths.storage_path)
- def cache_mirror(self, stats):
- """Perform a fetch if the resource is not already cached"""
+ def cache_mirror(self, mirror, stats):
+ """Perform a fetch if the resource is not already cached
+
+ Arguments:
+ mirror (MirrorCache): the mirror to cache this Stage's resource in
+ stats (MirrorStats): this is updated depending on whether the
+ caching operation succeeded or failed
+ """
if isinstance(self.default_fetcher, fs.BundleFetchStrategy):
# BundleFetchStrategy has no source to fetch. The associated
# fetcher does nothing but the associated stage may still exist.
@@ -505,20 +511,23 @@ class Stage(object):
# must examine the type of the fetcher.
return
- dst_root = spack.caches.mirror_cache.root
+ if (mirror.skip_unstable_versions and
+ not fs.stable_target(self.default_fetcher)):
+ return
+
absolute_storage_path = os.path.join(
- dst_root, self.mirror_paths.storage_path)
+ mirror.root, self.mirror_paths.storage_path)
if os.path.exists(absolute_storage_path):
stats.already_existed(absolute_storage_path)
else:
self.fetch()
self.check()
- spack.caches.mirror_cache.store(
+ mirror.store(
self.fetcher, self.mirror_paths.storage_path)
stats.added(absolute_storage_path)
- spack.caches.mirror_cache.symlink(self.mirror_paths)
+ mirror.symlink(self.mirror_paths)
def expand_archive(self):
"""Changes to the stage directory and attempt to expand the downloaded
@@ -743,7 +752,8 @@ def purge():
def get_checksums_for_versions(
- url_dict, name, first_stage_function=None, keep_stage=False):
+ url_dict, name, first_stage_function=None, keep_stage=False,
+ fetch_options=None):
"""Fetches and checksums archives from URLs.
This function is called by both ``spack checksum`` and ``spack
@@ -757,6 +767,8 @@ def get_checksums_for_versions(
first_stage_function (callable): function that takes a Stage and a URL;
this is run on the stage of the first URL downloaded
keep_stage (bool): whether to keep staging area when command completes
+ fetch_options (dict): Options used for the fetcher (such as timeout
+ or cookies)
Returns:
(str): A multi-line string containing versions and corresponding hashes
@@ -790,7 +802,12 @@ def get_checksums_for_versions(
i = 0
for url, version in zip(urls, versions):
try:
- with Stage(url, keep=keep_stage) as stage:
+ if fetch_options:
+ url_or_fs = fs.URLFetchStrategy(
+ url, fetch_options=fetch_options)
+ else:
+ url_or_fs = url
+ with Stage(url_or_fs, keep=keep_stage) as stage:
# Fetch the archive
stage.fetch()
if i == 0 and first_stage_function:
diff --git a/lib/spack/spack/test/build_systems.py b/lib/spack/spack/test/build_systems.py
index 744821a04e..295704798f 100644
--- a/lib/spack/spack/test/build_systems.py
+++ b/lib/spack/spack/test/build_systems.py
@@ -181,3 +181,110 @@ class TestAutotoolsPackage(object):
assert '--without-bar' in options
assert '--without-baz' in options
assert '--no-fee' in options
+
+
+@pytest.mark.usefixtures('config', 'mock_packages')
+class TestCMakePackage(object):
+
+ def test_define(self):
+ s = Spec('cmake-client')
+ s.concretize()
+ pkg = spack.repo.get(s)
+
+ for cls in (list, tuple):
+ arg = pkg.define('MULTI', cls(['right', 'up']))
+ assert arg == '-DMULTI:STRING=right;up'
+
+ arg = pkg.define('ENABLE_TRUTH', False)
+ assert arg == '-DENABLE_TRUTH:BOOL=OFF'
+ arg = pkg.define('ENABLE_TRUTH', True)
+ assert arg == '-DENABLE_TRUTH:BOOL=ON'
+
+ arg = pkg.define('SINGLE', 'red')
+ assert arg == '-DSINGLE:STRING=red'
+
+ def test_define_from_variant(self):
+ s = Spec('cmake-client multi=up,right ~truthy single=red')
+ s.concretize()
+ pkg = spack.repo.get(s)
+
+ arg = pkg.define_from_variant('MULTI')
+ assert arg == '-DMULTI:STRING=right;up'
+
+ arg = pkg.define_from_variant('ENABLE_TRUTH', 'truthy')
+ assert arg == '-DENABLE_TRUTH:BOOL=OFF'
+
+ arg = pkg.define_from_variant('SINGLE')
+ assert arg == '-DSINGLE:STRING=red'
+
+ with pytest.raises(KeyError, match="not a variant"):
+ pkg.define_from_variant('NONEXISTENT')
+
+
+@pytest.mark.usefixtures('config', 'mock_packages')
+class TestGNUMirrorPackage(object):
+
+ def test_define(self):
+ s = Spec('mirror-gnu')
+ s.concretize()
+ pkg = spack.repo.get(s)
+
+ s = Spec('mirror-gnu-broken')
+ s.concretize()
+ pkg_broken = spack.repo.get(s)
+
+ cls_name = type(pkg_broken).__name__
+ with pytest.raises(AttributeError,
+ match=r'{0} must define a `gnu_mirror_path` '
+ r'attribute \[none defined\]'
+ .format(cls_name)):
+ pkg_broken.urls
+
+ assert pkg.urls[0] == 'https://ftpmirror.gnu.org/' \
+ 'make/make-4.2.1.tar.gz'
+
+
+@pytest.mark.usefixtures('config', 'mock_packages')
+class TestSourcewarePackage(object):
+
+ def test_define(self):
+ s = Spec('mirror-sourceware')
+ s.concretize()
+ pkg = spack.repo.get(s)
+
+ s = Spec('mirror-sourceware-broken')
+ s.concretize()
+ pkg_broken = spack.repo.get(s)
+
+ cls_name = type(pkg_broken).__name__
+ with pytest.raises(AttributeError,
+ match=r'{0} must define a `sourceware_mirror_path` '
+ r'attribute \[none defined\]'
+ .format(cls_name)):
+ pkg_broken.urls
+
+ assert pkg.urls[0] == 'https://sourceware.org/pub/' \
+ 'bzip2/bzip2-1.0.8.tar.gz'
+
+
+@pytest.mark.usefixtures('config', 'mock_packages')
+class TestXorgPackage(object):
+
+ def test_define(self):
+ s = Spec('mirror-xorg')
+ s.concretize()
+ pkg = spack.repo.get(s)
+
+ s = Spec('mirror-xorg-broken')
+ s.concretize()
+ pkg_broken = spack.repo.get(s)
+
+ cls_name = type(pkg_broken).__name__
+ with pytest.raises(AttributeError,
+ match=r'{0} must define a `xorg_mirror_path` '
+ r'attribute \[none defined\]'
+ .format(cls_name)):
+ pkg_broken.urls
+
+ assert pkg.urls[0] == 'https://www.x.org/archive/individual/' \
+ 'util/util-macros-1.19.1.tar.bz2'
diff --git a/lib/spack/spack/test/cmd/buildcache.py b/lib/spack/spack/test/cmd/buildcache.py
index 064daeb063..03d09b9771 100644
--- a/lib/spack/spack/test/cmd/buildcache.py
+++ b/lib/spack/spack/test/cmd/buildcache.py
@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import errno
import platform
import pytest
@@ -11,6 +12,7 @@ import spack.main
import spack.binary_distribution
buildcache = spack.main.SpackCommand('buildcache')
+install = spack.main.SpackCommand('install')
@pytest.fixture()
@@ -41,3 +43,16 @@ def test_buildcache_list_duplicates(mock_get_specs, capsys):
output = buildcache('list', 'mpileaks', '@2.3')
assert output.count('mpileaks') == 3
+
+
+def test_buildcache_create_fail_on_perm_denied(
+ install_mockery, mock_fetch, monkeypatch, tmpdir):
+ """Ensure that buildcache create fails on permission denied error."""
+ install('trivial-install-test-package')
+
+ tmpdir.chmod(0)
+ with pytest.raises(OSError) as error:
+ buildcache('create', '-d', str(tmpdir),
+ '--unsigned', 'trivial-install-test-package')
+ assert error.value.errno == errno.EACCES
+ tmpdir.chmod(0o700)
diff --git a/lib/spack/spack/test/cmd/debug.py b/lib/spack/spack/test/cmd/debug.py
index 4a06276abf..2898ad670e 100644
--- a/lib/spack/spack/test/cmd/debug.py
+++ b/lib/spack/spack/test/cmd/debug.py
@@ -3,12 +3,15 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import platform
+
import pytest
import os
import os.path
-from spack.main import SpackCommand
+import spack.architecture as architecture
+from spack.main import SpackCommand, get_version
from spack.util.executable import which
debug = SpackCommand('debug')
@@ -41,3 +44,12 @@ def test_create_db_tarball(tmpdir, database):
spec_suffix = '%s/.spack/spec.yaml' % spec.dag_hash()
assert spec_suffix in contents
+
+
+def test_report():
+ out = debug('report')
+ arch = architecture.Arch(architecture.platform(), 'frontend', 'frontend')
+
+ assert get_version() in out
+ assert platform.python_version() in out
+ assert str(arch) in out
diff --git a/lib/spack/spack/test/cmd/dependencies.py b/lib/spack/spack/test/cmd/dependencies.py
index fc47069181..05d0556936 100644
--- a/lib/spack/spack/test/cmd/dependencies.py
+++ b/lib/spack/spack/test/cmd/dependencies.py
@@ -17,7 +17,7 @@ mpis = ['mpich', 'mpich2', 'multi-provider-mpi', 'zmpi']
mpi_deps = ['fake']
-def test_immediate_dependencies(mock_packages):
+def test_direct_dependencies(mock_packages):
out = dependencies('mpileaks')
actual = set(re.split(r'\s+', out.strip()))
expected = set(['callpath'] + mpis)
@@ -47,7 +47,7 @@ def test_transitive_dependencies_with_deptypes(mock_packages):
@pytest.mark.db
-def test_immediate_installed_dependencies(mock_packages, database):
+def test_direct_installed_dependencies(mock_packages, database):
with color_when(False):
out = dependencies('--installed', 'mpileaks^mpich')
diff --git a/lib/spack/spack/test/cmd/list.py b/lib/spack/spack/test/cmd/list.py
index 5d18787bc7..17f5a1b493 100644
--- a/lib/spack/spack/test/cmd/list.py
+++ b/lib/spack/spack/test/cmd/list.py
@@ -37,6 +37,14 @@ def test_list_tags():
assert 'cloverleaf3d' in output
assert 'hdf5' not in output
+ output = list('--tags', 'hpc')
+ assert 'nek5000' in output
+ assert 'mfem' in output
+
+ output = list('--tags', 'HPC')
+ assert 'nek5000' in output
+ assert 'mfem' in output
+
def test_list_format_name_only():
output = list('--format', 'name_only')
diff --git a/lib/spack/spack/test/cmd/load.py b/lib/spack/spack/test/cmd/load.py
index a10b99d45b..e6664a9d39 100644
--- a/lib/spack/spack/test/cmd/load.py
+++ b/lib/spack/spack/test/cmd/load.py
@@ -3,7 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
-from spack.main import SpackCommand
+import pytest
+from spack.main import SpackCommand, SpackCommandError
import spack.spec
import spack.user_environment as uenv
@@ -83,6 +84,18 @@ def test_load_includes_run_env(install_mockery, mock_fetch, mock_archive,
assert 'setenv FOOBAR mpileaks' in csh_out
+def test_load_first(install_mockery, mock_fetch, mock_archive, mock_packages):
+ """Test with and without the --first option"""
+ install('libelf@0.8.12')
+ install('libelf@0.8.13')
+ # Now there are two versions of libelf
+ with pytest.raises(SpackCommandError):
+ # This should cause an error due to multiple versions
+ load('--sh', 'libelf')
+ # Using --first should avoid the error condition
+ load('--sh', '--first', 'libelf')
+
+
def test_load_fails_no_shell(install_mockery, mock_fetch, mock_archive,
mock_packages):
"""Test that spack load prints an error message without a shell."""
diff --git a/lib/spack/spack/test/cmd/mirror.py b/lib/spack/spack/test/cmd/mirror.py
index d62d7df432..4bb4fad224 100644
--- a/lib/spack/spack/test/cmd/mirror.py
+++ b/lib/spack/spack/test/cmd/mirror.py
@@ -66,6 +66,29 @@ def test_mirror_from_env(tmpdir, mock_packages, mock_fetch, config,
assert mirror_res == expected
+@pytest.fixture
+def source_for_pkg_with_hash(mock_packages, tmpdir):
+ pkg = spack.repo.get('trivial-pkg-with-valid-hash')
+ local_url_basename = os.path.basename(pkg.url)
+ local_path = os.path.join(str(tmpdir), local_url_basename)
+ with open(local_path, 'w') as f:
+ f.write(pkg.hashed_content)
+ local_url = "file://" + local_path
+ pkg.versions[spack.version.Version('1.0')]['url'] = local_url
+
+
+def test_mirror_skip_unstable(tmpdir_factory, mock_packages, config,
+ source_for_pkg_with_hash):
+ mirror_dir = str(tmpdir_factory.mktemp('mirror-dir'))
+
+ specs = [spack.spec.Spec(x).concretized() for x in
+ ['git-test', 'trivial-pkg-with-valid-hash']]
+ spack.mirror.create(mirror_dir, specs, skip_unstable_versions=True)
+
+ assert (set(os.listdir(mirror_dir)) - set(['_source-cache']) ==
+ set(['trivial-pkg-with-valid-hash']))
+
+
def test_mirror_crud(tmp_scope, capsys):
with capsys.disabled():
mirror('add', '--scope', tmp_scope, 'mirror', 'http://spack.io')
diff --git a/lib/spack/spack/test/cmd/python.py b/lib/spack/spack/test/cmd/python.py
index 5bc05e0127..b1c9d3db00 100644
--- a/lib/spack/spack/test/cmd/python.py
+++ b/lib/spack/spack/test/cmd/python.py
@@ -3,6 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import platform
+
import pytest
import spack
@@ -16,6 +18,11 @@ def test_python():
assert out.strip() == spack.spack_version
+def test_python_version():
+ out = python('-V')
+ assert platform.python_version() in out
+
+
def test_python_with_module():
# pytest rewrites a lot of modules, which interferes with runpy, so
# it's hard to test this. Trying to import a module like sys, that
diff --git a/lib/spack/spack/test/cmd/repo.py b/lib/spack/spack/test/cmd/repo.py
new file mode 100644
index 0000000000..82fe872710
--- /dev/null
+++ b/lib/spack/spack/test/cmd/repo.py
@@ -0,0 +1,35 @@
+# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import os.path
+
+import pytest
+import spack.main
+
+repo = spack.main.SpackCommand('repo')
+
+
+def test_help_option():
+ # Test 'spack repo --help' to check basic import works
+ # and the command exits successfully
+ with pytest.raises(SystemExit):
+ repo('--help')
+ assert repo.returncode in (None, 0)
+
+
+def test_create_add_list_remove(mutable_config, tmpdir):
+ # Create a new repository and check that the expected
+ # files are there
+ repo('create', str(tmpdir), 'mockrepo')
+ assert os.path.exists(os.path.join(str(tmpdir), 'repo.yaml'))
+
+ # Add the new repository and check it appears in the list output
+ repo('add', '--scope=site', str(tmpdir))
+ output = repo('list', '--scope=site', output=str)
+ assert 'mockrepo' in output
+
+ # Then remove it and check it's not there
+ repo('remove', '--scope=site', str(tmpdir))
+ output = repo('list', '--scope=site', output=str)
+ assert 'mockrepo' not in output
diff --git a/lib/spack/spack/test/compilers.py b/lib/spack/spack/test/compilers.py
index 51eedd748f..13e3d9e695 100644
--- a/lib/spack/spack/test/compilers.py
+++ b/lib/spack/spack/test/compilers.py
@@ -369,7 +369,13 @@ def test_clang_version_detection(version_str, expected_version):
'Thread model: posix\n'
'InstalledDir:\n'
'/opt/arm/arm-hpc-compiler-19.0_Generic-AArch64_RHEL-7_aarch64-linux/bin\n', # NOQA
- '19.0')
+ '19.0.0.73'),
+ ('Arm C/C++/Fortran Compiler version 19.3.1 (build number 75) (based on LLVM 7.0.2)\n' # NOQA
+ 'Target: aarch64--linux-gnu\n'
+ 'Thread model: posix\n'
+ 'InstalledDir:\n'
+ '/opt/arm/arm-hpc-compiler-19.0_Generic-AArch64_RHEL-7_aarch64-linux/bin\n', # NOQA
+ '19.3.1.75')
])
def test_arm_version_detection(version_str, expected_version):
version = spack.compilers.arm.Arm.extract_version_from_output(version_str)
diff --git a/lib/spack/spack/test/concretize_preferences.py b/lib/spack/spack/test/concretize_preferences.py
index 81b5360869..922d5a11d8 100644
--- a/lib/spack/spack/test/concretize_preferences.py
+++ b/lib/spack/spack/test/concretize_preferences.py
@@ -185,36 +185,39 @@ class TestConcretizePreferences(object):
spec.concretize()
assert spec.version == Version('0.2.15.develop')
- def test_no_virtuals_in_packages_yaml(self):
- """Verify that virtuals are not allowed in packages.yaml."""
+ def test_external_mpi(self):
+ # make sure this doesn't give us an external first.
+ spec = Spec('mpi')
+ spec.concretize()
+ assert not spec['mpi'].external
- # set up a packages.yaml file with a vdep as a key. We use
- # syaml.load_config here to make sure source lines in the config are
- # attached to parsed strings, as the error message uses them.
+ # load config
conf = syaml.load_config("""\
-mpi:
+all:
+ providers:
+ mpi: [mpich]
+mpich:
+ buildable: false
paths:
- mpi-with-lapack@2.1: /path/to/lapack
+ mpich@3.0.4: /dummy/path
""")
spack.config.set('packages', conf, scope='concretize')
- # now when we get the packages.yaml config, there should be an error
- with pytest.raises(spack.package_prefs.VirtualInPackagesYAMLError):
- spack.package_prefs.get_packages_config()
-
- def test_all_is_not_a_virtual(self):
- """Verify that `all` is allowed in packages.yaml."""
- conf = syaml.load_config("""\
-all:
- variants: [+mpi]
-""")
- spack.config.set('packages', conf, scope='concretize')
+ # ensure that once config is in place, external is used
+ spec = Spec('mpi')
+ spec.concretize()
+ assert spec['mpich'].external_path == '/dummy/path'
- # should be no error for 'all':
- spack.package_prefs.get_packages_config()
+ def test_external_module(self, monkeypatch):
+ """Test that packages can find externals specified by module
- def test_external_mpi(self):
+ The specific code for parsing the module is tested elsewhere.
+ This just tests that the preference is accounted for"""
# make sure this doesn't give us an external first.
+ def mock_module(cmd, module):
+ return 'prepend-path PATH /dummy/path'
+ monkeypatch.setattr(spack.util.module_cmd, 'module', mock_module)
+
spec = Spec('mpi')
spec.concretize()
assert not spec['mpi'].external
@@ -224,10 +227,10 @@ all:
all:
providers:
mpi: [mpich]
-mpich:
+mpi:
buildable: false
- paths:
- mpich@3.0.4: /dummy/path
+ modules:
+ mpich@3.0.4: dummy
""")
spack.config.set('packages', conf, scope='concretize')
diff --git a/lib/spack/spack/test/config.py b/lib/spack/spack/test/config.py
index feb2b9cae4..b8598616d5 100644
--- a/lib/spack/spack/test/config.py
+++ b/lib/spack/spack/test/config.py
@@ -46,7 +46,19 @@ config_merge_list = {
config_override_list = {
'config': {
- 'build_stage:': ['patha', 'pathb']}}
+ 'build_stage:': ['pathd', 'pathe']}}
+
+config_merge_dict = {
+ 'config': {
+ 'info': {
+ 'a': 3,
+ 'b': 4}}}
+
+config_override_dict = {
+ 'config': {
+ 'info:': {
+ 'a': 7,
+ 'c': 9}}}
@pytest.fixture()
@@ -382,7 +394,7 @@ def test_read_config_override_list(mock_low_high_config, write_config_file):
write_config_file('config', config_override_list, 'high')
assert spack.config.get('config') == {
'install_tree': 'install_tree_path',
- 'build_stage': ['patha', 'pathb']
+ 'build_stage': config_override_list['config']['build_stage:']
}
@@ -857,3 +869,74 @@ def test_dotkit_in_config_does_not_raise(
# we throw a a deprecation warning without raising
assert '_sp_sys_type' in captured[0] # stdout
assert 'Warning' in captured[1] # stderr
+
+
+def test_internal_config_section_override(mock_low_high_config,
+ write_config_file):
+ write_config_file('config', config_merge_list, 'low')
+ wanted_list = config_override_list['config']['build_stage:']
+ mock_low_high_config.push_scope(spack.config.InternalConfigScope
+ ('high', {
+ 'config:': {
+ 'build_stage': wanted_list
+ }
+ }))
+ assert mock_low_high_config.get('config:build_stage') == wanted_list
+
+
+def test_internal_config_dict_override(mock_low_high_config,
+ write_config_file):
+ write_config_file('config', config_merge_dict, 'low')
+ wanted_dict = config_override_dict['config']['info:']
+ mock_low_high_config.push_scope(spack.config.InternalConfigScope
+ ('high', config_override_dict))
+ assert mock_low_high_config.get('config:info') == wanted_dict
+
+
+def test_internal_config_list_override(mock_low_high_config,
+ write_config_file):
+ write_config_file('config', config_merge_list, 'low')
+ wanted_list = config_override_list['config']['build_stage:']
+ mock_low_high_config.push_scope(spack.config.InternalConfigScope
+ ('high', config_override_list))
+ assert mock_low_high_config.get('config:build_stage') == wanted_list
+
+
+def test_set_section_override(mock_low_high_config, write_config_file):
+ write_config_file('config', config_merge_list, 'low')
+ wanted_list = config_override_list['config']['build_stage:']
+ with spack.config.override('config::build_stage', wanted_list):
+ assert mock_low_high_config.get('config:build_stage') == wanted_list
+ assert config_merge_list['config']['build_stage'] == \
+ mock_low_high_config.get('config:build_stage')
+
+
+def test_set_list_override(mock_low_high_config, write_config_file):
+ write_config_file('config', config_merge_list, 'low')
+ wanted_list = config_override_list['config']['build_stage:']
+ with spack.config.override('config:build_stage:', wanted_list):
+ assert wanted_list == mock_low_high_config.get('config:build_stage')
+ assert config_merge_list['config']['build_stage'] == \
+ mock_low_high_config.get('config:build_stage')
+
+
+def test_set_dict_override(mock_low_high_config, write_config_file):
+ write_config_file('config', config_merge_dict, 'low')
+ wanted_dict = config_override_dict['config']['info:']
+ with spack.config.override('config:info:', wanted_dict):
+ assert wanted_dict == mock_low_high_config.get('config:info')
+ assert config_merge_dict['config']['info'] == \
+ mock_low_high_config.get('config:info')
+
+
+def test_set_bad_path(config):
+ with pytest.raises(syaml.SpackYAMLError, match='Illegal leading'):
+ with spack.config.override(':bad:path', ''):
+ pass
+
+
+def test_bad_path_double_override(config):
+ with pytest.raises(syaml.SpackYAMLError,
+ match='Meaningless second override'):
+ with spack.config.override('bad::double:override::directive', ''):
+ pass
diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py
index 6703742142..8912c0219b 100644
--- a/lib/spack/spack/test/conftest.py
+++ b/lib/spack/spack/test/conftest.py
@@ -301,8 +301,17 @@ def use_configuration(config):
"""Context manager to swap out the global Spack configuration."""
saved = spack.config.config
spack.config.config = config
+
+ # Avoid using real spack configuration that has been cached by other
+ # tests, and avoid polluting the cache with spack test configuration
+ # (including modified configuration)
+ saved_compiler_cache = spack.compilers._cache_config_file
+ spack.compilers._cache_config_file = []
+
yield
+
spack.config.config = saved
+ spack.compilers._cache_config_file = saved_compiler_cache
@contextlib.contextmanager
@@ -427,10 +436,6 @@ def mutable_config(tmpdir_factory, configuration_dir, monkeypatch):
*[spack.config.ConfigScope(name, str(mutable_dir))
for name in ['site', 'system', 'user']])
- # This is essential, otherwise the cache will create weird side effects
- # that will compromise subsequent tests if compilers.yaml is modified
- monkeypatch.setattr(spack.compilers, '_cache_config_file', [])
-
with use_configuration(cfg):
yield cfg
@@ -1035,6 +1040,13 @@ class MockPackage(object):
self.conflicts = {}
self.patches = {}
+ def provides(self, vname):
+ return vname in self.provided
+
+ @property
+ def virtuals_provided(self):
+ return [v.name for v, c in self.provided]
+
class MockPackageMultiRepo(object):
def __init__(self, packages):
diff --git a/lib/spack/spack/test/mirror.py b/lib/spack/spack/test/mirror.py
index 08b32f74f1..05cf46dc20 100644
--- a/lib/spack/spack/test/mirror.py
+++ b/lib/spack/spack/test/mirror.py
@@ -52,52 +52,52 @@ def check_mirror():
mirror_root = os.path.join(stage.path, 'test-mirror')
# register mirror with spack config
mirrors = {'spack-mirror-test': 'file://' + mirror_root}
- spack.config.set('mirrors', mirrors)
- with spack.config.override('config:checksum', False):
- specs = [Spec(x).concretized() for x in repos]
- spack.mirror.create(mirror_root, specs)
-
- # Stage directory exists
- assert os.path.isdir(mirror_root)
-
- for spec in specs:
- fetcher = spec.package.fetcher[0]
- per_package_ref = os.path.join(
- spec.name, '-'.join([spec.name, str(spec.version)]))
- mirror_paths = spack.mirror.mirror_archive_paths(
- fetcher,
- per_package_ref)
- expected_path = os.path.join(
- mirror_root, mirror_paths.storage_path)
- assert os.path.exists(expected_path)
-
- # Now try to fetch each package.
- for name, mock_repo in repos.items():
- spec = Spec(name).concretized()
- pkg = spec.package
-
+ with spack.config.override('mirrors', mirrors):
with spack.config.override('config:checksum', False):
- with pkg.stage:
- pkg.do_stage(mirror_only=True)
-
- # Compare the original repo with the expanded archive
- original_path = mock_repo.path
- if 'svn' in name:
- # have to check out the svn repo to compare.
- original_path = os.path.join(
- mock_repo.path, 'checked_out')
-
- svn = which('svn', required=True)
- svn('checkout', mock_repo.url, original_path)
-
- dcmp = filecmp.dircmp(
- original_path, pkg.stage.source_path)
-
- # make sure there are no new files in the expanded
- # tarball
- assert not dcmp.right_only
- # and that all original files are present.
- assert all(l in exclude for l in dcmp.left_only)
+ specs = [Spec(x).concretized() for x in repos]
+ spack.mirror.create(mirror_root, specs)
+
+ # Stage directory exists
+ assert os.path.isdir(mirror_root)
+
+ for spec in specs:
+ fetcher = spec.package.fetcher[0]
+ per_package_ref = os.path.join(
+ spec.name, '-'.join([spec.name, str(spec.version)]))
+ mirror_paths = spack.mirror.mirror_archive_paths(
+ fetcher,
+ per_package_ref)
+ expected_path = os.path.join(
+ mirror_root, mirror_paths.storage_path)
+ assert os.path.exists(expected_path)
+
+ # Now try to fetch each package.
+ for name, mock_repo in repos.items():
+ spec = Spec(name).concretized()
+ pkg = spec.package
+
+ with spack.config.override('config:checksum', False):
+ with pkg.stage:
+ pkg.do_stage(mirror_only=True)
+
+ # Compare the original repo with the expanded archive
+ original_path = mock_repo.path
+ if 'svn' in name:
+ # have to check out the svn repo to compare.
+ original_path = os.path.join(
+ mock_repo.path, 'checked_out')
+
+ svn = which('svn', required=True)
+ svn('checkout', mock_repo.url, original_path)
+
+ dcmp = filecmp.dircmp(
+ original_path, pkg.stage.source_path)
+
+ # make sure there are no new files in the expanded
+ # tarball
+ assert not dcmp.right_only
+ # and that all original files are present.
+ assert all(l in exclude for l in dcmp.left_only)
def test_url_mirror(mock_archive):
@@ -213,7 +213,7 @@ def test_mirror_cache_symlinks(tmpdir):
"""
cosmetic_path = 'zlib/zlib-1.2.11.tar.gz'
global_path = '_source-cache/archive/c3/c3e5.tar.gz'
- cache = spack.caches.MirrorCache(str(tmpdir))
+ cache = spack.caches.MirrorCache(str(tmpdir), False)
reference = spack.mirror.MirrorReference(cosmetic_path, global_path)
cache.store(MockFetcher(), reference.storage_path)
diff --git a/lib/spack/spack/test/module_parsing.py b/lib/spack/spack/test/module_parsing.py
index bbe18b1ad0..0bf485913f 100644
--- a/lib/spack/spack/test/module_parsing.py
+++ b/lib/spack/spack/test/module_parsing.py
@@ -20,28 +20,11 @@ test_module_lines = ['prepend-path LD_LIBRARY_PATH /path/to/lib',
'setenv LDFLAGS -L/path/to/lib',
'prepend-path PATH /path/to/bin']
+_test_template = "'. %s 2>&1' % args[1]"
-@pytest.fixture
-def module_function_test_mode():
- old_mode = spack.util.module_cmd._test_mode
- spack.util.module_cmd._test_mode = True
- yield
-
- spack.util.module_cmd._test_mode = old_mode
-
-
-@pytest.fixture
-def save_module_func():
- old_func = spack.util.module_cmd.module
-
- yield
-
- spack.util.module_cmd.module = old_func
-
-
-def test_module_function_change_env(tmpdir, working_env,
- module_function_test_mode):
+def test_module_function_change_env(tmpdir, working_env, monkeypatch):
+ monkeypatch.setattr(spack.util.module_cmd, '_cmd_template', _test_template)
src_file = str(tmpdir.join('src_me'))
with open(src_file, 'w') as f:
f.write('export TEST_MODULE_ENV_VAR=TEST_SUCCESS\n')
@@ -53,7 +36,8 @@ def test_module_function_change_env(tmpdir, working_env,
assert os.environ['NOT_AFFECTED'] == "NOT_AFFECTED"
-def test_module_function_no_change(tmpdir, module_function_test_mode):
+def test_module_function_no_change(tmpdir, monkeypatch):
+ monkeypatch.setattr(spack.util.module_cmd, '_cmd_template', _test_template)
src_file = str(tmpdir.join('src_me'))
with open(src_file, 'w') as f:
f.write('echo TEST_MODULE_FUNCTION_PRINT')
@@ -65,11 +49,11 @@ def test_module_function_no_change(tmpdir, module_function_test_mode):
assert os.environ == old_env
-def test_get_path_from_module_faked(save_module_func):
+def test_get_path_from_module_faked(monkeypatch):
for line in test_module_lines:
def fake_module(*args):
return line
- spack.util.module_cmd.module = fake_module
+ monkeypatch.setattr(spack.util.module_cmd, 'module', fake_module)
path = get_path_from_module('mod')
assert path == '/path/to'
diff --git a/lib/spack/spack/test/package_class.py b/lib/spack/spack/test/package_class.py
index b3351ffb49..d540ac663e 100644
--- a/lib/spack/spack/test/package_class.py
+++ b/lib/spack/spack/test/package_class.py
@@ -11,12 +11,17 @@ static DSL metadata for packages.
"""
import pytest
+import spack.package
import spack.repo
-@pytest.fixture
-def mpileaks_possible_deps(mock_packages):
- mpi_names = [spec.name for spec in spack.repo.path.providers_for('mpi')]
+@pytest.fixture(scope="module")
+def mpi_names(mock_repo_path):
+ return [spec.name for spec in mock_repo_path.providers_for('mpi')]
+
+
+@pytest.fixture()
+def mpileaks_possible_deps(mock_packages, mpi_names):
possible = {
'callpath': set(['dyninst'] + mpi_names),
'dyninst': set(['libdwarf', 'libelf']),
@@ -34,47 +39,72 @@ def mpileaks_possible_deps(mock_packages):
def test_possible_dependencies(mock_packages, mpileaks_possible_deps):
mpileaks = spack.repo.get('mpileaks')
- assert (mpileaks.possible_dependencies(expand_virtuals=True) ==
- mpileaks_possible_deps)
+ assert mpileaks_possible_deps == (
+ mpileaks.possible_dependencies(expand_virtuals=True))
- assert mpileaks.possible_dependencies(expand_virtuals=False) == {
- 'callpath': set(['dyninst']),
+ assert {
+ 'callpath': set(['dyninst', 'mpi']),
'dyninst': set(['libdwarf', 'libelf']),
'libdwarf': set(['libelf']),
'libelf': set(),
'mpi': set(),
- 'mpileaks': set(['callpath']),
- }
+ 'mpileaks': set(['callpath', 'mpi']),
+ } == mpileaks.possible_dependencies(expand_virtuals=False)
+
+
+def test_possible_direct_dependencies(mock_packages, mpileaks_possible_deps):
+ mpileaks = spack.repo.get('mpileaks')
+ deps = mpileaks.possible_dependencies(transitive=False,
+ expand_virtuals=False)
+
+ assert {
+ 'callpath': set(),
+ 'mpi': set(),
+ 'mpileaks': set(['callpath', 'mpi']),
+ } == deps
+
+
+def test_possible_dependencies_virtual(mock_packages, mpi_names):
+ expected = dict(
+ (name, set(spack.repo.get(name).dependencies))
+ for name in mpi_names
+ )
+
+ # only one mock MPI has a dependency
+ expected['fake'] = set()
+
+ assert expected == spack.package.possible_dependencies(
+ "mpi", transitive=False)
def test_possible_dependencies_missing(mock_packages):
md = spack.repo.get("missing-dependency")
missing = {}
md.possible_dependencies(transitive=True, missing=missing)
- assert missing["missing-dependency"] == set([
+ assert set([
"this-is-a-missing-dependency"
- ])
+ ]) == missing["missing-dependency"]
def test_possible_dependencies_with_deptypes(mock_packages):
dtbuild1 = spack.repo.get('dtbuild1')
- assert dtbuild1.possible_dependencies(deptype=('link', 'run')) == {
+ assert {
'dtbuild1': set(['dtrun2', 'dtlink2']),
'dtlink2': set(),
'dtrun2': set(),
- }
+ } == dtbuild1.possible_dependencies(deptype=('link', 'run'))
- assert dtbuild1.possible_dependencies(deptype=('build')) == {
+ assert {
'dtbuild1': set(['dtbuild2', 'dtlink2']),
'dtbuild2': set(),
'dtlink2': set(),
- }
+ } == dtbuild1.possible_dependencies(deptype=('build'))
- assert dtbuild1.possible_dependencies(deptype=('link')) == {
+ assert {
'dtbuild1': set(['dtlink2']),
'dtlink2': set(),
- }
+ } == dtbuild1.possible_dependencies(deptype=('link'))
def test_possible_dependencies_with_multiple_classes(
@@ -88,4 +118,4 @@ def test_possible_dependencies_with_multiple_classes(
'dt-diamond-bottom': set(),
})
- assert spack.package.possible_dependencies(*pkgs) == expected
+ assert expected == spack.package.possible_dependencies(*pkgs)
diff --git a/lib/spack/spack/test/packages.py b/lib/spack/spack/test/packages.py
index 299c56481e..ffaad396c1 100644
--- a/lib/spack/spack/test/packages.py
+++ b/lib/spack/spack/test/packages.py
@@ -402,3 +402,24 @@ def test_bundle_patch_directive(mock_directive_bundle,
match="Patches are not allowed"):
patch = spack.directives.patch('mock/patch.txt')
patch(mock_directive_bundle)
+
+
+def test_fetch_options(mock_packages, config):
+ """Test fetch options inference."""
+
+ pkg = spack.repo.get('fetch-options')
+
+ fetcher = spack.fetch_strategy.for_package_version(pkg, '1.0')
+ assert isinstance(fetcher, spack.fetch_strategy.URLFetchStrategy)
+ assert fetcher.digest == 'abc10'
+ assert fetcher.extra_options == {'timeout': 42, 'cookie': 'foobar'}
+
+ fetcher = spack.fetch_strategy.for_package_version(pkg, '1.1')
+ assert isinstance(fetcher, spack.fetch_strategy.URLFetchStrategy)
+ assert fetcher.digest == 'abc11'
+ assert fetcher.extra_options == {'timeout': 65}
+
+ fetcher = spack.fetch_strategy.for_package_version(pkg, '1.2')
+ assert isinstance(fetcher, spack.fetch_strategy.URLFetchStrategy)
+ assert fetcher.digest == 'abc12'
+ assert fetcher.extra_options == {'cookie': 'baz'}
diff --git a/lib/spack/spack/test/packaging.py b/lib/spack/spack/test/packaging.py
index 39d12df7b7..39da7c3ae5 100644
--- a/lib/spack/spack/test/packaging.py
+++ b/lib/spack/spack/test/packaging.py
@@ -8,10 +8,11 @@ This test checks the binary packaging infrastructure
"""
import os
import stat
-import sys
import shutil
import pytest
import argparse
+import re
+import platform
from llnl.util.filesystem import mkdirp
@@ -19,16 +20,17 @@ import spack.repo
import spack.store
import spack.binary_distribution as bindist
import spack.cmd.buildcache as buildcache
-import spack.util.gpg
from spack.spec import Spec
from spack.paths import mock_gpg_keys_path
from spack.fetch_strategy import URLFetchStrategy, FetchStrategyComposite
from spack.relocate import needs_binary_relocation, needs_text_relocation
-from spack.relocate import strings_contains_installroot
-from spack.relocate import get_patchelf, relocate_text, relocate_links
-from spack.relocate import substitute_rpath, get_relative_rpaths
-from spack.relocate import macho_replace_paths, macho_make_paths_relative
-from spack.relocate import modify_macho_object, macho_get_paths
+from spack.relocate import relocate_text, relocate_links
+from spack.relocate import get_relative_elf_rpaths
+from spack.relocate import get_normalized_elf_rpaths
+from spack.relocate import macho_make_paths_relative
+from spack.relocate import macho_make_paths_normal
+from spack.relocate import set_placeholder, macho_find_paths
+from spack.relocate import file_is_relocatable
def has_gpg():
@@ -50,9 +52,9 @@ def fake_fetchify(url, pkg):
@pytest.mark.usefixtures('install_mockery', 'mock_gnupghome')
def test_buildcache(mock_archive, tmpdir):
# tweak patchelf to only do a download
- spec = Spec("patchelf")
- spec.concretize()
- pkg = spack.repo.get(spec)
+ pspec = Spec("patchelf")
+ pspec.concretize()
+ pkg = spack.repo.get(pspec)
fake_fetchify(pkg.fetcher, pkg)
mkdirp(os.path.join(pkg.prefix, "bin"))
patchelfscr = os.path.join(pkg.prefix, "bin", "patchelf")
@@ -71,7 +73,7 @@ echo $PATH"""
pkg = spec.package
fake_fetchify(mock_archive.url, pkg)
pkg.do_install()
- pkghash = '/' + spec.dag_hash(7)
+ pkghash = '/' + str(spec.dag_hash(7))
# Put some non-relocatable file in there
filename = os.path.join(spec.prefix, "dummy.txt")
@@ -99,88 +101,69 @@ echo $PATH"""
parser = argparse.ArgumentParser()
buildcache.setup_parser(parser)
+ create_args = ['create', '-a', '-f', '-d', mirror_path, pkghash]
# Create a private key to sign package with if gpg2 available
if spack.util.gpg.Gpg.gpg():
spack.util.gpg.Gpg.create(name='test key 1', expires='0',
email='spack@googlegroups.com',
comment='Spack test key')
- # Create build cache with signing
- args = parser.parse_args(['create', '-d', mirror_path, str(spec)])
- buildcache.buildcache(parser, args)
-
- # Uninstall the package
- pkg.do_uninstall(force=True)
-
- # test overwrite install
- args = parser.parse_args(['install', '-f', str(pkghash)])
- buildcache.buildcache(parser, args)
-
- files = os.listdir(spec.prefix)
+ else:
+ create_args.insert(create_args.index('-a'), '-u')
- # create build cache with relative path and signing
- args = parser.parse_args(
- ['create', '-d', mirror_path, '-f', '-r', str(spec)])
- buildcache.buildcache(parser, args)
+ args = parser.parse_args(create_args)
+ buildcache.buildcache(parser, args)
+ # trigger overwrite warning
+ buildcache.buildcache(parser, args)
- # Uninstall the package
- pkg.do_uninstall(force=True)
+ # Uninstall the package
+ pkg.do_uninstall(force=True)
- # install build cache with verification
- args = parser.parse_args(['install', str(spec)])
- buildcache.install_tarball(spec, args)
+ install_args = ['install', '-a', '-f', pkghash]
+ if not spack.util.gpg.Gpg.gpg():
+ install_args.insert(install_args.index('-a'), '-u')
+ args = parser.parse_args(install_args)
+ # Test install
+ buildcache.buildcache(parser, args)
- # test overwrite install
- args = parser.parse_args(['install', '-f', str(pkghash)])
- buildcache.buildcache(parser, args)
+ files = os.listdir(spec.prefix)
- else:
- # create build cache without signing
- args = parser.parse_args(
- ['create', '-d', mirror_path, '-f', '-u', str(spec)])
- buildcache.buildcache(parser, args)
-
- # Uninstall the package
- pkg.do_uninstall(force=True)
-
- # install build cache without verification
- args = parser.parse_args(['install', '-u', str(spec)])
- buildcache.install_tarball(spec, args)
-
- files = os.listdir(spec.prefix)
- assert 'link_to_dummy.txt' in files
- assert 'dummy.txt' in files
- # test overwrite install without verification
- args = parser.parse_args(['install', '-f', '-u', str(pkghash)])
- buildcache.buildcache(parser, args)
-
- # create build cache with relative path
- args = parser.parse_args(
- ['create', '-d', mirror_path, '-f', '-r', '-u', str(pkghash)])
- buildcache.buildcache(parser, args)
-
- # Uninstall the package
- pkg.do_uninstall(force=True)
-
- # install build cache
- args = parser.parse_args(['install', '-u', str(spec)])
- buildcache.install_tarball(spec, args)
-
- # test overwrite install
- args = parser.parse_args(['install', '-f', '-u', str(pkghash)])
- buildcache.buildcache(parser, args)
-
- files = os.listdir(spec.prefix)
- assert 'link_to_dummy.txt' in files
- assert 'dummy.txt' in files
- assert os.path.realpath(
- os.path.join(spec.prefix, 'link_to_dummy.txt')
- ) == os.path.realpath(os.path.join(spec.prefix, 'dummy.txt'))
+ assert 'link_to_dummy.txt' in files
+ assert 'dummy.txt' in files
# Validate the relocation information
buildinfo = bindist.read_buildinfo_file(spec.prefix)
assert(buildinfo['relocate_textfiles'] == ['dummy.txt'])
assert(buildinfo['relocate_links'] == ['link_to_dummy.txt'])
+ # create build cache with relative path
+ create_args.insert(create_args.index('-a'), '-f')
+ create_args.insert(create_args.index('-a'), '-r')
+ args = parser.parse_args(create_args)
+ buildcache.buildcache(parser, args)
+
+ # Uninstall the package
+ pkg.do_uninstall(force=True)
+
+ if not spack.util.gpg.Gpg.gpg():
+ install_args.insert(install_args.index('-a'), '-u')
+ args = parser.parse_args(install_args)
+ buildcache.buildcache(parser, args)
+
+ # test overwrite install
+ install_args.insert(install_args.index('-a'), '-f')
+ args = parser.parse_args(install_args)
+ buildcache.buildcache(parser, args)
+
+ files = os.listdir(spec.prefix)
+ assert 'link_to_dummy.txt' in files
+ assert 'dummy.txt' in files
+# assert os.path.realpath(
+# os.path.join(spec.prefix, 'link_to_dummy.txt')
+# ) == os.path.realpath(os.path.join(spec.prefix, 'dummy.txt'))
+
+ args = parser.parse_args(['keys'])
+ buildcache.buildcache(parser, args)
+
args = parser.parse_args(['list'])
buildcache.buildcache(parser, args)
@@ -200,6 +183,9 @@ echo $PATH"""
args = parser.parse_args(['keys', '-f'])
buildcache.buildcache(parser, args)
+ args = parser.parse_args(['keys', '-i', '-t'])
+ buildcache.buildcache(parser, args)
+
# unregister mirror with spack config
mirrors = {}
spack.config.set('mirrors', mirrors)
@@ -210,7 +196,10 @@ echo $PATH"""
bindist._cached_specs = set()
+@pytest.mark.usefixtures('install_mockery')
def test_relocate_text(tmpdir):
+ spec = Spec('trivial-install-test-package')
+ spec.concretize()
with tmpdir.as_cwd():
# Validate the text path replacement
old_dir = '/home/spack/opt/spack'
@@ -220,24 +209,46 @@ def test_relocate_text(tmpdir):
script.close()
filenames = [filename]
new_dir = '/opt/rh/devtoolset/'
- relocate_text(filenames, oldpath=old_dir, newpath=new_dir,
- oldprefix=old_dir, newprefix=new_dir)
+ relocate_text(filenames, old_dir, new_dir,
+ old_dir, new_dir,
+ old_dir, new_dir,
+ {old_dir: new_dir})
with open(filename, "r")as script:
for line in script:
assert(new_dir in line)
- assert(strings_contains_installroot(filename, old_dir) is False)
+ assert(file_is_relocatable(os.path.realpath(filename)))
+ # Remove cached binary specs since we deleted the mirror
+ bindist._cached_specs = set()
def test_relocate_links(tmpdir):
with tmpdir.as_cwd():
- old_dir = '/home/spack/opt/spack'
- filename = 'link.ln'
- old_src = os.path.join(old_dir, filename)
- os.symlink(old_src, filename)
- filenames = [filename]
- new_dir = '/opt/rh/devtoolset'
- relocate_links(filenames, old_dir, new_dir)
- assert os.path.realpath(filename) == os.path.join(new_dir, filename)
+ old_layout_root = os.path.join(
+ '%s' % tmpdir, 'home', 'spack', 'opt', 'spack')
+ old_install_prefix = os.path.join(
+ '%s' % old_layout_root, 'debian6', 'test')
+ old_binname = os.path.join(old_install_prefix, 'binfile')
+ placeholder = set_placeholder(old_layout_root)
+ re.sub(old_layout_root, placeholder, old_binname)
+ filenames = ['link.ln', 'outsideprefix.ln']
+ new_layout_root = os.path.join(
+ '%s' % tmpdir, 'opt', 'rh', 'devtoolset')
+ new_install_prefix = os.path.join(
+ '%s' % new_layout_root, 'test', 'debian6')
+ new_linkname = os.path.join(new_install_prefix, 'link.ln')
+ new_linkname2 = os.path.join(new_install_prefix, 'outsideprefix.ln')
+ new_binname = os.path.join(new_install_prefix, 'binfile')
+ mkdirp(new_install_prefix)
+ with open(new_binname, 'w') as f:
+ f.write('\n')
+ os.utime(new_binname, None)
+ os.symlink(old_binname, new_linkname)
+ os.symlink('/usr/lib/libc.so', new_linkname2)
+ relocate_links(filenames, old_layout_root, new_layout_root,
+ old_install_prefix, new_install_prefix,
+ {old_install_prefix: new_install_prefix})
+ assert os.readlink(new_linkname) == new_binname
+ assert os.readlink(new_linkname2) == '/usr/lib/libc.so'
def test_needs_relocation():
@@ -246,16 +257,223 @@ def test_needs_relocation():
assert needs_binary_relocation('application', 'x-executable')
assert not needs_binary_relocation('application', 'x-octet-stream')
assert not needs_binary_relocation('text', 'x-')
-
assert needs_text_relocation('text', 'x-')
assert not needs_text_relocation('symbolic link to', 'x-')
assert needs_binary_relocation('application', 'x-mach-binary')
-def test_macho_paths():
-
- out = macho_make_paths_relative('/Users/Shares/spack/pkgC/lib/libC.dylib',
+def test_replace_paths(tmpdir):
+ with tmpdir.as_cwd():
+ suffix = 'dylib' if platform.system().lower() == 'darwin' else 'so'
+ hash_a = '53moz6jwnw3xpiztxwhc4us26klribws'
+ hash_b = 'tk62dzu62kd4oh3h3heelyw23hw2sfee'
+ hash_c = 'hdkhduizmaddpog6ewdradpobnbjwsjl'
+ hash_d = 'hukkosc7ahff7o65h6cdhvcoxm57d4bw'
+ hash_loco = 'zy4oigsc4eovn5yhr2lk4aukwzoespob'
+
+ prefix2hash = dict()
+
+ old_spack_dir = os.path.join('%s' % tmpdir,
+ 'Users', 'developer', 'spack')
+ mkdirp(old_spack_dir)
+
+ oldprefix_a = os.path.join('%s' % old_spack_dir, 'pkgA-%s' % hash_a)
+ oldlibdir_a = os.path.join('%s' % oldprefix_a, 'lib')
+ mkdirp(oldlibdir_a)
+ prefix2hash[str(oldprefix_a)] = hash_a
+
+ oldprefix_b = os.path.join('%s' % old_spack_dir, 'pkgB-%s' % hash_b)
+ oldlibdir_b = os.path.join('%s' % oldprefix_b, 'lib')
+ mkdirp(oldlibdir_b)
+ prefix2hash[str(oldprefix_b)] = hash_b
+
+ oldprefix_c = os.path.join('%s' % old_spack_dir, 'pkgC-%s' % hash_c)
+ oldlibdir_c = os.path.join('%s' % oldprefix_c, 'lib')
+ oldlibdir_cc = os.path.join('%s' % oldlibdir_c, 'C')
+ mkdirp(oldlibdir_c)
+ prefix2hash[str(oldprefix_c)] = hash_c
+
+ oldprefix_d = os.path.join('%s' % old_spack_dir, 'pkgD-%s' % hash_d)
+ oldlibdir_d = os.path.join('%s' % oldprefix_d, 'lib')
+ mkdirp(oldlibdir_d)
+ prefix2hash[str(oldprefix_d)] = hash_d
+
+ oldprefix_local = os.path.join('%s' % tmpdir, 'usr', 'local')
+ oldlibdir_local = os.path.join('%s' % oldprefix_local, 'lib')
+ mkdirp(oldlibdir_local)
+ prefix2hash[str(oldprefix_local)] = hash_loco
+ libfile_a = 'libA.%s' % suffix
+ libfile_b = 'libB.%s' % suffix
+ libfile_c = 'libC.%s' % suffix
+ libfile_d = 'libD.%s' % suffix
+ libfile_loco = 'libloco.%s' % suffix
+ old_libnames = [os.path.join(oldlibdir_a, libfile_a),
+ os.path.join(oldlibdir_b, libfile_b),
+ os.path.join(oldlibdir_c, libfile_c),
+ os.path.join(oldlibdir_d, libfile_d),
+ os.path.join(oldlibdir_local, libfile_loco)]
+
+ for old_libname in old_libnames:
+ with open(old_libname, 'a'):
+ os.utime(old_libname, None)
+
+ hash2prefix = dict()
+
+ new_spack_dir = os.path.join('%s' % tmpdir, 'Users', 'Shared',
+ 'spack')
+ mkdirp(new_spack_dir)
+
+ prefix_a = os.path.join(new_spack_dir, 'pkgA-%s' % hash_a)
+ libdir_a = os.path.join(prefix_a, 'lib')
+ mkdirp(libdir_a)
+ hash2prefix[hash_a] = str(prefix_a)
+
+ prefix_b = os.path.join(new_spack_dir, 'pkgB-%s' % hash_b)
+ libdir_b = os.path.join(prefix_b, 'lib')
+ mkdirp(libdir_b)
+ hash2prefix[hash_b] = str(prefix_b)
+
+ prefix_c = os.path.join(new_spack_dir, 'pkgC-%s' % hash_c)
+ libdir_c = os.path.join(prefix_c, 'lib')
+ libdir_cc = os.path.join(libdir_c, 'C')
+ mkdirp(libdir_cc)
+ hash2prefix[hash_c] = str(prefix_c)
+
+ prefix_d = os.path.join(new_spack_dir, 'pkgD-%s' % hash_d)
+ libdir_d = os.path.join(prefix_d, 'lib')
+ mkdirp(libdir_d)
+ hash2prefix[hash_d] = str(prefix_d)
+
+ prefix_local = os.path.join('%s' % tmpdir, 'usr', 'local')
+ libdir_local = os.path.join(prefix_local, 'lib')
+ mkdirp(libdir_local)
+ hash2prefix[hash_loco] = str(prefix_local)
+
+ new_libnames = [os.path.join(libdir_a, libfile_a),
+ os.path.join(libdir_b, libfile_b),
+ os.path.join(libdir_cc, libfile_c),
+ os.path.join(libdir_d, libfile_d),
+ os.path.join(libdir_local, libfile_loco)]
+
+ for new_libname in new_libnames:
+ with open(new_libname, 'a'):
+ os.utime(new_libname, None)
+
+ prefix2prefix = dict()
+ for prefix, hash in prefix2hash.items():
+ prefix2prefix[prefix] = hash2prefix[hash]
+
+ out_dict = macho_find_paths([oldlibdir_a, oldlibdir_b,
+ oldlibdir_c,
+ oldlibdir_cc, oldlibdir_local],
+ [os.path.join(oldlibdir_a,
+ libfile_a),
+ os.path.join(oldlibdir_b,
+ libfile_b),
+ os.path.join(oldlibdir_local,
+ libfile_loco)],
+ os.path.join(oldlibdir_cc,
+ libfile_c),
+ old_spack_dir,
+ prefix2prefix
+ )
+ assert out_dict == {oldlibdir_a: libdir_a,
+ oldlibdir_b: libdir_b,
+ oldlibdir_c: libdir_c,
+ oldlibdir_cc: libdir_cc,
+ libdir_local: libdir_local,
+ os.path.join(oldlibdir_a, libfile_a):
+ os.path.join(libdir_a, libfile_a),
+ os.path.join(oldlibdir_b, libfile_b):
+ os.path.join(libdir_b, libfile_b),
+ os.path.join(oldlibdir_local, libfile_loco):
+ os.path.join(libdir_local, libfile_loco),
+ os.path.join(oldlibdir_cc, libfile_c):
+ os.path.join(libdir_cc, libfile_c)}
+
+ out_dict = macho_find_paths([oldlibdir_a, oldlibdir_b,
+ oldlibdir_c,
+ oldlibdir_cc,
+ oldlibdir_local],
+ [os.path.join(oldlibdir_a,
+ libfile_a),
+ os.path.join(oldlibdir_b,
+ libfile_b),
+ os.path.join(oldlibdir_cc,
+ libfile_c),
+ os.path.join(oldlibdir_local,
+ libfile_loco)],
+ None,
+ old_spack_dir,
+ prefix2prefix
+ )
+ assert out_dict == {oldlibdir_a: libdir_a,
+ oldlibdir_b: libdir_b,
+ oldlibdir_c: libdir_c,
+ oldlibdir_cc: libdir_cc,
+ libdir_local: libdir_local,
+ os.path.join(oldlibdir_a, libfile_a):
+ os.path.join(libdir_a, libfile_a),
+ os.path.join(oldlibdir_b, libfile_b):
+ os.path.join(libdir_b, libfile_b),
+ os.path.join(oldlibdir_local, libfile_loco):
+ os.path.join(libdir_local, libfile_loco),
+ os.path.join(oldlibdir_cc, libfile_c):
+ os.path.join(libdir_cc, libfile_c)}
+
+ out_dict = macho_find_paths([oldlibdir_a, oldlibdir_b,
+ oldlibdir_c, oldlibdir_cc,
+ oldlibdir_local],
+ ['@rpath/%s' % libfile_a,
+ '@rpath/%s' % libfile_b,
+ '@rpath/%s' % libfile_c,
+ '@rpath/%s' % libfile_loco],
+ None,
+ old_spack_dir,
+ prefix2prefix
+ )
+
+ assert out_dict == {'@rpath/%s' % libfile_a:
+ '@rpath/%s' % libfile_a,
+ '@rpath/%s' % libfile_b:
+ '@rpath/%s' % libfile_b,
+ '@rpath/%s' % libfile_c:
+ '@rpath/%s' % libfile_c,
+ '@rpath/%s' % libfile_loco:
+ '@rpath/%s' % libfile_loco,
+ oldlibdir_a: libdir_a,
+ oldlibdir_b: libdir_b,
+ oldlibdir_c: libdir_c,
+ oldlibdir_cc: libdir_cc,
+ libdir_local: libdir_local,
+ }
+
+ out_dict = macho_find_paths([oldlibdir_a,
+ oldlibdir_b,
+ oldlibdir_d,
+ oldlibdir_local],
+ ['@rpath/%s' % libfile_a,
+ '@rpath/%s' % libfile_b,
+ '@rpath/%s' % libfile_loco],
+ None,
+ old_spack_dir,
+ prefix2prefix)
+ assert out_dict == {'@rpath/%s' % libfile_a:
+ '@rpath/%s' % libfile_a,
+ '@rpath/%s' % libfile_b:
+ '@rpath/%s' % libfile_b,
+ '@rpath/%s' % libfile_loco:
+ '@rpath/%s' % libfile_loco,
+ oldlibdir_a: libdir_a,
+ oldlibdir_b: libdir_b,
+ oldlibdir_d: libdir_d,
+ libdir_local: libdir_local,
+ }
+
+
+def test_macho_make_paths():
+ out = macho_make_paths_relative('/Users/Shared/spack/pkgC/lib/libC.dylib',
'/Users/Shared/spack',
('/Users/Shared/spack/pkgA/lib',
'/Users/Shared/spack/pkgB/lib',
@@ -264,13 +482,43 @@ def test_macho_paths():
'/Users/Shared/spack/pkgB/libB.dylib',
'/usr/local/lib/libloco.dylib'),
'/Users/Shared/spack/pkgC/lib/libC.dylib')
- assert out == (['@loader_path/../../../../Shared/spack/pkgA/lib',
- '@loader_path/../../../../Shared/spack/pkgB/lib',
- '/usr/local/lib'],
- ['@loader_path/../../../../Shared/spack/pkgA/libA.dylib',
- '@loader_path/../../../../Shared/spack/pkgB/libB.dylib',
- '/usr/local/lib/libloco.dylib'],
- '@rpath/libC.dylib')
+ assert out == {'/Users/Shared/spack/pkgA/lib':
+ '@loader_path/../../pkgA/lib',
+ '/Users/Shared/spack/pkgB/lib':
+ '@loader_path/../../pkgB/lib',
+ '/usr/local/lib': '/usr/local/lib',
+ '/Users/Shared/spack/pkgA/libA.dylib':
+ '@loader_path/../../pkgA/libA.dylib',
+ '/Users/Shared/spack/pkgB/libB.dylib':
+ '@loader_path/../../pkgB/libB.dylib',
+ '/usr/local/lib/libloco.dylib':
+ '/usr/local/lib/libloco.dylib',
+ '/Users/Shared/spack/pkgC/lib/libC.dylib':
+ '@rpath/libC.dylib'}
+
+ out = macho_make_paths_normal('/Users/Shared/spack/pkgC/lib/libC.dylib',
+ ('@loader_path/../../pkgA/lib',
+ '@loader_path/../../pkgB/lib',
+ '/usr/local/lib'),
+ ('@loader_path/../../pkgA/libA.dylib',
+ '@loader_path/../../pkgB/libB.dylib',
+ '/usr/local/lib/libloco.dylib'),
+ '@rpath/libC.dylib')
+
+ assert out == {'@rpath/libC.dylib':
+ '/Users/Shared/spack/pkgC/lib/libC.dylib',
+ '@loader_path/../../pkgA/lib':
+ '/Users/Shared/spack/pkgA/lib',
+ '@loader_path/../../pkgB/lib':
+ '/Users/Shared/spack/pkgB/lib',
+ '/usr/local/lib': '/usr/local/lib',
+ '@loader_path/../../pkgA/libA.dylib':
+ '/Users/Shared/spack/pkgA/libA.dylib',
+ '@loader_path/../../pkgB/libB.dylib':
+ '/Users/Shared/spack/pkgB/libB.dylib',
+ '/usr/local/lib/libloco.dylib':
+ '/usr/local/lib/libloco.dylib'
+ }
out = macho_make_paths_relative('/Users/Shared/spack/pkgC/bin/exeC',
'/Users/Shared/spack',
@@ -281,98 +529,47 @@ def test_macho_paths():
'/Users/Shared/spack/pkgB/libB.dylib',
'/usr/local/lib/libloco.dylib'), None)
- assert out == (['@loader_path/../../pkgA/lib',
- '@loader_path/../../pkgB/lib',
- '/usr/local/lib'],
- ['@loader_path/../../pkgA/libA.dylib',
- '@loader_path/../../pkgB/libB.dylib',
- '/usr/local/lib/libloco.dylib'], None)
-
- out = macho_replace_paths('/Users/Shared/spack',
- '/Applications/spack',
- ('/Users/Shared/spack/pkgA/lib',
- '/Users/Shared/spack/pkgB/lib',
- '/usr/local/lib'),
- ('/Users/Shared/spack/pkgA/libA.dylib',
- '/Users/Shared/spack/pkgB/libB.dylib',
- '/usr/local/lib/libloco.dylib'),
- '/Users/Shared/spack/pkgC/lib/libC.dylib')
- assert out == (['/Applications/spack/pkgA/lib',
- '/Applications/spack/pkgB/lib',
- '/usr/local/lib'],
- ['/Applications/spack/pkgA/libA.dylib',
- '/Applications/spack/pkgB/libB.dylib',
- '/usr/local/lib/libloco.dylib'],
- '/Applications/spack/pkgC/lib/libC.dylib')
-
- out = macho_replace_paths('/Users/Shared/spack',
- '/Applications/spack',
- ('/Users/Shared/spack/pkgA/lib',
- '/Users/Shared/spack/pkgB/lib',
- '/usr/local/lib'),
- ('/Users/Shared/spack/pkgA/libA.dylib',
- '/Users/Shared/spack/pkgB/libB.dylib',
- '/usr/local/lib/libloco.dylib'),
- None)
- assert out == (['/Applications/spack/pkgA/lib',
- '/Applications/spack/pkgB/lib',
- '/usr/local/lib'],
- ['/Applications/spack/pkgA/libA.dylib',
- '/Applications/spack/pkgB/libB.dylib',
- '/usr/local/lib/libloco.dylib'],
- None)
+ assert out == {'/Users/Shared/spack/pkgA/lib':
+ '@loader_path/../../pkgA/lib',
+ '/Users/Shared/spack/pkgB/lib':
+ '@loader_path/../../pkgB/lib',
+ '/usr/local/lib': '/usr/local/lib',
+ '/Users/Shared/spack/pkgA/libA.dylib':
+ '@loader_path/../../pkgA/libA.dylib',
+ '/Users/Shared/spack/pkgB/libB.dylib':
+ '@loader_path/../../pkgB/libB.dylib',
+ '/usr/local/lib/libloco.dylib':
+ '/usr/local/lib/libloco.dylib'}
+
+ out = macho_make_paths_normal('/Users/Shared/spack/pkgC/bin/exeC',
+ ('@loader_path/../../pkgA/lib',
+ '@loader_path/../../pkgB/lib',
+ '/usr/local/lib'),
+ ('@loader_path/../../pkgA/libA.dylib',
+ '@loader_path/../../pkgB/libB.dylib',
+ '/usr/local/lib/libloco.dylib'),
+ None)
+
+ assert out == {'@loader_path/../../pkgA/lib':
+ '/Users/Shared/spack/pkgA/lib',
+ '@loader_path/../../pkgB/lib':
+ '/Users/Shared/spack/pkgB/lib',
+ '/usr/local/lib': '/usr/local/lib',
+ '@loader_path/../../pkgA/libA.dylib':
+ '/Users/Shared/spack/pkgA/libA.dylib',
+ '@loader_path/../../pkgB/libB.dylib':
+ '/Users/Shared/spack/pkgB/libB.dylib',
+ '/usr/local/lib/libloco.dylib':
+ '/usr/local/lib/libloco.dylib'}
def test_elf_paths():
- out = get_relative_rpaths(
+ out = get_relative_elf_rpaths(
'/usr/bin/test', '/usr',
('/usr/lib', '/usr/lib64', '/opt/local/lib'))
assert out == ['$ORIGIN/../lib', '$ORIGIN/../lib64', '/opt/local/lib']
- out = substitute_rpath(
- ('/usr/lib', '/usr/lib64', '/opt/local/lib'), '/usr', '/opt')
- assert out == ['/opt/lib', '/opt/lib64', '/opt/local/lib']
-
-
-@pytest.mark.skipif(sys.platform != 'darwin',
- reason="only works with Mach-o objects")
-def test_relocate_macho(tmpdir):
- with tmpdir.as_cwd():
-
- get_patchelf() # this does nothing on Darwin
-
- rpaths, deps, idpath = macho_get_paths('/bin/bash')
- nrpaths, ndeps, nid = macho_make_paths_relative('/bin/bash', '/usr',
- rpaths, deps, idpath)
- shutil.copyfile('/bin/bash', 'bash')
- modify_macho_object('bash',
- rpaths, deps, idpath,
- nrpaths, ndeps, nid)
-
- rpaths, deps, idpath = macho_get_paths('/bin/bash')
- nrpaths, ndeps, nid = macho_replace_paths('/usr', '/opt',
- rpaths, deps, idpath)
- shutil.copyfile('/bin/bash', 'bash')
- modify_macho_object('bash',
- rpaths, deps, idpath,
- nrpaths, ndeps, nid)
-
- path = '/usr/lib/libncurses.5.4.dylib'
- rpaths, deps, idpath = macho_get_paths(path)
- nrpaths, ndeps, nid = macho_make_paths_relative(path, '/usr',
- rpaths, deps, idpath)
- shutil.copyfile(
- '/usr/lib/libncurses.5.4.dylib', 'libncurses.5.4.dylib')
- modify_macho_object('libncurses.5.4.dylib',
- rpaths, deps, idpath,
- nrpaths, ndeps, nid)
-
- rpaths, deps, idpath = macho_get_paths(path)
- nrpaths, ndeps, nid = macho_replace_paths('/usr', '/opt',
- rpaths, deps, idpath)
- shutil.copyfile(
- '/usr/lib/libncurses.5.4.dylib', 'libncurses.5.4.dylib')
- modify_macho_object(
- 'libncurses.5.4.dylib',
- rpaths, deps, idpath,
- nrpaths, ndeps, nid)
+ out = get_normalized_elf_rpaths(
+ '/usr/bin/test',
+ ['$ORIGIN/../lib', '$ORIGIN/../lib64', '/opt/local/lib'])
+ assert out == ['/usr/lib', '/usr/lib64', '/opt/local/lib']
diff --git a/lib/spack/spack/test/relocate.py b/lib/spack/spack/test/relocate.py
index 113bdcf66a..0a9e9f7f0a 100644
--- a/lib/spack/spack/test/relocate.py
+++ b/lib/spack/spack/test/relocate.py
@@ -3,15 +3,18 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import collections
import os.path
import platform
import shutil
-import pytest
-
import llnl.util.filesystem
+import pytest
+import spack.architecture
+import spack.concretize
import spack.paths
import spack.relocate
+import spack.spec
import spack.store
import spack.tengine
import spack.util.executable
@@ -45,6 +48,47 @@ def source_file(tmpdir, is_relocatable):
return src
+@pytest.fixture(params=['which_found', 'installed', 'to_be_installed'])
+def expected_patchelf_path(request, mutable_database, monkeypatch):
+ """Prepare the stage to tests different cases that can occur
+ when searching for patchelf.
+ """
+ case = request.param
+
+ # Mock the which function
+ which_fn = {
+ 'which_found': lambda x: collections.namedtuple(
+ '_', ['path']
+ )('/usr/bin/patchelf')
+ }
+ monkeypatch.setattr(
+ spack.util.executable, 'which',
+ which_fn.setdefault(case, lambda x: None)
+ )
+ if case == 'which_found':
+ return '/usr/bin/patchelf'
+
+ # TODO: Mock a case for Darwin architecture
+
+ spec = spack.spec.Spec('patchelf')
+ spec.concretize()
+
+ patchelf_cls = type(spec.package)
+ do_install = patchelf_cls.do_install
+ expected_path = os.path.join(spec.prefix.bin, 'patchelf')
+
+ def do_install_mock(self, **kwargs):
+ do_install(self, fake=True)
+ with open(expected_path):
+ pass
+
+ monkeypatch.setattr(patchelf_cls, 'do_install', do_install_mock)
+ if case == 'installed':
+ spec.package.do_install()
+
+ return expected_path
+
+
@pytest.mark.requires_executables(
'/usr/bin/gcc', 'patchelf', 'strings', 'file'
)
@@ -64,7 +108,7 @@ def test_file_is_relocatable(source_file, is_relocatable):
'patchelf', 'strings', 'file'
)
def test_patchelf_is_relocatable():
- patchelf = spack.relocate.get_patchelf()
+ patchelf = spack.relocate._patchelf()
assert llnl.util.filesystem.is_exe(patchelf)
assert spack.relocate.file_is_relocatable(patchelf)
@@ -87,3 +131,12 @@ def test_file_is_relocatable_errors(tmpdir):
with pytest.raises(ValueError) as exc_info:
spack.relocate.file_is_relocatable('delete.me')
assert 'is not an absolute path' in str(exc_info.value)
+
+
+@pytest.mark.skipif(
+ platform.system().lower() != 'linux',
+ reason='implementation for MacOS still missing'
+)
+def test_search_patchelf(expected_patchelf_path):
+ current = spack.relocate._patchelf()
+ assert current == expected_patchelf_path
diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py
index 419a39968e..e031f02c25 100644
--- a/lib/spack/spack/test/spec_dag.py
+++ b/lib/spack/spack/test/spec_dag.py
@@ -189,7 +189,7 @@ def test_conditional_dep_with_user_constraints():
assert ('y@3' in spec)
-@pytest.mark.usefixtures('mutable_mock_repo')
+@pytest.mark.usefixtures('mutable_mock_repo', 'config')
class TestSpecDag(object):
def test_conflicting_package_constraints(self, set_dependency):
@@ -387,7 +387,6 @@ class TestSpecDag(object):
with pytest.raises(spack.spec.UnsatisfiableArchitectureSpecError):
spec.normalize()
- @pytest.mark.usefixtures('config')
def test_invalid_dep(self):
spec = Spec('libelf ^mpich')
with pytest.raises(spack.spec.InvalidDependencyError):
@@ -602,7 +601,6 @@ class TestSpecDag(object):
copy_ids = set(id(s) for s in copy.traverse())
assert not orig_ids.intersection(copy_ids)
- @pytest.mark.usefixtures('config')
def test_copy_concretized(self):
orig = Spec('mpileaks')
orig.concretize()
diff --git a/lib/spack/spack/test/url_fetch.py b/lib/spack/spack/test/url_fetch.py
index 679240049d..20648b4766 100644
--- a/lib/spack/spack/test/url_fetch.py
+++ b/lib/spack/spack/test/url_fetch.py
@@ -26,10 +26,10 @@ def checksum_type(request):
@pytest.fixture
def pkg_factory():
Pkg = collections.namedtuple(
- 'Pkg', ['url_for_version', 'urls', 'url', 'versions']
+ 'Pkg', ['url_for_version', 'urls', 'url', 'versions', 'fetch_options']
)
- def factory(url, urls):
+ def factory(url, urls, fetch_options={}):
def fn(v):
main_url = url or urls[0]
@@ -37,7 +37,8 @@ def pkg_factory():
return Pkg(
url_for_version=fn, url=url, urls=urls,
- versions=collections.defaultdict(dict)
+ versions=collections.defaultdict(dict),
+ fetch_options=fetch_options
)
return factory
@@ -130,6 +131,10 @@ def test_from_list_url(mock_packages, config, spec, url, digest):
assert isinstance(fetch_strategy, fs.URLFetchStrategy)
assert os.path.basename(fetch_strategy.url) == url
assert fetch_strategy.digest == digest
+ assert fetch_strategy.extra_options == {}
+ pkg.fetch_options = {'timeout': 60}
+ fetch_strategy = fs.from_list_url(pkg)
+ assert fetch_strategy.extra_options == {'timeout': 60}
def test_from_list_url_unspecified(mock_packages, config):
@@ -142,6 +147,10 @@ def test_from_list_url_unspecified(mock_packages, config):
assert isinstance(fetch_strategy, fs.URLFetchStrategy)
assert os.path.basename(fetch_strategy.url) == 'foo-2.0.0.tar.gz'
assert fetch_strategy.digest is None
+ assert fetch_strategy.extra_options == {}
+ pkg.fetch_options = {'timeout': 60}
+ fetch_strategy = fs.from_list_url(pkg)
+ assert fetch_strategy.extra_options == {'timeout': 60}
def test_nosource_from_list_url(mock_packages, config):
@@ -191,3 +200,7 @@ def test_candidate_urls(pkg_factory, url, urls, version, expected):
pkg = pkg_factory(url, urls)
f = fs._from_merged_attrs(fs.URLFetchStrategy, pkg, version)
assert f.candidate_urls == expected
+ assert f.extra_options == {}
+ pkg = pkg_factory(url, urls, fetch_options={'timeout': 60})
+ f = fs._from_merged_attrs(fs.URLFetchStrategy, pkg, version)
+ assert f.extra_options == {'timeout': 60}
diff --git a/lib/spack/spack/util/compression.py b/lib/spack/spack/util/compression.py
index d617954ab1..1688b49f1b 100644
--- a/lib/spack/spack/util/compression.py
+++ b/lib/spack/spack/util/compression.py
@@ -32,6 +32,9 @@ def decompressor_for(path, extension=None):
if extension and re.match(r'gz', extension):
gunzip = which('gunzip', required=True)
return gunzip
+ if extension and re.match(r'bz2', extension):
+ bunzip2 = which('bunzip2', required=True)
+ return bunzip2
tar = which('tar', required=True)
tar.add_default_arg('-xf')
return tar
diff --git a/lib/spack/spack/util/executable.py b/lib/spack/spack/util/executable.py
index ce30e18f42..1f5fdfb761 100644
--- a/lib/spack/spack/util/executable.py
+++ b/lib/spack/spack/util/executable.py
@@ -209,7 +209,7 @@ class Executable(object):
istream.close()
def __eq__(self, other):
- return self.exe == other.exe
+ return hasattr(other, 'exe') and self.exe == other.exe
def __neq__(self, other):
return not (self == other)
diff --git a/lib/spack/spack/util/module_cmd.py b/lib/spack/spack/util/module_cmd.py
index 0edf7e6102..74790156ae 100644
--- a/lib/spack/spack/util/module_cmd.py
+++ b/lib/spack/spack/util/module_cmd.py
@@ -19,16 +19,11 @@ import llnl.util.tty as tty
# If we need another option that changes the environment, add it here.
module_change_commands = ['load', 'swap', 'unload', 'purge', 'use', 'unuse']
py_cmd = "'import os;import json;print(json.dumps(dict(os.environ)))'"
-
-# This is just to enable testing. I hate it but we can't find a better way
-_test_mode = False
+_cmd_template = "'module ' + ' '.join(args) + ' 2>&1'"
def module(*args):
- module_cmd = 'module ' + ' '.join(args) + ' 2>&1'
- if _test_mode:
- tty.warn('module function operating in test mode')
- module_cmd = ". %s 2>&1" % args[1]
+ module_cmd = eval(_cmd_template) # So we can monkeypatch for testing
if args[0] in module_change_commands:
# Do the module manipulation, then output the environment in JSON
# and read the JSON back in the parent process to update os.environ
diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py
index 4fb8c5a591..8039dc5fda 100644
--- a/lib/spack/spack/util/web.py
+++ b/lib/spack/spack/util/web.py
@@ -205,6 +205,8 @@ def push_to_url(
# needs to be done in separate steps.
shutil.copy2(local_file_path, remote_file_path)
os.remove(local_file_path)
+ else:
+ raise
elif remote_url.scheme == 's3':
if extra_args is None: