Diffstat (limited to 'lib')
-rw-r--r--  lib/spack/docs/basic_usage.rst                 |  24
-rw-r--r--  lib/spack/docs/build_systems/cmakepackage.rst  |  17
-rw-r--r--  lib/spack/docs/build_systems/sippackage.rst    |  12
-rw-r--r--  lib/spack/docs/containers.rst                  |   2
-rw-r--r--  lib/spack/docs/packaging_guide.rst             |  20
-rwxr-xr-x  lib/spack/env/cc                               |   2
-rw-r--r--  lib/spack/external/altgraph/__init__.py        |   9
-rw-r--r--  lib/spack/external/distro.py                   |   1
-rw-r--r--  lib/spack/spack/binary_distribution.py         | 280
-rw-r--r--  lib/spack/spack/build_systems/autotools.py     |   6
-rw-r--r--  lib/spack/spack/build_systems/cmake.py         | 114
-rw-r--r--  lib/spack/spack/build_systems/cuda.py          |  76
-rw-r--r--  lib/spack/spack/build_systems/sip.py           |  48
-rw-r--r--  lib/spack/spack/caches.py                      |   5
-rw-r--r--  lib/spack/spack/ci.py                          |   5
-rw-r--r--  lib/spack/spack/cmd/buildcache.py              | 126
-rw-r--r--  lib/spack/spack/cmd/checksum.py                |   3
-rw-r--r--  lib/spack/spack/cmd/create.py                  |   4
-rw-r--r--  lib/spack/spack/cmd/dependencies.py            |  22
-rw-r--r--  lib/spack/spack/cmd/dependents.py              |   2
-rw-r--r--  lib/spack/spack/cmd/load.py                    |   5
-rw-r--r--  lib/spack/spack/cmd/mirror.py                  |   8
-rw-r--r--  lib/spack/spack/config.py                      |  72
-rw-r--r--  lib/spack/spack/database.py                    | 154
-rw-r--r--  lib/spack/spack/fetch_strategy.py              |  41
-rw-r--r--  lib/spack/spack/mirror.py                      |  29
-rw-r--r--  lib/spack/spack/package.py                     |  42
-rw-r--r--  lib/spack/spack/patch.py                       |   9
-rw-r--r--  lib/spack/spack/relocate.py                    | 704
-rw-r--r--  lib/spack/spack/schema/config.py               |   1
-rw-r--r--  lib/spack/spack/spec.py                        |   2
-rw-r--r--  lib/spack/spack/stage.py                       |  33
-rw-r--r--  lib/spack/spack/test/build_systems.py          |  38
-rw-r--r--  lib/spack/spack/test/cmd/buildcache.py         |  15
-rw-r--r--  lib/spack/spack/test/cmd/dependencies.py       |   4
-rw-r--r--  lib/spack/spack/test/cmd/mirror.py             |  23
-rw-r--r--  lib/spack/spack/test/config.py                 |  87
-rw-r--r--  lib/spack/spack/test/conftest.py               |   2
-rw-r--r--  lib/spack/spack/test/database.py               |  24
-rw-r--r--  lib/spack/spack/test/mirror.py                 |   2
-rw-r--r--  lib/spack/spack/test/package_class.py          |  66
-rw-r--r--  lib/spack/spack/test/packages.py               |  21
-rw-r--r--  lib/spack/spack/test/packaging.py              | 571
-rw-r--r--  lib/spack/spack/test/url_fetch.py              |  19
-rw-r--r--  lib/spack/spack/util/compression.py            |   3
-rw-r--r--  lib/spack/spack/util/web.py                    |   2
46 files changed, 1816 insertions, 939 deletions
diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst
index 56d60a29da..d63dec8827 100644
--- a/lib/spack/docs/basic_usage.rst
+++ b/lib/spack/docs/basic_usage.rst
@@ -25,6 +25,14 @@ It is recommended that the following be put in your ``.bashrc`` file:
alias less='less -R'
+If you do not see colorized output when using ``less -R``, it is because color
+is being disabled in the piped output. In this case, tell Spack to force
+colorized output.
+
+.. code-block:: console
+
+ $ spack --color always | less -R
+
--------------------------
Listing available packages
--------------------------
@@ -45,7 +53,7 @@ can install:
.. command-output:: spack list
:ellipsis: 10
-There are thosands of them, so we've truncated the output above, but you
+There are thousands of them, so we've truncated the output above, but you
can find a :ref:`full list here <package-list>`.
Packages are listed by name in alphabetical order.
A pattern to match with no wildcards, ``*`` or ``?``,
@@ -267,7 +275,7 @@ the ``spack gc`` ("garbage collector") command, which will uninstall all unneede
-- linux-ubuntu18.04-broadwell / gcc@9.0.1 ----------------------
hdf5@1.10.5 libiconv@1.16 libpciaccess@0.13.5 libszip@2.1.1 libxml2@2.9.9 mpich@3.3.2 openjpeg@2.3.1 xz@5.2.4 zlib@1.2.11
-In the example above Spack went through all the packages in the DB
+In the example above, Spack went through all the packages in the package database
and removed everything that is not either:
1. A package installed upon explicit request of the user
@@ -854,7 +862,7 @@ Variants are named options associated with a particular package. They are
optional, as each package must provide default values for each variant it
makes available. Variants can be specified using
a flexible parameter syntax ``name=<value>``. For example,
-``spack install libelf debug=True`` will install libelf build with debug
+``spack install libelf debug=True`` will install libelf built with debug
flags. The names of particular variants available for a package depend on
what was provided by the package author. ``spack info <package>`` will
provide information on what build variants are available.
@@ -1067,13 +1075,13 @@ of failing:
In the snippet above, for instance, the microarchitecture was demoted to ``haswell`` when
compiling with ``gcc@4.8`` since support to optimize for ``broadwell`` starts from ``gcc@4.9:``.
-Finally if Spack has no information to match compiler and target, it will
+Finally, if Spack has no information to match compiler and target, it will
proceed with the installation but avoid injecting any microarchitecture
specific flags.
.. warning::
- Currently Spack doesn't print any warning to the user if it has no information
+ Currently, Spack doesn't print any warning to the user if it has no information
on which optimization flags should be used for a given compiler. This behavior
might change in the future.
@@ -1083,7 +1091,7 @@ specific flags.
Virtual dependencies
--------------------
-The dependence graph for ``mpileaks`` we saw above wasn't *quite*
+The dependency graph for ``mpileaks`` we saw above wasn't *quite*
accurate. ``mpileaks`` uses MPI, which is an interface that has many
different implementations. Above, we showed ``mpileaks`` and
``callpath`` depending on ``mpich``, which is one *particular*
@@ -1410,12 +1418,12 @@ packages listed as activated:
py-nose@1.3.4 py-numpy@1.9.1 py-setuptools@11.3.1
Now, when a user runs python, ``numpy`` will be available for import
-*without* the user having to explicitly loaded. ``python@2.7.8`` now
+*without* the user having to explicitly load it. ``python@2.7.8`` now
acts like a system Python installation with ``numpy`` installed inside
of it.
Spack accomplishes this by symbolically linking the *entire* prefix of
-the ``py-numpy`` into the prefix of the ``python`` package. To the
+the ``py-numpy`` package into the prefix of the ``python`` package. To the
python interpreter, it looks like ``numpy`` is installed in the
``site-packages`` directory.
diff --git a/lib/spack/docs/build_systems/cmakepackage.rst b/lib/spack/docs/build_systems/cmakepackage.rst
index 0a771edad3..76e89c80b1 100644
--- a/lib/spack/docs/build_systems/cmakepackage.rst
+++ b/lib/spack/docs/build_systems/cmakepackage.rst
@@ -128,17 +128,20 @@ Adding flags to cmake
^^^^^^^^^^^^^^^^^^^^^
To add additional flags to the ``cmake`` call, simply override the
-``cmake_args`` function:
+``cmake_args`` function. The following example defines values for the flags
+``WHATEVER``, ``ENABLE_BROKEN_FEATURE``, ``DETECT_HDF5``, and ``THREADS`` with
+and without the :py:meth:`~.CMakePackage.define` and
+:py:meth:`~.CMakePackage.define_from_variant` helper functions:
.. code-block:: python
def cmake_args(self):
- args = []
-
- if '+hdf5' in self.spec:
- args.append('-DDETECT_HDF5=ON')
- else:
- args.append('-DDETECT_HDF5=OFF')
+ args = [
+ '-DWHATEVER:STRING=somevalue',
+ self.define('ENABLE_BROKEN_FEATURE', False),
+ self.define_from_variant('DETECT_HDF5', 'hdf5'),
+ self.define_from_variant('THREADS'), # True if +threads
+ ]
return args
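
For reference, a sketch of what the example above would return, assuming a
spec with ``+hdf5`` and ``+threads`` (variant names as in the example; values
illustrative):

.. code-block:: python

   # Hypothetical result of cmake_args() above for a '+hdf5 +threads' spec:
   ['-DWHATEVER:STRING=somevalue',
    '-DENABLE_BROKEN_FEATURE:BOOL=OFF',
    '-DDETECT_HDF5:BOOL=ON',
    '-DTHREADS:BOOL=ON']
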
diff --git a/lib/spack/docs/build_systems/sippackage.rst b/lib/spack/docs/build_systems/sippackage.rst
index b8c08ec513..ddf9a26ab9 100644
--- a/lib/spack/docs/build_systems/sippackage.rst
+++ b/lib/spack/docs/build_systems/sippackage.rst
@@ -51,10 +51,8 @@ Build system dependencies
``SIPPackage`` requires several dependencies. Python is needed to run
the ``configure.py`` build script, and to run the resulting Python
libraries. Qt is needed to provide the ``qmake`` command. SIP is also
-needed to build the package. SIP is an unusual dependency in that it
-must be installed in the same installation directory as the package,
-so instead of a ``depends_on``, we use a ``resource``. All of these
-dependencies are automatically added via the base class
+needed to build the package. All of these dependencies are automatically
+added via the base class
.. code-block:: python
@@ -62,11 +60,7 @@ dependencies are automatically added via the base class
depends_on('qt', type='build')
- resource(name='sip',
- url='https://www.riverbankcomputing.com/static/Downloads/sip/4.19.18/sip-4.19.18.tar.gz',
- sha256='c0bd863800ed9b15dcad477c4017cdb73fa805c25908b0240564add74d697e1e',
- destination='.')
-
+ depends_on('py-sip', type='build')
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Passing arguments to ``configure.py``
diff --git a/lib/spack/docs/containers.rst b/lib/spack/docs/containers.rst
index bbb21a2e00..18500ff764 100644
--- a/lib/spack/docs/containers.rst
+++ b/lib/spack/docs/containers.rst
@@ -108,7 +108,7 @@ are currently supported are summarized in the table below:
- ``ubuntu:16.04``
- ``spack/ubuntu-xenial``
* - Ubuntu 18.04
- - ``ubuntu:16.04``
+ - ``ubuntu:18.04``
- ``spack/ubuntu-bionic``
* - CentOS 6
- ``centos:6``
diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst
index 41d74fcc74..3c588c240b 100644
--- a/lib/spack/docs/packaging_guide.rst
+++ b/lib/spack/docs/packaging_guide.rst
@@ -2913,7 +2913,7 @@ discover its dependencies.
If you want to see the environment that a package will build with, or
if you want to run commands in that environment to test them out, you
-can use the :ref:`cmd-spack-env` command, documented
+can use the :ref:`cmd-spack-build-env` command, documented
below.
^^^^^^^^^^^^^^^^^^^^^
@@ -4332,31 +4332,31 @@ directory, install directory, package directory) and others change to
core spack locations. For example, ``spack cd --module-dir`` will take you to
the main python source directory of your spack install.
-.. _cmd-spack-env:
+.. _cmd-spack-build-env:
-^^^^^^^^^^^^^
-``spack env``
-^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^
+``spack build-env``
+^^^^^^^^^^^^^^^^^^^
-``spack env`` functions much like the standard unix ``env`` command,
-but it takes a spec as an argument. You can use it to see the
+``spack build-env`` functions much like the standard unix ``env``
+command, but it takes a spec as an argument. You can use it to see the
environment variables that will be set when a particular build runs,
for example:
.. code-block:: console
- $ spack env mpileaks@1.1%intel
+ $ spack build-env mpileaks@1.1%intel
This will display the entire environment that will be set when the
``mpileaks@1.1%intel`` build runs.
To run commands in a package's build environment, you can simply
-provide them after the spec argument to ``spack env``:
+provide them after the spec argument to ``spack build-env``:
.. code-block:: console
$ spack cd mpileaks@1.1%intel
- $ spack env mpileaks@1.1%intel ./configure
+ $ spack build-env mpileaks@1.1%intel ./configure
This will cd to the build directory and then run ``configure`` in the
package's build environment.
diff --git a/lib/spack/env/cc b/lib/spack/env/cc
index 73c5759dfe..f2b8bf577f 100755
--- a/lib/spack/env/cc
+++ b/lib/spack/env/cc
@@ -43,7 +43,7 @@ parameters=(
# The compiler input variables are checked for sanity later:
# SPACK_CC, SPACK_CXX, SPACK_F77, SPACK_FC
# The default compiler flags are passed from these variables:
-# SPACK_CFLAGS, SPACK_CXXFLAGS, SPACK_FCFLAGS, SPACK_FFLAGS,
+# SPACK_CFLAGS, SPACK_CXXFLAGS, SPACK_FFLAGS,
# SPACK_LDFLAGS, SPACK_LDLIBS
# Debug env var is optional; set to "TRUE" for debug logging:
# SPACK_DEBUG
diff --git a/lib/spack/external/altgraph/__init__.py b/lib/spack/external/altgraph/__init__.py
index 289c6408d1..ee70a9c91b 100644
--- a/lib/spack/external/altgraph/__init__.py
+++ b/lib/spack/external/altgraph/__init__.py
@@ -139,9 +139,12 @@ To display the graph we can use the GraphViz backend::
@contributor: U{Reka Albert <http://www.phys.psu.edu/~ralbert/>}
'''
-import pkg_resources
-__version__ = pkg_resources.require('altgraph')[0].version
-
+# import pkg_resources
+# __version__ = pkg_resources.require('altgraph')[0].version
+# pkg_resources cannot find the vendored altgraph even though it is on
+# sys.path: there is no .dist-info or .egg-info for pkg_resources to
+# query the version from, so the version must be set manually.
+__version__ = '0.16.1'
class GraphError(ValueError):
pass
diff --git a/lib/spack/external/distro.py b/lib/spack/external/distro.py
index b63451640a..e3805de75f 100644
--- a/lib/spack/external/distro.py
+++ b/lib/spack/external/distro.py
@@ -64,6 +64,7 @@ NORMALIZED_LSB_ID = {
'enterpriseenterprise': 'oracle', # Oracle Enterprise Linux
'redhatenterpriseworkstation': 'rhel', # RHEL 6, 7 Workstation
'redhatenterpriseserver': 'rhel', # RHEL 6, 7 Server
+ 'redhatenterprisecomputenode': 'rhel', # RHEL 6 ComputeNode
}
#: Translation table for normalizing the distro ID derived from the file name
diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py
index 9991a66965..7902e5fc58 100644
--- a/lib/spack/spack/binary_distribution.py
+++ b/lib/spack/spack/binary_distribution.py
@@ -10,6 +10,9 @@ import tarfile
import shutil
import tempfile
import hashlib
+import glob
+import platform
+
from contextlib import closing
import ruamel.yaml as yaml
@@ -53,7 +56,7 @@ BUILD_CACHE_INDEX_TEMPLATE = '''
BUILD_CACHE_INDEX_ENTRY_TEMPLATE = ' <li><a href="{path}">{path}</a></li>'
-class NoOverwriteException(Exception):
+class NoOverwriteException(spack.error.SpackError):
"""
Raised when a file exists and must be overwritten.
"""
@@ -68,14 +71,18 @@ class NoGpgException(spack.error.SpackError):
"""
Raised when gpg2 is not in PATH
"""
- pass
+
+ def __init__(self, msg):
+ super(NoGpgException, self).__init__(msg)
class NoKeyException(spack.error.SpackError):
"""
Raised when gpg has no default key added.
"""
- pass
+
+ def __init__(self, msg):
+ super(NoKeyException, self).__init__(msg)
class PickKeyException(spack.error.SpackError):
@@ -84,7 +91,7 @@ class PickKeyException(spack.error.SpackError):
"""
def __init__(self, keys):
- err_msg = "Multi keys available for signing\n%s\n" % keys
+ err_msg = "Multiple keys available for signing\n%s\n" % keys
err_msg += "Use spack buildcache create -k <key hash> to pick a key."
super(PickKeyException, self).__init__(err_msg)
@@ -107,7 +114,9 @@ class NewLayoutException(spack.error.SpackError):
"""
Raised if directory layout is different from buildcache.
"""
- pass
+
+ def __init__(self, msg):
+ super(NewLayoutException, self).__init__(msg)
def build_cache_relative_path():
@@ -137,15 +146,21 @@ def read_buildinfo_file(prefix):
return buildinfo
-def write_buildinfo_file(prefix, workdir, rel=False):
+def write_buildinfo_file(spec, workdir, rel=False):
"""
Create a cache file containing information
required for the relocation
"""
+ prefix = spec.prefix
text_to_relocate = []
binary_to_relocate = []
link_to_relocate = []
blacklist = (".spack", "man")
+ prefix_to_hash = dict()
+ prefix_to_hash[str(spec.package.prefix)] = spec.dag_hash()
+ deps = spack.build_environment.get_rpath_deps(spec.package)
+ for d in deps:
+ prefix_to_hash[str(d.prefix)] = d.dag_hash()
# Do this during tarball creation to save time when the tarball is unpacked.
# Used by make_package_relative to determine binaries to change.
for root, dirs, files in os.walk(prefix, topdown=True):
@@ -162,8 +177,8 @@ def write_buildinfo_file(prefix, workdir, rel=False):
link_to_relocate.append(rel_path_name)
else:
msg = 'Absolute link %s to %s ' % (path_name, link)
- msg += 'outside of stage %s ' % prefix
- msg += 'cannot be relocated.'
+ msg += 'outside of prefix %s ' % prefix
+ msg += 'should not be relocated.'
tty.warn(msg)
if relocate.needs_binary_relocation(m_type, m_subtype):
@@ -184,6 +199,7 @@ def write_buildinfo_file(prefix, workdir, rel=False):
buildinfo['relocate_textfiles'] = text_to_relocate
buildinfo['relocate_binaries'] = binary_to_relocate
buildinfo['relocate_links'] = link_to_relocate
+ buildinfo['prefix_to_hash'] = prefix_to_hash
filename = buildinfo_file_name(workdir)
with open(filename, 'w') as outfile:
outfile.write(syaml.dump(buildinfo, default_flow_style=True))
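
For orientation, a sketch of the buildinfo dictionary this writes, limited to
the keys that ``relocate_package`` reads back later in this diff; the values
are purely illustrative:

.. code-block:: python

   buildinfo = {
       'buildpath': '/old/spack/opt/spack',   # old install layout root
       'spackprefix': '/old/spack',           # old spack prefix
       'relative_prefix': 'linux-x86_64/gcc-9.2.0/zlib-1.2.11-abc123',
       'relative_rpaths': False,
       'relocate_textfiles': ['lib/pkgconfig/zlib.pc'],
       'relocate_binaries': ['lib/libz.so'],
       'relocate_links': ['lib/libz.so.1'],
       # new in this change: install prefix -> dag hash, for the package
       # itself and all of its rpath dependencies
       'prefix_to_hash': {
           '/old/spack/opt/spack/linux-x86_64/gcc-9.2.0/zlib-1.2.11-abc123':
               'abc123',
       },
   }
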
@@ -308,7 +324,7 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
tmpdir = tempfile.mkdtemp()
cache_prefix = build_cache_prefix(tmpdir)
- tarfile_name = tarball_name(spec, '.tar.bz2')
+ tarfile_name = tarball_name(spec, '.tar.gz')
tarfile_dir = os.path.join(cache_prefix, tarball_directory_name(spec))
tarfile_path = os.path.join(tarfile_dir, tarfile_name)
spackfile_path = os.path.join(
@@ -356,7 +372,7 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
os.remove(temp_tarfile_path)
# create info for later relocation and create tar
- write_buildinfo_file(spec.prefix, workdir, rel=rel)
+ write_buildinfo_file(spec, workdir, rel)
# optionally make the paths in the binaries relative to each other
# in the spack install tree before creating tarball
@@ -370,15 +386,15 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
tty.die(e)
else:
try:
- make_package_placeholder(workdir, spec, allow_root)
+ check_package_relocatable(workdir, spec, allow_root)
except Exception as e:
shutil.rmtree(workdir)
shutil.rmtree(tarfile_dir)
shutil.rmtree(tmpdir)
tty.die(e)
- # create compressed tarball of the install prefix
- with closing(tarfile.open(tarfile_path, 'w:bz2')) as tar:
+ # create gzip compressed tarball of the install prefix
+ with closing(tarfile.open(tarfile_path, 'w:gz')) as tar:
tar.add(name='%s' % workdir,
arcname='%s' % os.path.basename(spec.prefix))
# remove copy of install directory
@@ -400,6 +416,7 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
buildinfo = {}
buildinfo['relative_prefix'] = os.path.relpath(
spec.prefix, spack.store.layout.root)
+ buildinfo['relative_rpaths'] = rel
spec_dict['buildinfo'] = buildinfo
spec_dict['full_hash'] = spec.full_hash()
@@ -433,6 +450,9 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
web_util.push_to_url(
specfile_path, remote_specfile_path, keep_original=False)
+    tty.msg('Buildcache for "%s" written to \n %s' %
+ (spec, remote_spackfile_path))
+
try:
# create an index.html for the build_cache directory so specs can be
# found
@@ -478,100 +498,149 @@ def make_package_relative(workdir, spec, allow_root):
"""
prefix = spec.prefix
buildinfo = read_buildinfo_file(workdir)
- old_path = buildinfo['buildpath']
+ old_layout_root = buildinfo['buildpath']
orig_path_names = list()
cur_path_names = list()
for filename in buildinfo['relocate_binaries']:
orig_path_names.append(os.path.join(prefix, filename))
cur_path_names.append(os.path.join(workdir, filename))
- if spec.architecture.platform == 'darwin':
+ if (spec.architecture.platform == 'darwin' or
+ spec.architecture.platform == 'test' and
+ platform.system().lower() == 'darwin'):
relocate.make_macho_binaries_relative(cur_path_names, orig_path_names,
- old_path, allow_root)
- else:
+ old_layout_root)
+ if (spec.architecture.platform == 'linux' or
+ spec.architecture.platform == 'test' and
+ platform.system().lower() == 'linux'):
relocate.make_elf_binaries_relative(cur_path_names, orig_path_names,
- old_path, allow_root)
+ old_layout_root)
+ relocate.check_files_relocatable(cur_path_names, allow_root)
orig_path_names = list()
cur_path_names = list()
- for filename in buildinfo.get('relocate_links', []):
- orig_path_names.append(os.path.join(prefix, filename))
- cur_path_names.append(os.path.join(workdir, filename))
+ for linkname in buildinfo.get('relocate_links', []):
+ orig_path_names.append(os.path.join(prefix, linkname))
+ cur_path_names.append(os.path.join(workdir, linkname))
relocate.make_link_relative(cur_path_names, orig_path_names)
-def make_package_placeholder(workdir, spec, allow_root):
+def check_package_relocatable(workdir, spec, allow_root):
"""
Check if package binaries are relocatable.
Change links to placeholder links.
"""
- prefix = spec.prefix
buildinfo = read_buildinfo_file(workdir)
cur_path_names = list()
for filename in buildinfo['relocate_binaries']:
cur_path_names.append(os.path.join(workdir, filename))
relocate.check_files_relocatable(cur_path_names, allow_root)
- cur_path_names = list()
- for filename in buildinfo.get('relocate_links', []):
- cur_path_names.append(os.path.join(workdir, filename))
- relocate.make_link_placeholder(cur_path_names, workdir, prefix)
-
-def relocate_package(workdir, spec, allow_root):
+def relocate_package(spec, allow_root):
"""
Relocate the given package
"""
+ workdir = str(spec.prefix)
buildinfo = read_buildinfo_file(workdir)
- new_path = str(spack.store.layout.root)
- new_prefix = str(spack.paths.prefix)
- old_path = str(buildinfo['buildpath'])
- old_prefix = str(buildinfo.get('spackprefix',
- '/not/in/buildinfo/dictionary'))
- rel = buildinfo.get('relative_rpaths', False)
-
- tty.msg("Relocating package from",
- "%s to %s." % (old_path, new_path))
- path_names = set()
+ new_layout_root = str(spack.store.layout.root)
+ new_prefix = str(spec.prefix)
+ new_rel_prefix = str(os.path.relpath(new_prefix, new_layout_root))
+ new_spack_prefix = str(spack.paths.prefix)
+ old_layout_root = str(buildinfo['buildpath'])
+ old_spack_prefix = str(buildinfo.get('spackprefix'))
+ old_rel_prefix = buildinfo.get('relative_prefix')
+ old_prefix = os.path.join(old_layout_root, old_rel_prefix)
+ rel = buildinfo.get('relative_rpaths')
+ prefix_to_hash = buildinfo.get('prefix_to_hash', None)
+ if (old_rel_prefix != new_rel_prefix and not prefix_to_hash):
+ msg = "Package tarball was created from an install "
+ msg += "prefix with a different directory layout and an older "
+ msg += "buildcache create implementation. It cannot be relocated."
+ raise NewLayoutException(msg)
+ # older buildcaches do not have the prefix_to_hash dictionary
+ # need to set an empty dictionary and add one entry to
+ # prefix_to_prefix to reproduce the old behavior
+ if not prefix_to_hash:
+ prefix_to_hash = dict()
+ hash_to_prefix = dict()
+ hash_to_prefix[spec.format('{hash}')] = str(spec.package.prefix)
+ new_deps = spack.build_environment.get_rpath_deps(spec.package)
+ for d in new_deps:
+ hash_to_prefix[d.format('{hash}')] = str(d.prefix)
+ prefix_to_prefix = dict()
+ for orig_prefix, hash in prefix_to_hash.items():
+ prefix_to_prefix[orig_prefix] = hash_to_prefix.get(hash, None)
+ prefix_to_prefix[old_prefix] = new_prefix
+ prefix_to_prefix[old_layout_root] = new_layout_root
+
+ tty.debug("Relocating package from",
+ "%s to %s." % (old_layout_root, new_layout_root))
+
+ def is_backup_file(file):
+ return file.endswith('~')
+
+ # Text files containing the prefix text
+ text_names = list()
for filename in buildinfo['relocate_textfiles']:
- path_name = os.path.join(workdir, filename)
+ text_name = os.path.join(workdir, filename)
# Don't add backup files generated by filter_file during install step.
- if not path_name.endswith('~'):
- path_names.add(path_name)
- relocate.relocate_text(path_names, oldpath=old_path,
- newpath=new_path, oldprefix=old_prefix,
- newprefix=new_prefix)
- # If the binary files in the package were not edited to use
- # relative RPATHs, then the RPATHs need to be relocated
- if rel:
- if old_path != new_path:
- files_to_relocate = list(filter(
- lambda pathname: not relocate.file_is_relocatable(
- pathname, paths_to_relocate=[old_path, old_prefix]),
- map(lambda filename: os.path.join(workdir, filename),
- buildinfo['relocate_binaries'])))
-
- if len(old_path) < len(new_path) and files_to_relocate:
- tty.debug('Cannot do a binary string replacement with padding '
- 'for package because %s is longer than %s.' %
- (new_path, old_path))
- else:
- for path_name in files_to_relocate:
- relocate.replace_prefix_bin(path_name, old_path, new_path)
- else:
- path_names = set()
- for filename in buildinfo['relocate_binaries']:
- path_name = os.path.join(workdir, filename)
- path_names.add(path_name)
- if spec.architecture.platform == 'darwin':
- relocate.relocate_macho_binaries(path_names, old_path,
- new_path, allow_root)
- else:
- relocate.relocate_elf_binaries(path_names, old_path,
- new_path, allow_root)
- path_names = set()
- for filename in buildinfo.get('relocate_links', []):
- path_name = os.path.join(workdir, filename)
- path_names.add(path_name)
- relocate.relocate_links(path_names, old_path, new_path)
+ if not is_backup_file(text_name):
+ text_names.append(text_name)
+
+    # If we are installing back to the same location, don't replace anything.
+ if old_layout_root != new_layout_root:
+ paths_to_relocate = [old_spack_prefix, old_layout_root]
+ paths_to_relocate.extend(prefix_to_hash.keys())
+ files_to_relocate = list(filter(
+ lambda pathname: not relocate.file_is_relocatable(
+ pathname, paths_to_relocate=paths_to_relocate),
+ map(lambda filename: os.path.join(workdir, filename),
+ buildinfo['relocate_binaries'])))
+ # If the buildcache was not created with relativized rpaths
+ # do the relocation of path in binaries
+ if (spec.architecture.platform == 'darwin' or
+ spec.architecture.platform == 'test' and
+ platform.system().lower() == 'darwin'):
+ relocate.relocate_macho_binaries(files_to_relocate,
+ old_layout_root,
+ new_layout_root,
+ prefix_to_prefix, rel,
+ old_prefix,
+ new_prefix)
+ if (spec.architecture.platform == 'linux' or
+ spec.architecture.platform == 'test' and
+ platform.system().lower() == 'linux'):
+ relocate.relocate_elf_binaries(files_to_relocate,
+ old_layout_root,
+ new_layout_root,
+ prefix_to_prefix, rel,
+ old_prefix,
+ new_prefix)
+ # Relocate links to the new install prefix
+ link_names = [linkname
+ for linkname in buildinfo.get('relocate_links', [])]
+ relocate.relocate_links(link_names,
+ old_layout_root,
+ new_layout_root,
+ old_prefix,
+ new_prefix,
+ prefix_to_prefix)
+
+ # For all buildcaches
+ # relocate the install prefixes in text files including dependencies
+ relocate.relocate_text(text_names,
+ old_layout_root, new_layout_root,
+ old_prefix, new_prefix,
+ old_spack_prefix,
+ new_spack_prefix,
+ prefix_to_prefix)
+
+ # relocate the install prefixes in binary files including dependencies
+ relocate.relocate_text_bin(files_to_relocate,
+ old_layout_root, new_layout_root,
+ old_prefix, new_prefix,
+ old_spack_prefix,
+ new_spack_prefix,
+ prefix_to_prefix)
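
The heart of the new scheme is the hash-keyed prefix translation assembled
above; a minimal sketch with illustrative paths and hashes:

.. code-block:: python

   # From the buildcache's buildinfo: old install prefix -> dag hash
   prefix_to_hash = {'/old/root/zlib-1.2.11-abc123': 'abc123'}
   # From the spec being installed: dag hash -> new install prefix
   hash_to_prefix = {'abc123': '/new/root/zlib-1.2.11-abc123'}

   # Matching on hashes survives layout changes between the two trees.
   prefix_to_prefix = dict((old, hash_to_prefix.get(hsh))
                           for old, hsh in prefix_to_hash.items())
   prefix_to_prefix['/old/root/pkg-prefix'] = '/new/root/pkg-prefix'
   prefix_to_prefix['/old/root'] = '/new/root'   # layout roots last
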
def extract_tarball(spec, filename, allow_root=False, unsigned=False,
@@ -589,16 +658,16 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
stagepath = os.path.dirname(filename)
spackfile_name = tarball_name(spec, '.spack')
spackfile_path = os.path.join(stagepath, spackfile_name)
- tarfile_name = tarball_name(spec, '.tar.bz2')
+ tarfile_name = tarball_name(spec, '.tar.gz')
tarfile_path = os.path.join(tmpdir, tarfile_name)
specfile_name = tarball_name(spec, '.spec.yaml')
specfile_path = os.path.join(tmpdir, specfile_name)
with closing(tarfile.open(spackfile_path, 'r')) as tar:
tar.extractall(tmpdir)
- # older buildcache tarfiles use gzip compression
+ # some buildcache tarfiles use bzip2 compression
if not os.path.exists(tarfile_path):
- tarfile_name = tarball_name(spec, '.tar.gz')
+ tarfile_name = tarball_name(spec, '.tar.bz2')
tarfile_path = os.path.join(tmpdir, tarfile_name)
if not unsigned:
if os.path.exists('%s.asc' % specfile_path):
@@ -607,7 +676,7 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
Gpg.verify('%s.asc' % specfile_path, specfile_path, suppress)
except Exception as e:
shutil.rmtree(tmpdir)
- tty.die(e)
+ raise e
else:
shutil.rmtree(tmpdir)
raise NoVerifyException(
@@ -636,22 +705,30 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
# if the original relative prefix is in the spec file use it
buildinfo = spec_dict.get('buildinfo', {})
old_relative_prefix = buildinfo.get('relative_prefix', new_relative_prefix)
+ rel = buildinfo.get('relative_rpaths')
# if the original relative prefix and new relative prefix differ the
# directory layout has changed and the buildcache cannot be installed
- if old_relative_prefix != new_relative_prefix:
- shutil.rmtree(tmpdir)
- msg = "Package tarball was created from an install "
- msg += "prefix with a different directory layout.\n"
- msg += "It cannot be relocated."
- raise NewLayoutException(msg)
+ # if it was created with relative rpaths
+ info = 'old relative prefix %s\nnew relative prefix %s\nrelative rpaths %s'
+ tty.debug(info %
+ (old_relative_prefix, new_relative_prefix, rel))
+# if (old_relative_prefix != new_relative_prefix and (rel)):
+# shutil.rmtree(tmpdir)
+# msg = "Package tarball was created from an install "
+# msg += "prefix with a different directory layout. "
+# msg += "It cannot be relocated because it "
+# msg += "uses relative rpaths."
+# raise NewLayoutException(msg)
# extract the tarball in a temp directory
with closing(tarfile.open(tarfile_path, 'r')) as tar:
tar.extractall(path=tmpdir)
- # the base of the install prefix is used when creating the tarball
- # so the pathname should be the same now that the directory layout
- # is confirmed
- workdir = os.path.join(tmpdir, os.path.basename(spec.prefix))
+ # get the parent directory of the file .spack/binary_distribution
+    # this should be the directory unpacked from the tarball, whose
+    # name is unknown because the prefix naming is unknown
+ bindist_file = glob.glob('%s/*/.spack/binary_distribution' % tmpdir)[0]
+ workdir = re.sub('/.spack/binary_distribution$', '', bindist_file)
+ tty.debug('workdir %s' % workdir)
# install_tree copies hardlinks
# create a temporary tarfile from prefix and exract it to workdir
# tarfile preserves hardlinks
@@ -669,10 +746,10 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
os.remove(specfile_path)
try:
- relocate_package(spec.prefix, spec, allow_root)
+ relocate_package(spec, allow_root)
except Exception as e:
shutil.rmtree(spec.prefix)
- tty.die(e)
+ raise e
else:
manifest_file = os.path.join(spec.prefix,
spack.store.layout.metadata_dir,
@@ -682,6 +759,8 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
tty.warn('No manifest file in tarball for spec %s' % spec_id)
finally:
shutil.rmtree(tmpdir)
+ if os.path.exists(filename):
+ os.remove(filename)
# Internal cache for downloaded specs
@@ -729,7 +808,7 @@ def get_spec(spec=None, force=False):
tty.debug("No Spack mirrors are currently configured")
return {}
- if spec in _cached_specs:
+ if _cached_specs and spec in _cached_specs:
return _cached_specs
for mirror in spack.mirror.MirrorCollection().values():
@@ -799,6 +878,7 @@ def get_specs(force=False, allarch=False):
def get_keys(install=False, trust=False, force=False):
"""
Get pgp public keys available on mirror
+ with suffix .key or .pub
"""
if not spack.mirror.MirrorCollection():
tty.die("Please add a spack mirror to allow " +
@@ -813,18 +893,20 @@ def get_keys(install=False, trust=False, force=False):
mirror_dir = url_util.local_file_path(fetch_url_build_cache)
if mirror_dir:
tty.msg("Finding public keys in %s" % mirror_dir)
- files = os.listdir(mirror_dir)
+ files = os.listdir(str(mirror_dir))
for file in files:
- if re.search(r'\.key', file):
+ if re.search(r'\.key', file) or re.search(r'\.pub', file):
link = url_util.join(fetch_url_build_cache, file)
keys.add(link)
else:
tty.msg("Finding public keys at %s" %
url_util.format(fetch_url_build_cache))
- p, links = web_util.spider(fetch_url_build_cache, depth=1)
+        # For an S3 mirror we need to request index.html directly
+ p, links = web_util.spider(
+ url_util.join(fetch_url_build_cache, 'index.html'), depth=1)
for link in links:
- if re.search(r'\.key', link):
+ if re.search(r'\.key', link) or re.search(r'\.pub', link):
keys.add(link)
for link in keys:
diff --git a/lib/spack/spack/build_systems/autotools.py b/lib/spack/spack/build_systems/autotools.py
index c21b8dad71..5b4f223d41 100644
--- a/lib/spack/spack/build_systems/autotools.py
+++ b/lib/spack/spack/build_systems/autotools.py
@@ -263,6 +263,12 @@ class AutotoolsPackage(PackageBase):
if values:
values_str = '{0}={1}'.format(flag.upper(), ' '.join(values))
self.configure_flag_args.append(values_str)
+ # Spack's fflags are meant for both F77 and FC, therefore we
+        # additionally set FCFLAGS if required.
+ values = flags.get('fflags', None)
+ if values:
+ values_str = 'FCFLAGS={0}'.format(' '.join(values))
+ self.configure_flag_args.append(values_str)
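
A sketch of the effect: for a package built with ``fflags="-O2 -g"``, the
configure arguments now carry both variables (values illustrative):

.. code-block:: python

   # 'FFLAGS=...' comes from the generic flag loop above; 'FCFLAGS=...' is
   # the new duplicate so the modern Fortran compiler sees the flags too.
   configure_flag_args = ['FFLAGS=-O2 -g', 'FCFLAGS=-O2 -g']
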
def configure(self, spec, prefix):
"""Runs configure with the arguments specified in
diff --git a/lib/spack/spack/build_systems/cmake.py b/lib/spack/spack/build_systems/cmake.py
index 14f33e94e6..d7da957a9d 100644
--- a/lib/spack/spack/build_systems/cmake.py
+++ b/lib/spack/spack/build_systems/cmake.py
@@ -147,33 +147,129 @@ class CMakePackage(PackageBase):
except KeyError:
build_type = 'RelWithDebInfo'
+ define = CMakePackage.define
args = [
'-G', generator,
- '-DCMAKE_INSTALL_PREFIX:PATH={0}'.format(pkg.prefix),
- '-DCMAKE_BUILD_TYPE:STRING={0}'.format(build_type),
+ define('CMAKE_INSTALL_PREFIX', pkg.prefix),
+ define('CMAKE_BUILD_TYPE', build_type),
]
if primary_generator == 'Unix Makefiles':
- args.append('-DCMAKE_VERBOSE_MAKEFILE:BOOL=ON')
+ args.append(define('CMAKE_VERBOSE_MAKEFILE', True))
if platform.mac_ver()[0]:
args.extend([
- '-DCMAKE_FIND_FRAMEWORK:STRING=LAST',
- '-DCMAKE_FIND_APPBUNDLE:STRING=LAST'
+ define('CMAKE_FIND_FRAMEWORK', "LAST"),
+ define('CMAKE_FIND_APPBUNDLE', "LAST"),
])
# Set up CMake rpath
- args.append('-DCMAKE_INSTALL_RPATH_USE_LINK_PATH:BOOL=FALSE')
- rpaths = ';'.join(spack.build_environment.get_rpaths(pkg))
- args.append('-DCMAKE_INSTALL_RPATH:STRING={0}'.format(rpaths))
+ args.extend([
+ define('CMAKE_INSTALL_RPATH_USE_LINK_PATH', False),
+ define('CMAKE_INSTALL_RPATH',
+ spack.build_environment.get_rpaths(pkg)),
+ ])
# CMake's find_package() looks in CMAKE_PREFIX_PATH first, help CMake
# to find immediate link dependencies in right places:
deps = [d.prefix for d in
pkg.spec.dependencies(deptype=('build', 'link'))]
deps = filter_system_paths(deps)
- args.append('-DCMAKE_PREFIX_PATH:STRING={0}'.format(';'.join(deps)))
+ args.append(define('CMAKE_PREFIX_PATH', deps))
return args
+ @staticmethod
+ def define(cmake_var, value):
+ """Return a CMake command line argument that defines a variable.
+
+ The resulting argument will convert boolean values to OFF/ON
+ and lists/tuples to CMake semicolon-separated string lists. All other
+ values will be interpreted as strings.
+
+ Examples:
+
+ .. code-block:: python
+
+ [define('BUILD_SHARED_LIBS', True),
+ define('CMAKE_CXX_STANDARD', 14),
+ define('swr', ['avx', 'avx2'])]
+
+ will generate the following configuration options:
+
+ .. code-block:: console
+
+ ["-DBUILD_SHARED_LIBS:BOOL=ON",
+ "-DCMAKE_CXX_STANDARD:STRING=14",
+                "-DSWR:STRING=avx;avx2"]
+
+ """
+        # Convert booleans to OFF/ON and lists/tuples to semicolon-
+        # separated CMake string lists; all other values are stringified
+ if isinstance(value, bool):
+ kind = 'BOOL'
+ value = "ON" if value else "OFF"
+ else:
+ kind = 'STRING'
+ if isinstance(value, (list, tuple)):
+ value = ";".join(str(v) for v in value)
+ else:
+ value = str(value)
+
+ return "".join(["-D", cmake_var, ":", kind, "=", value])
+
+ def define_from_variant(self, cmake_var, variant=None):
+ """Return a CMake command line argument from the given variant's value.
+
+ The optional ``variant`` argument defaults to the lower-case transform
+ of ``cmake_var``.
+
+ This utility function is similar to
+ :py:meth:`~.AutotoolsPackage.with_or_without`.
+
+ Examples:
+
+ Given a package with:
+
+ .. code-block:: python
+
+ variant('cxxstd', default='11', values=('11', '14'),
+ multi=False, description='')
+ variant('shared', default=True, description='')
+ variant('swr', values=any_combination_of('avx', 'avx2'),
+ description='')
+
+ calling this function like:
+
+ .. code-block:: python
+
+ [define_from_variant('BUILD_SHARED_LIBS', 'shared'),
+ define_from_variant('CMAKE_CXX_STANDARD', 'cxxstd'),
+ define_from_variant('SWR')]
+
+ will generate the following configuration options:
+
+ .. code-block:: console
+
+ ["-DBUILD_SHARED_LIBS:BOOL=ON",
+ "-DCMAKE_CXX_STANDARD:STRING=14",
+                "-DSWR:STRING=avx;avx2"]
+
+ for ``<spec-name> cxxstd=14 +shared swr=avx,avx2``
+ """
+
+ if variant is None:
+ variant = cmake_var.lower()
+
+ if variant not in self.variants:
+ raise KeyError(
+ '"{0}" is not a variant of "{1}"'.format(variant, self.name))
+
+ value = self.spec.variants[variant].value
+ if isinstance(value, (tuple, list)):
+ # Sort multi-valued variants for reproducibility
+ value = sorted(value)
+
+ return self.define(cmake_var, value)
+
def flags_to_build_system_args(self, flags):
"""Produces a list of all command line arguments to pass the specified
compiler flags to cmake. Note CMAKE does not have a cppflags option,
diff --git a/lib/spack/spack/build_systems/cuda.py b/lib/spack/spack/build_systems/cuda.py
index fc96cffe60..50a7002dbb 100644
--- a/lib/spack/spack/build_systems/cuda.py
+++ b/lib/spack/spack/build_systems/cuda.py
@@ -13,39 +13,65 @@ class CudaPackage(PackageBase):
"""Auxiliary class which contains CUDA variant, dependencies and conflicts
and is meant to unify and facilitate its usage.
"""
+ maintainers = ['ax3l', 'svenevs']
- # FIXME: keep cuda and cuda_arch separate to make usage easier untill
+ # https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#gpu-feature-list
+ # https://developer.nvidia.com/cuda-gpus
+ # https://en.wikipedia.org/wiki/CUDA#GPUs_supported
+ cuda_arch_values = [
+ '10', '11', '12', '13',
+ '20', '21',
+ '30', '32', '35', '37',
+ '50', '52', '53',
+ '60', '61', '62',
+ '70', '72', '75',
+ ]
+
+ # FIXME: keep cuda and cuda_arch separate to make usage easier until
# Spack has depends_on(cuda, when='cuda_arch!=None') or alike
variant('cuda', default=False,
description='Build with CUDA')
- # see http://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#gpu-feature-list
- # https://developer.nvidia.com/cuda-gpus
+
variant('cuda_arch',
description='CUDA architecture',
- values=spack.variant.any_combination_of(
- '20', '30', '32', '35', '50', '52', '53', '60', '61',
- '62', '70', '72', '75'
- ))
+ values=spack.variant.any_combination_of(*cuda_arch_values))
- # see http://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#nvcc-examples
- # and http://llvm.org/docs/CompileCudaWithLLVM.html#compiling-cuda-code
+ # https://docs.nvidia.com/cuda/cuda-compiler-driver-nvcc/index.html#nvcc-examples
+ # https://llvm.org/docs/CompileCudaWithLLVM.html#compiling-cuda-code
@staticmethod
def cuda_flags(arch_list):
return [('--generate-code arch=compute_{0},code=sm_{0} '
'--generate-code arch=compute_{0},code=compute_{0}').format(s)
for s in arch_list]
- depends_on("cuda@7:", when='+cuda')
+ depends_on('cuda', when='+cuda')
# CUDA version vs Architecture
- depends_on("cuda@8:", when='cuda_arch=60')
- depends_on("cuda@8:", when='cuda_arch=61')
- depends_on("cuda@8:", when='cuda_arch=62')
- depends_on("cuda@9:", when='cuda_arch=70')
- depends_on("cuda@9:", when='cuda_arch=72')
- depends_on("cuda@10:", when='cuda_arch=75')
+ # https://en.wikipedia.org/wiki/CUDA#GPUs_supported
+ depends_on('cuda@:6.0', when='cuda_arch=10')
+ depends_on('cuda@:6.5', when='cuda_arch=11')
+ depends_on('cuda@2.1:6.5', when='cuda_arch=12')
+ depends_on('cuda@2.1:6.5', when='cuda_arch=13')
+
+ depends_on('cuda@3.0:8.0', when='cuda_arch=20')
+ depends_on('cuda@3.2:8.0', when='cuda_arch=21')
+
+ depends_on('cuda@5.0:10.2', when='cuda_arch=30')
+ depends_on('cuda@5.0:10.2', when='cuda_arch=32')
+ depends_on('cuda@5.0:10.2', when='cuda_arch=35')
+ depends_on('cuda@6.5:10.2', when='cuda_arch=37')
+
+ depends_on('cuda@6.0:', when='cuda_arch=50')
+ depends_on('cuda@6.5:', when='cuda_arch=52')
+ depends_on('cuda@6.5:', when='cuda_arch=53')
+
+ depends_on('cuda@8.0:', when='cuda_arch=60')
+ depends_on('cuda@8.0:', when='cuda_arch=61')
+ depends_on('cuda@8.0:', when='cuda_arch=62')
- depends_on('cuda@:8', when='cuda_arch=20')
+ depends_on('cuda@9.0:', when='cuda_arch=70')
+ depends_on('cuda@9.0:', when='cuda_arch=72')
+ depends_on('cuda@10.0:', when='cuda_arch=75')
# There are at least three cases to be aware of for compiler conflicts
# 1. Linux x86_64
@@ -130,18 +156,8 @@ class CudaPackage(PackageBase):
# `clang-apple@x.y.z as a possible fix.
# Compiler conflicts will eventually be taken from here:
# https://docs.nvidia.com/cuda/cuda-installation-guide-mac-os-x/index.html#abstract
+ conflicts('platform=darwin', when='+cuda ^cuda@11.0:')
# Make sure cuda_arch can not be used without +cuda
- conflicts('~cuda', when='cuda_arch=20')
- conflicts('~cuda', when='cuda_arch=30')
- conflicts('~cuda', when='cuda_arch=32')
- conflicts('~cuda', when='cuda_arch=35')
- conflicts('~cuda', when='cuda_arch=50')
- conflicts('~cuda', when='cuda_arch=52')
- conflicts('~cuda', when='cuda_arch=53')
- conflicts('~cuda', when='cuda_arch=60')
- conflicts('~cuda', when='cuda_arch=61')
- conflicts('~cuda', when='cuda_arch=62')
- conflicts('~cuda', when='cuda_arch=70')
- conflicts('~cuda', when='cuda_arch=72')
- conflicts('~cuda', when='cuda_arch=75')
+ for value in cuda_arch_values:
+ conflicts('~cuda', when='cuda_arch=' + value)
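
As a quick illustration of the ``cuda_flags`` helper retained above (output
format from the hunk; the arch values are examples):

.. code-block:: python

   CudaPackage.cuda_flags(['60', '70'])
   # -> ['--generate-code arch=compute_60,code=sm_60 '
   #     '--generate-code arch=compute_60,code=compute_60',
   #     '--generate-code arch=compute_70,code=sm_70 '
   #     '--generate-code arch=compute_70,code=compute_70']
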
diff --git a/lib/spack/spack/build_systems/sip.py b/lib/spack/spack/build_systems/sip.py
index 314f91d5d2..f814ef1837 100644
--- a/lib/spack/spack/build_systems/sip.py
+++ b/lib/spack/spack/build_systems/sip.py
@@ -5,9 +5,10 @@
import inspect
-from llnl.util.filesystem import working_dir
-from spack.directives import depends_on, extends, resource
-from spack.package import PackageBase, run_before, run_after
+from llnl.util.filesystem import working_dir, join_path
+from spack.directives import depends_on, extends
+from spack.package import PackageBase, run_after
+import os
class SIPPackage(PackageBase):
@@ -40,33 +41,12 @@ class SIPPackage(PackageBase):
extends('python')
depends_on('qt')
-
- resource(name='sip',
- url='https://www.riverbankcomputing.com/static/Downloads/sip/4.19.18/sip-4.19.18.tar.gz',
- sha256='c0bd863800ed9b15dcad477c4017cdb73fa805c25908b0240564add74d697e1e',
- destination='.')
+ depends_on('py-sip')
def python(self, *args, **kwargs):
"""The python ``Executable``."""
inspect.getmodule(self).python(*args, **kwargs)
- @run_before('configure')
- def install_sip(self):
- args = [
- '--sip-module={0}'.format(self.sip_module),
- '--bindir={0}'.format(self.prefix.bin),
- '--destdir={0}'.format(inspect.getmodule(self).site_packages_dir),
- '--incdir={0}'.format(inspect.getmodule(self).python_include_dir),
- '--sipdir={0}'.format(self.prefix.share.sip),
- '--stubsdir={0}'.format(inspect.getmodule(self).site_packages_dir),
- ]
-
- with working_dir('sip-4.19.18'):
- self.python('configure.py', *args)
-
- inspect.getmodule(self).make()
- inspect.getmodule(self).make('install')
-
def configure_file(self):
"""Returns the name of the configure file to use."""
return 'configure.py'
@@ -77,12 +57,15 @@ class SIPPackage(PackageBase):
args = self.configure_args()
+ python_include_dir = 'python' + str(spec['python'].version.up_to(2))
+
args.extend([
'--verbose',
'--confirm-license',
'--qmake', spec['qt'].prefix.bin.qmake,
- '--sip', prefix.bin.sip,
- '--sip-incdir', inspect.getmodule(self).python_include_dir,
+ '--sip', spec['py-sip'].prefix.bin.sip,
+ '--sip-incdir', join_path(spec['py-sip'].prefix.include,
+ python_include_dir),
'--bindir', prefix.bin,
'--destdir', inspect.getmodule(self).site_packages_dir,
])
@@ -131,3 +114,14 @@ class SIPPackage(PackageBase):
# Check that self.prefix is there after installation
run_after('install')(PackageBase.sanity_check_prefix)
+
+ @run_after('install')
+ def extend_path_setup(self):
+ # See github issue #14121 and PR #15297
+ module = self.spec['py-sip'].variants['module'].value
+ if module != 'sip':
+ module = module.split('.')[0]
+ with working_dir(inspect.getmodule(self).site_packages_dir):
+ with open(os.path.join(module, '__init__.py'), 'a') as f:
+ f.write('from pkgutil import extend_path\n')
+ f.write('__path__ = extend_path(__path__, __name__)\n')
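
The two appended lines make the module a ``pkgutil``-style namespace package,
so pieces installed into different prefixes (for example, a package built here
and the module provided by ``py-sip``) can be imported together; the generated
``__init__.py`` gains:

.. code-block:: python

   from pkgutil import extend_path
   __path__ = extend_path(__path__, __name__)
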
diff --git a/lib/spack/spack/caches.py b/lib/spack/spack/caches.py
index 98fa8d5795..49624c06b2 100644
--- a/lib/spack/spack/caches.py
+++ b/lib/spack/spack/caches.py
@@ -50,8 +50,9 @@ def _fetch_cache():
class MirrorCache(object):
- def __init__(self, root):
+ def __init__(self, root, skip_unstable_versions):
self.root = os.path.abspath(root)
+ self.skip_unstable_versions = skip_unstable_versions
def store(self, fetcher, relative_dest):
"""Fetch and relocate the fetcher's target into our mirror cache."""
@@ -84,5 +85,3 @@ class MirrorCache(object):
#: Spack's local cache for downloaded source archives
fetch_cache = llnl.util.lang.Singleton(_fetch_cache)
-
-mirror_cache = None
diff --git a/lib/spack/spack/ci.py b/lib/spack/spack/ci.py
index ed06524073..cbdfccb8bf 100644
--- a/lib/spack/spack/ci.py
+++ b/lib/spack/spack/ci.py
@@ -947,8 +947,9 @@ def read_cdashid_from_mirror(spec, mirror_url):
def push_mirror_contents(env, spec, yaml_path, mirror_url, build_id):
if mirror_url:
tty.debug('Creating buildcache')
- buildcache._createtarball(env, yaml_path, None, mirror_url, None,
- True, True, False, False, True, False)
+ buildcache._createtarball(env, yaml_path, None, True, False,
+ mirror_url, None, True, False, False, True,
+ False)
if build_id:
tty.debug('Writing cdashid ({0}) to remote mirror: {1}'.format(
build_id, mirror_url))
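
Because the positional call is hard to read, here is the same call in keyword
form, following the ``_createtarball`` signature introduced in
``lib/spack/spack/cmd/buildcache.py`` below:

.. code-block:: python

   buildcache._createtarball(
       env, spec_yaml=yaml_path, packages=None,
       add_spec=True, add_deps=False,
       output_location=mirror_url, key=None,
       force=True, rel=False, unsigned=False,
       allow_root=True, no_rebuild_index=False)
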
diff --git a/lib/spack/spack/cmd/buildcache.py b/lib/spack/spack/cmd/buildcache.py
index 392984f852..35e735cdf1 100644
--- a/lib/spack/spack/cmd/buildcache.py
+++ b/lib/spack/spack/cmd/buildcache.py
@@ -52,16 +52,35 @@ def setup_parser(subparser):
create.add_argument('-k', '--key', metavar='key',
type=str, default=None,
help="Key for signing.")
- create.add_argument('-d', '--directory', metavar='directory',
- type=str, default='.',
- help="directory in which to save the tarballs.")
+ output = create.add_mutually_exclusive_group(required=True)
+ output.add_argument('-d', '--directory',
+ metavar='directory',
+ type=str,
+ help="local directory where " +
+ "buildcaches will be written.")
+ output.add_argument('-m', '--mirror-name',
+ metavar='mirror-name',
+ type=str,
+ help="name of the mirror where " +
+ "buildcaches will be written.")
+ output.add_argument('--mirror-url',
+ metavar='mirror-url',
+ type=str,
+ help="URL of the mirror where " +
+ "buildcaches will be written.")
create.add_argument('--no-rebuild-index', action='store_true',
default=False, help="skip rebuilding index after " +
"building package(s)")
create.add_argument('-y', '--spec-yaml', default=None,
help='Create buildcache entry for spec from yaml file')
- create.add_argument('--no-deps', action='store_true', default='false',
- help='Create buildcache entry wo/ dependencies')
+ create.add_argument('--only', default='package,dependencies',
+ dest='things_to_install',
+ choices=['package', 'dependencies'],
+                        help=('Select the buildcache mode. The default is to'
+ ' build a cache for the package along with all'
+ ' its dependencies. Alternatively, one can'
+ ' decide to build a cache for only the package'
+ ' or only the dependencies'))
arguments.add_common_arguments(create, ['specs'])
create.set_defaults(func=createtarball)
@@ -76,6 +95,10 @@ def setup_parser(subparser):
install.add_argument('-u', '--unsigned', action='store_true',
help="install unsigned buildcache" +
" tarballs for testing")
+ install.add_argument('-o', '--otherarch', action='store_true',
+ help="install specs from other architectures" +
+ " instead of default platform and OS")
+
arguments.add_common_arguments(install, ['specs'])
install.set_defaults(func=installtarball)
@@ -252,7 +275,8 @@ def find_matching_specs(pkgs, allow_multiple_matches=False, env=None):
return specs_from_cli
-def match_downloaded_specs(pkgs, allow_multiple_matches=False, force=False):
+def match_downloaded_specs(pkgs, allow_multiple_matches=False, force=False,
+ other_arch=False):
"""Returns a list of specs matching the not necessarily
concretized specs given from cli
@@ -266,7 +290,7 @@ def match_downloaded_specs(pkgs, allow_multiple_matches=False, force=False):
# List of specs that match expressions given via command line
specs_from_cli = []
has_errors = False
- allarch = False
+ allarch = other_arch
specs = bindist.get_specs(force, allarch)
for pkg in pkgs:
matches = []
@@ -299,8 +323,9 @@ def match_downloaded_specs(pkgs, allow_multiple_matches=False, force=False):
return specs_from_cli
-def _createtarball(env, spec_yaml, packages, directory, key, no_deps, force,
- rel, unsigned, allow_root, no_rebuild_index):
+def _createtarball(env, spec_yaml, packages, add_spec, add_deps,
+ output_location, key, force, rel, unsigned, allow_root,
+ no_rebuild_index):
if spec_yaml:
packages = set()
with open(spec_yaml, 'r') as fd:
@@ -320,13 +345,12 @@ def _createtarball(env, spec_yaml, packages, directory, key, no_deps, force,
pkgs = set(packages)
specs = set()
- outdir = '.'
- if directory:
- outdir = directory
-
- mirror = spack.mirror.MirrorCollection().lookup(outdir)
+ mirror = spack.mirror.MirrorCollection().lookup(output_location)
outdir = url_util.format(mirror.push_url)
+ msg = 'Buildcache files will be output to %s/build_cache' % outdir
+ tty.msg(msg)
+
signkey = None
if key:
signkey = key
@@ -342,14 +366,23 @@ def _createtarball(env, spec_yaml, packages, directory, key, no_deps, force,
tty.debug('skipping external or virtual spec %s' %
match.format())
else:
- tty.debug('adding matching spec %s' % match.format())
- specs.add(match)
- if no_deps is True:
+ if add_spec:
+ tty.debug('adding matching spec %s' % match.format())
+ specs.add(match)
+ else:
+ tty.debug('skipping matching spec %s' % match.format())
+
+ if not add_deps:
continue
+
tty.debug('recursing dependencies')
for d, node in match.traverse(order='post',
depth=True,
deptype=('link', 'run')):
+ # skip root, since it's handled above
+ if d == 0:
+ continue
+
if node.external or node.virtual:
tty.debug('skipping external or virtual dependency %s' %
node.format())
@@ -360,14 +393,10 @@ def _createtarball(env, spec_yaml, packages, directory, key, no_deps, force,
tty.debug('writing tarballs to %s/build_cache' % outdir)
for spec in specs:
- tty.msg('creating binary cache file for package %s ' % spec.format())
- try:
- bindist.build_tarball(spec, outdir, force, rel,
- unsigned, allow_root, signkey,
- not no_rebuild_index)
- except Exception as e:
- tty.warn('%s' % e)
- pass
+ tty.debug('creating binary cache file for package %s ' % spec.format())
+ bindist.build_tarball(spec, outdir, force, rel,
+ unsigned, allow_root, signkey,
+ not no_rebuild_index)
def createtarball(args):
@@ -376,9 +405,47 @@ def createtarball(args):
# restrict matching to current environment if one is active
env = ev.get_env(args, 'buildcache create')
- _createtarball(env, args.spec_yaml, args.specs, args.directory,
- args.key, args.no_deps, args.force, args.rel, args.unsigned,
- args.allow_root, args.no_rebuild_index)
+ output_location = None
+ if args.directory:
+ output_location = args.directory
+
+ # User meant to provide a path to a local directory.
+ # Ensure that they did not accidentally pass a URL.
+ scheme = url_util.parse(output_location, scheme='<missing>').scheme
+ if scheme != '<missing>':
+ raise ValueError(
+ '"--directory" expected a local path; got a URL, instead')
+
+ # User meant to provide a path to a local directory.
+ # Ensure that the mirror lookup does not mistake it for a named mirror.
+ output_location = 'file://' + output_location
+
+ elif args.mirror_name:
+ output_location = args.mirror_name
+
+ # User meant to provide the name of a preconfigured mirror.
+ # Ensure that the mirror lookup actually returns a named mirror.
+ result = spack.mirror.MirrorCollection().lookup(output_location)
+ if result.name == "<unnamed>":
+ raise ValueError(
+ 'no configured mirror named "{name}"'.format(
+ name=output_location))
+
+ elif args.mirror_url:
+ output_location = args.mirror_url
+
+ # User meant to provide a URL for an anonymous mirror.
+ # Ensure that they actually provided a URL.
+ scheme = url_util.parse(output_location, scheme='<missing>').scheme
+ if scheme == '<missing>':
+ raise ValueError(
+ '"{url}" is not a valid URL'.format(url=output_location))
+ add_spec = ('package' in args.things_to_install)
+ add_deps = ('dependencies' in args.things_to_install)
+
+ _createtarball(env, args.spec_yaml, args.specs, add_spec, add_deps,
+ output_location, args.key, args.force, args.rel,
+ args.unsigned, args.allow_root, args.no_rebuild_index)
def installtarball(args):
@@ -387,7 +454,8 @@ def installtarball(args):
tty.die("build cache file installation requires" +
" at least one package spec argument")
pkgs = set(args.specs)
- matches = match_downloaded_specs(pkgs, args.multiple, args.force)
+ matches = match_downloaded_specs(pkgs, args.multiple, args.force,
+ args.otherarch)
for match in matches:
install_tarball(match, args)
diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py
index 343915868c..eaeaf5337f 100644
--- a/lib/spack/spack/cmd/checksum.py
+++ b/lib/spack/spack/cmd/checksum.py
@@ -56,7 +56,8 @@ def checksum(parser, args):
tty.die("Could not find any versions for {0}".format(pkg.name))
version_lines = spack.stage.get_checksums_for_versions(
- url_dict, pkg.name, keep_stage=args.keep_stage)
+ url_dict, pkg.name, keep_stage=args.keep_stage,
+ fetch_options=pkg.fetch_options)
print()
print(version_lines)
diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py
index f9b7a382ea..304b531b49 100644
--- a/lib/spack/spack/cmd/create.py
+++ b/lib/spack/spack/cmd/create.py
@@ -245,7 +245,9 @@ class PythonPackageTemplate(PackageTemplate):
base_class_name = 'PythonPackage'
dependencies = """\
- # FIXME: Add dependencies if required.
+ # FIXME: Add dependencies if required. Only add the python dependency
+ # if you need specific versions. A generic python dependency is
+    # added implicitly by the PythonPackage class.
# depends_on('python@2.X:2.Y,3.Z:', type=('build', 'run'))
# depends_on('py-setuptools', type='build')
# depends_on('py-foo', type=('build', 'run'))"""
diff --git a/lib/spack/spack/cmd/dependencies.py b/lib/spack/spack/cmd/dependencies.py
index e65e050bfa..7f390341ef 100644
--- a/lib/spack/spack/cmd/dependencies.py
+++ b/lib/spack/spack/cmd/dependencies.py
@@ -9,6 +9,7 @@ from llnl.util.tty.colify import colify
import spack.cmd
import spack.cmd.common.arguments as arguments
import spack.environment as ev
+import spack.package
import spack.repo
import spack.store
@@ -52,22 +53,15 @@ def dependencies(parser, args):
else:
spec = specs[0]
-
- if not spec.virtual:
- packages = [spec.package]
- else:
- packages = [
- spack.repo.get(s.name)
- for s in spack.repo.path.providers_for(spec)]
-
- dependencies = set()
- for pkg in packages:
- possible = pkg.possible_dependencies(
- args.transitive, args.expand_virtuals, deptype=args.deptype)
- dependencies.update(possible)
+ dependencies = spack.package.possible_dependencies(
+ spec,
+ transitive=args.transitive,
+ expand_virtuals=args.expand_virtuals,
+ deptype=args.deptype
+ )
if spec.name in dependencies:
- dependencies.remove(spec.name)
+ del dependencies[spec.name]
if dependencies:
colify(sorted(dependencies))
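
The switch from ``remove`` to ``del`` reflects that
``spack.package.possible_dependencies`` returns a dictionary rather than a
set; a sketch of the shape, with illustrative package names:

.. code-block:: python

   # package name -> set of names of its possible direct dependencies
   dependencies = {
       'mpileaks': set(['callpath', 'mpi']),
       'callpath': set(['dyninst', 'mpi']),
   }
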
diff --git a/lib/spack/spack/cmd/dependents.py b/lib/spack/spack/cmd/dependents.py
index e60733f589..89fd15ffda 100644
--- a/lib/spack/spack/cmd/dependents.py
+++ b/lib/spack/spack/cmd/dependents.py
@@ -30,7 +30,7 @@ def setup_parser(subparser):
def inverted_dependencies():
"""Iterate through all packages and return a dictionary mapping package
- names to possible dependnecies.
+ names to possible dependencies.
Virtual packages are included as sources, so that you can query
dependents of, e.g., `mpi`, but virtuals are not included as
diff --git a/lib/spack/spack/cmd/load.py b/lib/spack/spack/cmd/load.py
index 09f3fd31ee..9a00ad1c58 100644
--- a/lib/spack/spack/cmd/load.py
+++ b/lib/spack/spack/cmd/load.py
@@ -51,6 +51,7 @@ def load(parser, args):
for spec in spack.cmd.parse_specs(args.specs)]
if not args.shell:
+ specs_string = ' '.join(args.specs)
msg = [
"This command works best with Spack's shell support",
""
@@ -58,8 +59,8 @@ def load(parser, args):
'Or, if you want to use `spack load` without initializing',
'shell support, you can run one of these:',
'',
- ' eval `spack load --sh %s` # for bash/sh' % args.specs,
- ' eval `spack load --csh %s` # for csh/tcsh' % args.specs,
+ ' eval `spack load --sh %s` # for bash/sh' % specs_string,
+ ' eval `spack load --csh %s` # for csh/tcsh' % specs_string,
]
tty.msg(*msg)
return 1
diff --git a/lib/spack/spack/cmd/mirror.py b/lib/spack/spack/cmd/mirror.py
index 5206927895..1473550a56 100644
--- a/lib/spack/spack/cmd/mirror.py
+++ b/lib/spack/spack/cmd/mirror.py
@@ -45,7 +45,10 @@ def setup_parser(subparser):
" (this requires significant time and space)")
create_parser.add_argument(
'-f', '--file', help="file with specs of packages to put in mirror")
-
+ create_parser.add_argument(
+ '--skip-unstable-versions', action='store_true',
+        help="don't cache versions unless they identify stable (unchanging)"
+ " source code")
create_parser.add_argument(
'-D', '--dependencies', action='store_true',
help="also fetch all dependencies")
@@ -308,7 +311,8 @@ def mirror_create(args):
existed = web_util.url_exists(directory)
# Actually do the work to create the mirror
- present, mirrored, error = spack.mirror.create(directory, mirror_specs)
+ present, mirrored, error = spack.mirror.create(
+ directory, mirror_specs, args.skip_unstable_versions)
p, m, e = len(present), len(mirrored), len(error)
verb = "updated" if existed else "created"
diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py
index b1c0ad73c7..445d62d2ab 100644
--- a/lib/spack/spack/config.py
+++ b/lib/spack/spack/config.py
@@ -97,6 +97,7 @@ configuration_paths = (
config_defaults = {
'config': {
'debug': False,
+ 'connect_timeout': 10,
'verify_ssl': True,
'checksum': True,
'dirty': False,
@@ -279,6 +280,7 @@ class InternalConfigScope(ConfigScope):
self.sections = syaml.syaml_dict()
if data:
+ data = InternalConfigScope._process_dict_keyname_overrides(data)
for section in data:
dsec = data[section]
validate({section: dsec}, section_schemas[section])
@@ -305,6 +307,25 @@ class InternalConfigScope(ConfigScope):
def __repr__(self):
return '<InternalConfigScope: %s>' % self.name
+ @staticmethod
+ def _process_dict_keyname_overrides(data):
+ """Turn a trailing `:' in a key name into an override attribute."""
+ result = {}
+ for sk, sv in iteritems(data):
+ if sk.endswith(':'):
+ key = syaml.syaml_str(sk[:-1])
+ key.override = True
+ else:
+ key = sk
+
+ if isinstance(sv, dict):
+ result[key]\
+ = InternalConfigScope._process_dict_keyname_overrides(sv)
+ else:
+ result[key] = copy.copy(sv)
+
+ return result
+
class Configuration(object):
"""A full Spack configuration, from a hierarchy of config files.
@@ -504,14 +525,14 @@ class Configuration(object):
Accepts the path syntax described in ``get()``.
"""
- section, _, rest = path.partition(':')
+ parts = _process_config_path(path)
+ section = parts.pop(0)
- if not rest:
+ if not parts:
self.update_config(section, value, scope=scope)
else:
section_data = self.get_config(section, scope=scope)
- parts = rest.split(':')
data = section_data
while len(parts) > 1:
key = parts.pop(0)
@@ -611,7 +632,7 @@ def _config():
"""Singleton Configuration instance.
This constructs one instance associated with this module and returns
- it. It is bundled inside a function so that configuratoin can be
+ it. It is bundled inside a function so that configuration can be
initialized lazily.
Return:
@@ -762,17 +783,12 @@ def _merge_yaml(dest, source):
Config file authors can optionally end any attribute in a dict
with `::` instead of `:`, and the key will override that of the
parent instead of merging.
-
"""
def they_are(t):
return isinstance(dest, t) and isinstance(source, t)
- # If both are None, handle specially and return None.
- if source is None and dest is None:
- return None
-
# If source is None, overwrite with source.
- elif source is None:
+ if source is None:
return None
# Source list is prepended (for precedence)
@@ -798,8 +814,9 @@ def _merge_yaml(dest, source):
# to copy mark information on source keys to dest.
key_marks[sk] = sk
- # ensure that keys are marked in the destination. the key_marks dict
- # ensures we can get the actual source key objects from dest keys
+ # ensure that keys are marked in the destination. The
+ # key_marks dict ensures we can get the actual source key
+ # objects from dest keys
for dk in list(dest.keys()):
if dk in key_marks and syaml.markable(dk):
syaml.mark(dk, key_marks[dk])
@@ -811,9 +828,34 @@ def _merge_yaml(dest, source):
return dest
- # In any other case, overwrite with a copy of the source value.
- else:
- return copy.copy(source)
+ # If we reach here source and dest are either different types or are
+ # not both lists or dicts: replace with source.
+ return copy.copy(source)
+
+
+#
+# Process a path argument to config.set() that may contain overrides ('::' or
+# trailing ':')
+#
+def _process_config_path(path):
+ result = []
+ if path.startswith(':'):
+ raise syaml.SpackYAMLError("Illegal leading `:' in path `{0}'".
+ format(path), '')
+ seen_override_in_path = False
+ while path:
+ front, sep, path = path.partition(':')
+ if (sep and not path) or path.startswith(':'):
+ if seen_override_in_path:
+ raise syaml.SpackYAMLError("Meaningless second override"
+ " indicator `::' in path `{0}'".
+ format(path), '')
+ path = path.lstrip(':')
+ front = syaml.syaml_str(front)
+ front.override = True
+ seen_override_in_path = True
+ result.append(front)
+ return result
#
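
Taken together, ``_process_dict_keyname_overrides`` and ``_process_config_path`` make the ``::`` override marker usable programmatically as well as in YAML. A minimal sketch, assuming ``config:install_tree`` as the key being overridden:

.. code-block:: python

   import spack.config

   # A trailing '::' marks the last path component as an override, so
   # the value replaces lower-precedence scopes instead of merging.
   spack.config.set('config:install_tree::', '/opt/spack-installs')

   # Without the marker, the usual merge semantics apply.
   spack.config.set('config:install_tree', '/opt/spack-installs')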
diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py
index f3c88a75c3..ca9ed67fb5 100644
--- a/lib/spack/spack/database.py
+++ b/lib/spack/spack/database.py
@@ -18,32 +18,33 @@ Prior to the implementation of this store, a directory layout served
as the authoritative database of packages in Spack. This module
provides a cache and a sanity checking mechanism for what is in the
filesystem.
-
"""
+
+import contextlib
import datetime
-import time
import os
-import sys
import socket
-import contextlib
-from six import string_types
-from six import iteritems
-
-from ruamel.yaml.error import MarkedYAMLError, YAMLError
+import sys
+import time
+try:
+ import uuid
+ _use_uuid = True
+except ImportError:
+ _use_uuid = False
+ pass
import llnl.util.tty as tty
-from llnl.util.filesystem import mkdirp
-
-import spack.store
+import six
import spack.repo
import spack.spec
+import spack.store
import spack.util.lock as lk
-import spack.util.spack_yaml as syaml
import spack.util.spack_json as sjson
-from spack.filesystem_view import YamlFilesystemView
-from spack.util.crypto import bit_length
+from llnl.util.filesystem import mkdirp
from spack.directory_layout import DirectoryLayoutError
from spack.error import SpackError
+from spack.filesystem_view import YamlFilesystemView
+from spack.util.crypto import bit_length
from spack.version import Version
# TODO: Provide an API automatically retyring a build after detecting and
@@ -284,29 +285,22 @@ class Database(object):
exist. This is the ``db_dir``.
The Database will attempt to read an ``index.json`` file in
- ``db_dir``. If it does not find one, it will fall back to read
- an ``index.yaml`` if one is present. If that does not exist, it
- will create a database when needed by scanning the entire
- Database root for ``spec.yaml`` files according to Spack's
- ``DirectoryLayout``.
+ ``db_dir``. If that does not exist, it will create a database
+ when needed by scanning the entire Database root for ``spec.yaml``
+ files according to Spack's ``DirectoryLayout``.
Caller may optionally provide a custom ``db_dir`` parameter
- where data will be stored. This is intended to be used for
+ where data will be stored. This is intended to be used for
testing the Database class.
-
"""
self.root = root
- if db_dir is None:
- # If the db_dir is not provided, default to within the db root.
- self._db_dir = os.path.join(self.root, _db_dirname)
- else:
- # Allow customizing the database directory location for testing.
- self._db_dir = db_dir
+ # If the db_dir is not provided, default to within the db root.
+ self._db_dir = db_dir or os.path.join(self.root, _db_dirname)
# Set up layout of database files within the db dir
- self._old_yaml_index_path = os.path.join(self._db_dir, 'index.yaml')
self._index_path = os.path.join(self._db_dir, 'index.json')
+ self._verifier_path = os.path.join(self._db_dir, 'index_verifier')
self._lock_path = os.path.join(self._db_dir, 'lock')
# This is for other classes to use to lock prefix directories.
@@ -328,6 +322,7 @@ class Database(object):
mkdirp(self._failure_dir)
self.is_upstream = is_upstream
+ self.last_seen_verifier = ''
# initialize rest of state.
self.db_lock_timeout = (
@@ -554,7 +549,8 @@ class Database(object):
prefix_lock.release_write()
def _write_to_file(self, stream):
- """Write out the databsae to a JSON file.
+ """Write out the database in JSON format to the stream passed
+ as argument.
This function does not do any locking or transactions.
"""
@@ -576,9 +572,8 @@ class Database(object):
try:
sjson.dump(database, stream)
- except YAMLError as e:
- raise syaml.SpackYAMLError(
- "error writing YAML database:", str(e))
+ except (TypeError, ValueError) as e:
+ raise sjson.SpackJSONError("error writing JSON database:", str(e))
def _read_spec_from_dict(self, hash_key, installs):
"""Recursively construct a spec from a hash in a YAML database.
@@ -649,28 +644,15 @@ class Database(object):
spec._add_dependency(child, dtypes)
- def _read_from_file(self, stream, format='json'):
- """
- Fill database from file, do not maintain old data
- Translate the spec portions from node-dict form to spec form
+ def _read_from_file(self, filename):
+ """Fill database from file, do not maintain old data.
+ Translate the spec portions from node-dict form to spec form.
Does not do any locking.
"""
- if format.lower() == 'json':
- load = sjson.load
- elif format.lower() == 'yaml':
- load = syaml.load
- else:
- raise ValueError("Invalid database format: %s" % format)
-
try:
- if isinstance(stream, string_types):
- with open(stream, 'r') as f:
- fdata = load(f)
- else:
- fdata = load(stream)
- except MarkedYAMLError as e:
- raise syaml.SpackYAMLError("error parsing YAML database:", str(e))
+ with open(filename, 'r') as f:
+ fdata = sjson.load(f)
except Exception as e:
raise CorruptDatabaseError("error parsing database:", str(e))
@@ -682,12 +664,12 @@ class Database(object):
raise CorruptDatabaseError(
"Spack database is corrupt: %s" % msg, self._index_path)
- check('database' in fdata, "No 'database' attribute in YAML.")
+ check('database' in fdata, "no 'database' attribute in JSON DB.")
# High-level file checks
db = fdata['database']
- check('installs' in db, "No 'installs' in YAML DB.")
- check('version' in db, "No 'version' in YAML DB.")
+ check('installs' in db, "no 'installs' in JSON DB.")
+ check('version' in db, "no 'version' in JSON DB.")
installs = db['installs']
@@ -763,7 +745,6 @@ class Database(object):
"""Build database index from scratch based on a directory layout.
Locks the DB if it isn't locked already.
-
"""
if self.is_upstream:
raise UpstreamDatabaseLockingError(
@@ -927,7 +908,6 @@ class Database(object):
after the start of the next transaction, when it read from disk again.
This routine does no locking.
-
"""
# Do not write if exceptions were raised
if type is not None:
@@ -941,6 +921,11 @@ class Database(object):
with open(temp_file, 'w') as f:
self._write_to_file(f)
os.rename(temp_file, self._index_path)
+ if _use_uuid:
+ with open(self._verifier_path, 'w') as f:
+ new_verifier = str(uuid.uuid4())
+ f.write(new_verifier)
+ self.last_seen_verifier = new_verifier
except BaseException as e:
tty.debug(e)
# Clean up temp file if something goes wrong.
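
The ``index_verifier`` written here enables a cheap change-detection test on the read side: every successful write records a fresh ``uuid4`` token, and a reader that still holds the same token can skip re-parsing ``index.json``. A standalone sketch of the idea (file names invented):

.. code-block:: python

   import uuid

   def write_index(index_path, verifier_path, payload):
       # Write the index, then record a fresh token alongside it.
       with open(index_path, 'w') as f:
           f.write(payload)
       token = str(uuid.uuid4())
       with open(verifier_path, 'w') as f:
           f.write(token)
       return token

   def index_changed(verifier_path, last_seen_token):
       # Re-read the index only when the token differs.
       with open(verifier_path, 'r') as f:
           return f.read() != last_seen_token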
@@ -952,35 +937,33 @@ class Database(object):
"""Re-read Database from the data in the set location.
This does no locking, with one exception: it will automatically
- migrate an index.yaml to an index.json if possible. This requires
- taking a write lock.
-
+ try to regenerate a missing DB if local. This requires taking a
+ write lock.
"""
if os.path.isfile(self._index_path):
- # Read from JSON file if a JSON database exists
- self._read_from_file(self._index_path, format='json')
-
- elif os.path.isfile(self._old_yaml_index_path):
- if (not self.is_upstream) and os.access(
- self._db_dir, os.R_OK | os.W_OK):
- # if we can write, then read AND write a JSON file.
- self._read_from_file(self._old_yaml_index_path, format='yaml')
- with lk.WriteTransaction(self.lock):
- self._write(None, None, None)
- else:
- # Read chck for a YAML file if we can't find JSON.
- self._read_from_file(self._old_yaml_index_path, format='yaml')
+ current_verifier = ''
+ if _use_uuid:
+ try:
+ with open(self._verifier_path, 'r') as f:
+ current_verifier = f.read()
+ except BaseException:
+ pass
+ if ((current_verifier != self.last_seen_verifier) or
+ (current_verifier == '')):
+ self.last_seen_verifier = current_verifier
+ # Read from file if a database exists
+ self._read_from_file(self._index_path)
+ return
+ elif self.is_upstream:
+ raise UpstreamDatabaseLockingError(
+ "No database index file is present, and upstream"
+ " databases cannot generate an index file")
- else:
- if self.is_upstream:
- raise UpstreamDatabaseLockingError(
- "No database index file is present, and upstream"
- " databases cannot generate an index file")
- # The file doesn't exist, try to traverse the directory.
- # reindex() takes its own write lock, so no lock here.
- with lk.WriteTransaction(self.lock):
- self._write(None, None, None)
- self.reindex(spack.store.layout)
+ # The file doesn't exist, try to traverse the directory.
+ # reindex() takes its own write lock, so no lock here.
+ with lk.WriteTransaction(self.lock):
+ self._write(None, None, None)
+ self.reindex(spack.store.layout)
def _add(
self,
@@ -1060,7 +1043,9 @@ class Database(object):
)
# Connect dependencies from the DB to the new copy.
- for name, dep in iteritems(spec.dependencies_dict(_tracked_deps)):
+ for name, dep in six.iteritems(
+ spec.dependencies_dict(_tracked_deps)
+ ):
dkey = dep.spec.dag_hash()
upstream, record = self.query_by_spec_hash(dkey)
new_spec._add_dependency(record.spec, dep.deptypes)
@@ -1133,8 +1118,7 @@ class Database(object):
rec.ref_count += 1
def _remove(self, spec):
- """Non-locking version of remove(); does real work.
- """
+ """Non-locking version of remove(); does real work."""
key = self._get_matching_spec_key(spec)
rec = self._data[key]
@@ -1378,7 +1362,7 @@ class Database(object):
# TODO: handling of hashes restriction is not particularly elegant.
hash_key = query_spec.dag_hash()
if (hash_key in self._data and
- (not hashes or hash_key in hashes)):
+ (not hashes or hash_key in hashes)):
return [self._data[hash_key].spec]
else:
return []
diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py
index a01bc143aa..d7613ae58a 100644
--- a/lib/spack/spack/fetch_strategy.py
+++ b/lib/spack/spack/fetch_strategy.py
@@ -256,7 +256,7 @@ class URLFetchStrategy(FetchStrategy):
self.digest = kwargs[h]
self.expand_archive = kwargs.get('expand', True)
- self.extra_curl_options = kwargs.get('curl_options', [])
+ self.extra_options = kwargs.get('fetch_options', {})
self._curl = None
self.extension = kwargs.get('extension', None)
@@ -325,8 +325,6 @@ class URLFetchStrategy(FetchStrategy):
'-D',
'-', # print out HTML headers
'-L', # resolve 3xx redirects
- # Timeout if can't establish a connection after 10 sec.
- '--connect-timeout', '10',
url,
]
@@ -338,7 +336,22 @@ class URLFetchStrategy(FetchStrategy):
else:
curl_args.append('-sS') # just errors when not.
- curl_args += self.extra_curl_options
+ connect_timeout = spack.config.get('config:connect_timeout')
+
+ if self.extra_options:
+ cookie = self.extra_options.get('cookie')
+ if cookie:
+ curl_args.append('-j') # junk cookies
+ curl_args.append('-b') # specify cookie
+ curl_args.append(cookie)
+
+ timeout = self.extra_options.get('timeout')
+ if timeout:
+ connect_timeout = max(connect_timeout, int(timeout))
+
+ if connect_timeout > 0:
+ # Timeout if can't establish a connection after n sec.
+ curl_args.extend(['--connect-timeout', str(connect_timeout)])
# Run curl but grab the mime type from the http headers
curl = self.curl
@@ -1148,6 +1161,15 @@ class S3FetchStrategy(URLFetchStrategy):
raise FailedDownloadError(self.url)
+def stable_target(fetcher):
+ """Returns whether the fetcher target is expected to have a stable
+ checksum. This is only true if the target is a preexisting archive
+ file."""
+ if isinstance(fetcher, URLFetchStrategy) and fetcher.cachable:
+ return True
+ return False
+
+
def from_url(url):
"""Given a URL, find an appropriate fetch strategy for it.
Currently just gives you a URLFetchStrategy that uses curl.
@@ -1225,7 +1247,8 @@ def _check_version_attributes(fetcher, pkg, version):
def _extrapolate(pkg, version):
"""Create a fetcher from an extrapolated URL for this version."""
try:
- return URLFetchStrategy(pkg.url_for_version(version))
+ return URLFetchStrategy(pkg.url_for_version(version),
+ fetch_options=pkg.fetch_options)
except spack.package.NoURLError:
msg = ("Can't extrapolate a URL for version %s "
"because package %s defines no URLs")
@@ -1245,6 +1268,7 @@ def _from_merged_attrs(fetcher, pkg, version):
url = getattr(pkg, fetcher.url_attr)
attrs = {fetcher.url_attr: url}
+ attrs['fetch_options'] = pkg.fetch_options
attrs.update(pkg.versions[version])
return fetcher(**attrs)
@@ -1267,8 +1291,10 @@ def for_package_version(pkg, version):
if version not in pkg.versions:
return _extrapolate(pkg, version)
+ # Set package args first so version args can override them
+ args = {'fetch_options': pkg.fetch_options}
# Grab a dict of args out of the package version dict
- args = pkg.versions[version]
+ args.update(pkg.versions[version])
# If the version specifies a `url_attr` directly, use that.
for fetcher in all_strategies:
@@ -1348,7 +1374,8 @@ def from_list_url(pkg):
args.get('checksum'))
# construct a fetcher
- return URLFetchStrategy(url_from_list, checksum)
+ return URLFetchStrategy(url_from_list, checksum,
+ fetch_options=pkg.fetch_options)
except KeyError as e:
tty.debug(e)
tty.msg("Cannot find version %s in url_list" % pkg.version)
diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py
index 3f6b2b6a0e..045ca5ffec 100644
--- a/lib/spack/spack/mirror.py
+++ b/lib/spack/spack/mirror.py
@@ -401,7 +401,7 @@ def get_matching_versions(specs, num_versions=1):
return matching
-def create(path, specs):
+def create(path, specs, skip_unstable_versions=False):
"""Create a directory to be used as a spack mirror, and fill it with
package archives.
@@ -409,6 +409,9 @@ def create(path, specs):
path: Path to create a mirror directory hierarchy in.
specs: Any package versions matching these specs will be added \
to the mirror.
+ skip_unstable_versions: if true, this skips adding resources when
+ they do not have a stable archive checksum (as determined by
+ ``fetch_strategy.stable_target``)
Return Value:
Returns a tuple of lists: (present, mirrored, error)
@@ -440,16 +443,14 @@ def create(path, specs):
raise MirrorError(
"Cannot create directory '%s':" % mirror_root, str(e))
- mirror_cache = spack.caches.MirrorCache(mirror_root)
+ mirror_cache = spack.caches.MirrorCache(
+ mirror_root, skip_unstable_versions=skip_unstable_versions)
mirror_stats = MirrorStats()
- try:
- spack.caches.mirror_cache = mirror_cache
- # Iterate through packages and download all safe tarballs for each
- for spec in specs:
- mirror_stats.next_spec(spec)
- add_single_spec(spec, mirror_root, mirror_stats)
- finally:
- spack.caches.mirror_cache = None
+
+ # Iterate through packages and download all safe tarballs for each
+ for spec in specs:
+ mirror_stats.next_spec(spec)
+ _add_single_spec(spec, mirror_cache, mirror_stats)
return mirror_stats.stats()
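
A hedged sketch of the new keyword in use; the mirror path and spec are illustrative, and in practice the specs come from ``get_matching_versions`` or a spec file:

.. code-block:: python

   import spack.mirror
   from spack.spec import Spec

   # Skip any resource whose fetch target lacks a stable checksum
   # (e.g. auto-generated tarballs of VCS branches).
   present, mirrored, error = spack.mirror.create(
       '/path/to/mirror', [Spec('zlib')], skip_unstable_versions=True)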
@@ -495,7 +496,7 @@ class MirrorStats(object):
self.errors.add(self.current_spec)
-def add_single_spec(spec, mirror_root, mirror_stats):
+def _add_single_spec(spec, mirror, mirror_stats):
tty.msg("Adding package {pkg} to mirror".format(
pkg=spec.format("{name}{@version}")
))
@@ -503,10 +504,10 @@ def add_single_spec(spec, mirror_root, mirror_stats):
while num_retries > 0:
try:
with spec.package.stage as pkg_stage:
- pkg_stage.cache_mirror(mirror_stats)
+ pkg_stage.cache_mirror(mirror, mirror_stats)
for patch in spec.package.all_patches():
- if patch.cache():
- patch.cache().cache_mirror(mirror_stats)
+ if patch.stage:
+ patch.stage.cache_mirror(mirror, mirror_stats)
patch.clean()
exception = None
break
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index b2d841f145..9156d7d0dd 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -477,6 +477,9 @@ class PackageBase(with_metaclass(PackageMeta, PackageViewMixin, object)):
#: This is currently only used by package sanity tests.
manual_download = False
+ #: Set of additional options used when fetching package versions.
+ fetch_options = {}
+
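
Based on the curl handling added in ``fetch_strategy.py`` above, a package can now raise the connect timeout or pass a cookie on a per-package basis. A hypothetical override (values invented):

.. code-block:: python

   # Hypothetical package body; 'timeout' raises the curl connect
   # timeout and 'cookie' is forwarded to curl via -j/-b.
   class Example(Package):
       fetch_options = {'timeout': 300, 'cookie': 'license=accept'}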
#
# Set default licensing information
#
@@ -602,11 +605,10 @@ class PackageBase(with_metaclass(PackageMeta, PackageViewMixin, object)):
"""
deptype = spack.dependency.canonical_deptype(deptype)
- if visited is None:
- visited = {cls.name: set()}
+ visited = {} if visited is None else visited
+ missing = {} if missing is None else missing
- if missing is None:
- missing = {cls.name: set()}
+ visited.setdefault(cls.name, set())
for name, conditions in cls.dependencies.items():
# check whether this dependency could be of the type asked for
@@ -621,6 +623,7 @@ class PackageBase(with_metaclass(PackageMeta, PackageViewMixin, object)):
providers = spack.repo.path.providers_for(name)
dep_names = [spec.name for spec in providers]
else:
+ visited.setdefault(cls.name, set()).add(name)
visited.setdefault(name, set())
continue
else:
@@ -1135,8 +1138,8 @@ class PackageBase(with_metaclass(PackageMeta, PackageViewMixin, object)):
for patch in self.spec.patches:
patch.fetch()
- if patch.cache():
- patch.cache().cache_local()
+ if patch.stage:
+ patch.stage.cache_local()
def do_stage(self, mirror_only=False):
"""Unpacks and expands the fetched tarball."""
@@ -2151,26 +2154,27 @@ def possible_dependencies(*pkg_or_spec, **kwargs):
See ``PackageBase.possible_dependencies`` for details.
"""
- transitive = kwargs.get('transitive', True)
- expand_virtuals = kwargs.get('expand_virtuals', True)
- deptype = kwargs.get('deptype', 'all')
- missing = kwargs.get('missing')
-
packages = []
for pos in pkg_or_spec:
if isinstance(pos, PackageMeta):
- pkg = pos
- elif isinstance(pos, spack.spec.Spec):
- pkg = pos.package
- else:
- pkg = spack.spec.Spec(pos).package
+ packages.append(pos)
+ continue
- packages.append(pkg)
+ if not isinstance(pos, spack.spec.Spec):
+ pos = spack.spec.Spec(pos)
+
+ if spack.repo.path.is_virtual(pos.name):
+ packages.extend(
+ p.package_class
+ for p in spack.repo.path.providers_for(pos.name)
+ )
+ continue
+ else:
+ packages.append(pos.package_class)
visited = {}
for pkg in packages:
- pkg.possible_dependencies(
- transitive, expand_virtuals, deptype, visited, missing)
+ pkg.possible_dependencies(visited=visited, **kwargs)
return visited
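
The reworked module-level function now returns the ``visited`` dict directly, which is what ``spack dependencies`` consumes above. A usage sketch (package name illustrative):

.. code-block:: python

   import spack.package

   # Accepts package classes, Spec objects, or plain names; returns a
   # dict mapping each reachable package name to the set of names it
   # may depend on.
   deps = spack.package.possible_dependencies(
       'mpileaks', transitive=True, expand_virtuals=False)
   for name in sorted(deps):
       print(name, sorted(deps[name]))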
diff --git a/lib/spack/spack/patch.py b/lib/spack/spack/patch.py
index bcb45387a8..3a839c5b0f 100644
--- a/lib/spack/spack/patch.py
+++ b/lib/spack/spack/patch.py
@@ -85,7 +85,8 @@ class Patch(object):
apply_patch(stage, self.path, self.level, self.working_dir)
- def cache(self):
+ @property
+ def stage(self):
return None
def to_dict(self):
@@ -248,9 +249,6 @@ class UrlPatch(Patch):
self._stage.create()
return self._stage
- def cache(self):
- return self.stage
-
def clean(self):
self.stage.destroy()
@@ -348,7 +346,8 @@ class PatchCache(object):
sha_index = self.index.get(sha256)
if not sha_index:
raise NoSuchPatchError(
- "Couldn't find patch with sha256: %s" % sha256)
+ "Couldn't find patch for package %s with sha256: %s"
+ % (pkg.fullname, sha256))
patch_dict = sha_index.get(pkg.fullname)
if not patch_dict:
diff --git a/lib/spack/spack/relocate.py b/lib/spack/spack/relocate.py
index 56fc993b5f..c7d442a427 100644
--- a/lib/spack/spack/relocate.py
+++ b/lib/spack/spack/relocate.py
@@ -13,6 +13,9 @@ import spack.cmd
import llnl.util.lang
from spack.util.executable import Executable, ProcessError
import llnl.util.tty as tty
+from macholib.MachO import MachO
+from spack.spec import Spec
+import macholib.mach_o
class InstallRootStringException(spack.error.SpackError):
@@ -41,45 +44,47 @@ class BinaryStringReplacementException(spack.error.SpackError):
(file_path, old_len, new_len))
-class MissingMacholibException(spack.error.SpackError):
+class BinaryTextReplaceException(spack.error.SpackError):
"""
- Raised when the size of the file changes after binary path substitution.
+ Raised when the new install path is longer than the old install path
+ so binary text replacement cannot occur.
"""
- def __init__(self, error):
- super(MissingMacholibException, self).__init__(
- "%s\n"
- "Python package macholib needs to be avaiable to list\n"
- "and modify a mach-o binary's rpaths, deps and id.\n"
- "Use virtualenv with pip install macholib or\n"
- "use spack to install the py-macholib package\n"
- "spack install py-macholib\n"
- "spack activate py-macholib\n"
- "spack load python\n"
- % error)
+ def __init__(self, old_path, new_path):
+ msg = "New path longer than old path: binary text"
+ msg += " replacement not possible."
+ err_msg = "The new path %s" % new_path
+ err_msg += " is longer than the old path %s.\n" % old_path
+ err_msg += "Text replacement in binaries will not work.\n"
+ err_msg += "Create buildcache from an install path "
+ err_msg += "longer than new path."
+ super(BinaryTextReplaceException, self).__init__(msg, err_msg)
def get_patchelf():
"""
+ Returns the full patchelf binary path if available in $PATH.
Builds and installs spack patchelf package on linux platforms
- using the first concretized spec.
- Returns the full patchelf binary path.
+ using the first concretized spec if it is not installed, and
+ returns the resulting patchelf binary path.
"""
# as we may need patchelf, find out where it is
patchelf = spack.util.executable.which('patchelf')
if patchelf is not None:
return patchelf.path
- else:
- if str(spack.architecture.platform()) == 'test':
- return None
- if str(spack.architecture.platform()) == 'darwin':
- return None
- patchelf_spec = spack.cmd.parse_specs("patchelf", concretize=True)[0]
- patchelf = spack.repo.get(patchelf_spec)
- if not patchelf.installed:
- patchelf.do_install(use_cache=False)
+ patchelf_spec = Spec('patchelf').concretized()
+ patchelf = patchelf_spec.package
+ if patchelf.installed:
patchelf_executable = os.path.join(patchelf.prefix.bin, "patchelf")
return patchelf_executable
+ else:
+ if (str(spack.architecture.platform()) == 'test' or
+ str(spack.architecture.platform()) == 'darwin'):
+ return None
+ else:
+ patchelf.do_install()
+ patchelf_executable = os.path.join(patchelf.prefix.bin, "patchelf")
+ return patchelf_executable
def get_existing_elf_rpaths(path_name):
@@ -95,33 +100,53 @@ def get_existing_elf_rpaths(path_name):
else:
patchelf = Executable(get_patchelf())
+ rpaths = list()
try:
output = patchelf('--print-rpath', '%s' %
path_name, output=str, error=str)
- return output.rstrip('\n').split(':')
+ rpaths = output.rstrip('\n').split(':')
except ProcessError as e:
- tty.debug('patchelf --print-rpath produced an error on %s' %
- path_name, e)
- return []
- return
+ msg = 'patchelf --print-rpath %s produced an error %s' % (path_name, e)
+ tty.warn(msg)
+ return rpaths
-def get_relative_rpaths(path_name, orig_dir, orig_rpaths):
+def get_relative_elf_rpaths(path_name, orig_layout_root, orig_rpaths):
"""
- Replaces orig_dir with relative path from dirname(path_name) if an rpath
- in orig_rpaths contains orig_path. Prefixes $ORIGIN
+ Replaces orig rpath with relative path from dirname(path_name) if an rpath
+ in orig_rpaths contains orig_layout_root. Prefixes $ORIGIN
to relative paths and returns replacement rpaths.
"""
rel_rpaths = []
for rpath in orig_rpaths:
- if re.match(orig_dir, rpath):
+ if re.match(orig_layout_root, rpath):
rel = os.path.relpath(rpath, start=os.path.dirname(path_name))
- rel_rpaths.append('$ORIGIN/%s' % rel)
+ rel_rpaths.append(os.path.join('$ORIGIN', '%s' % rel))
else:
rel_rpaths.append(rpath)
return rel_rpaths
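
A small worked example of the relativization performed by ``get_relative_elf_rpaths`` (paths invented):

.. code-block:: python

   import os

   # An rpath under the old layout root becomes $ORIGIN-relative with
   # respect to the directory containing the binary.
   path_name = '/opt/spack/pkg/bin/tool'
   rpath = '/opt/spack/dep/lib'
   rel = os.path.relpath(rpath, start=os.path.dirname(path_name))
   print(os.path.join('$ORIGIN', rel))  # -> $ORIGIN/../../dep/lib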
+def get_normalized_elf_rpaths(orig_path_name, rel_rpaths):
+ """
+ Normalize the relative rpaths with respect to the original path name
+ of the file. If the rpath starts with $ORIGIN replace $ORIGIN with the
+ dirname of the original path name and then normalize the rpath.
+ A list of normalized rpaths is returned.
+ """
+ norm_rpaths = list()
+ for rpath in rel_rpaths:
+ if rpath.startswith('$ORIGIN'):
+ sub = re.sub(re.escape('$ORIGIN'),
+ os.path.dirname(orig_path_name),
+ rpath)
+ norm = os.path.normpath(sub)
+ norm_rpaths.append(norm)
+ else:
+ norm_rpaths.append(rpath)
+ return norm_rpaths
+
+
def set_placeholder(dirname):
"""
return string of @'s with same length
@@ -129,183 +154,157 @@ def set_placeholder(dirname):
return '@' * len(dirname)
-def get_placeholder_rpaths(path_name, orig_rpaths):
+def macho_make_paths_relative(path_name, old_layout_root,
+ rpaths, deps, idpath):
"""
- Replaces original layout root dir with a placeholder string in all rpaths.
+ Return a dictionary mapping the original rpaths to the relativized rpaths.
+ This dictionary is used to replace paths in mach-o binaries.
+ Replace old_layout_root with relative path from dirname of path_name
+ in rpaths and deps; idpath is replaced with @rpath/libname.
"""
- rel_rpaths = []
- orig_dir = spack.store.layout.root
- for rpath in orig_rpaths:
- if re.match(orig_dir, rpath):
- placeholder = set_placeholder(orig_dir)
- rel = re.sub(orig_dir, placeholder, rpath)
- rel_rpaths.append('%s' % rel)
- else:
- rel_rpaths.append(rpath)
- return rel_rpaths
-
-
-def macho_get_paths(path_name):
- """
- Examines the output of otool -l path_name for these three fields:
- LC_ID_DYLIB, LC_LOAD_DYLIB, LC_RPATH and parses out the rpaths,
- dependiencies and library id.
- Returns these values.
- """
- otool = Executable('otool')
- output = otool("-l", path_name, output=str, err=str)
- last_cmd = None
- idpath = None
- rpaths = []
- deps = []
- for line in output.split('\n'):
- match = re.search('( *[a-zA-Z]+ )(.*)', line)
- if match:
- lhs = match.group(1).lstrip().rstrip()
- rhs = match.group(2)
- match2 = re.search(r'(.*) \(.*\)', rhs)
- if match2:
- rhs = match2.group(1)
- if lhs == 'cmd':
- last_cmd = rhs
- if lhs == 'path' and last_cmd == 'LC_RPATH':
- rpaths.append(rhs)
- if lhs == 'name' and last_cmd == 'LC_ID_DYLIB':
- idpath = rhs
- if lhs == 'name' and last_cmd == 'LC_LOAD_DYLIB':
- deps.append(rhs)
- return rpaths, deps, idpath
-
-
-def macho_make_paths_relative(path_name, old_dir, rpaths, deps, idpath):
- """
- Replace old_dir with relative path from dirname(path_name)
- in rpaths and deps; idpaths are replaced with @rpath/libname as needed;
- replacement are returned.
- """
- new_idpath = None
+ paths_to_paths = dict()
if idpath:
- new_idpath = '@rpath/%s' % os.path.basename(idpath)
- new_rpaths = list()
- new_deps = list()
+ paths_to_paths[idpath] = os.path.join(
+ '@rpath', '%s' % os.path.basename(idpath))
for rpath in rpaths:
- if re.match(old_dir, rpath):
+ if re.match(old_layout_root, rpath):
rel = os.path.relpath(rpath, start=os.path.dirname(path_name))
- new_rpaths.append('@loader_path/%s' % rel)
+ paths_to_paths[rpath] = os.path.join('@loader_path', '%s' % rel)
else:
- new_rpaths.append(rpath)
+ paths_to_paths[rpath] = rpath
for dep in deps:
- if re.match(old_dir, dep):
+ if re.match(old_layout_root, dep):
rel = os.path.relpath(dep, start=os.path.dirname(path_name))
- new_deps.append('@loader_path/%s' % rel)
+ paths_to_paths[dep] = os.path.join('@loader_path', '%s' % rel)
else:
- new_deps.append(dep)
- return (new_rpaths, new_deps, new_idpath)
+ paths_to_paths[dep] = dep
+ return paths_to_paths
-def macho_make_paths_placeholder(rpaths, deps, idpath):
+def macho_make_paths_normal(orig_path_name, rpaths, deps, idpath):
"""
- Replace old_dir with a placeholder of the same length
- in rpaths and deps and idpaths is needed.
- replacement are returned.
+ Return a dictionary mapping the relativized rpaths to the original rpaths.
+ This dictionary is used to replace paths in mach-o binaries.
+ Replace '@loader_path' with the dirname of the original path name
+ in rpaths and deps; idpath is replaced with the original path name
"""
- new_idpath = None
- old_dir = spack.store.layout.root
- placeholder = set_placeholder(old_dir)
+ rel_to_orig = dict()
if idpath:
- new_idpath = re.sub(old_dir, placeholder, idpath)
- new_rpaths = list()
- new_deps = list()
+ rel_to_orig[idpath] = orig_path_name
+
for rpath in rpaths:
- if re.match(old_dir, rpath):
- ph = re.sub(old_dir, placeholder, rpath)
- new_rpaths.append('%s' % ph)
+ if re.match('@loader_path', rpath):
+ norm = os.path.normpath(re.sub(re.escape('@loader_path'),
+ os.path.dirname(orig_path_name),
+ rpath))
+ rel_to_orig[rpath] = norm
else:
- new_rpaths.append(rpath)
+ rel_to_orig[rpath] = rpath
for dep in deps:
- if re.match(old_dir, dep):
- ph = re.sub(old_dir, placeholder, dep)
- new_deps.append('%s' % ph)
+ if re.match('@loader_path', dep):
+ norm = os.path.normpath(re.sub(re.escape('@loader_path'),
+ os.path.dirname(orig_path_name),
+ dep))
+ rel_to_orig[dep] = norm
else:
- new_deps.append(dep)
- return (new_rpaths, new_deps, new_idpath)
-
+ rel_to_orig[dep] = dep
+ return rel_to_orig
+
+
+def macho_find_paths(orig_rpaths, deps, idpath,
+ old_layout_root, prefix_to_prefix):
+ """
+ Inputs
+ original rpaths from mach-o binaries
+ dependency libraries for mach-o binaries
+ id path of mach-o libraries
+ old install directory layout root
+ prefix_to_prefix dictionary which maps prefixes in the old directory layout
+ to directories in the new directory layout
+ Output
+ paths_to_paths dictionary which maps all of the old paths to new paths
+ """
+ paths_to_paths = dict()
+ for orig_rpath in orig_rpaths:
+ if orig_rpath.startswith(old_layout_root):
+ for old_prefix, new_prefix in prefix_to_prefix.items():
+ if orig_rpath.startswith(old_prefix):
+ new_rpath = re.sub(re.escape(old_prefix),
+ new_prefix, orig_rpath)
+ paths_to_paths[orig_rpath] = new_rpath
+ else:
+ paths_to_paths[orig_rpath] = orig_rpath
-def macho_replace_paths(old_dir, new_dir, rpaths, deps, idpath):
- """
- Replace old_dir with new_dir in rpaths, deps and idpath
- and return replacements
- """
- new_idpath = None
if idpath:
- new_idpath = idpath.replace(old_dir, new_dir)
- new_rpaths = list()
- new_deps = list()
- for rpath in rpaths:
- new_rpath = rpath.replace(old_dir, new_dir)
- new_rpaths.append(new_rpath)
+ for old_prefix, new_prefix in prefix_to_prefix.items():
+ if idpath.startswith(old_prefix):
+ paths_to_paths[idpath] = re.sub(
+ re.escape(old_prefix), new_prefix, idpath)
for dep in deps:
- new_dep = dep.replace(old_dir, new_dir)
- new_deps.append(new_dep)
- return new_rpaths, new_deps, new_idpath
+ for old_prefix, new_prefix in prefix_to_prefix.items():
+ if dep.startswith(old_prefix):
+ paths_to_paths[dep] = re.sub(
+ re.escape(old_prefix), new_prefix, dep)
+ if dep.startswith('@'):
+ paths_to_paths[dep] = dep
+
+ return paths_to_paths
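
To make the inputs concrete, a sketch of ``macho_find_paths`` with an invented layout; rpaths under the old layout root are rewritten while system paths pass through unchanged:

.. code-block:: python

   from spack.relocate import macho_find_paths

   old_root = '/old/opt/spack'
   prefix_to_prefix = {
       '/old/opt/spack/zlib-abc123': '/new/opt/spack/zlib-abc123',
   }
   rpaths = ['/old/opt/spack/zlib-abc123/lib', '/usr/lib']
   paths_to_paths = macho_find_paths(
       rpaths, [], None, old_root, prefix_to_prefix)
   # {'/old/opt/spack/zlib-abc123/lib': '/new/opt/spack/zlib-abc123/lib',
   #  '/usr/lib': '/usr/lib'}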
def modify_macho_object(cur_path, rpaths, deps, idpath,
- new_rpaths, new_deps, new_idpath):
+ paths_to_paths):
"""
- Modify MachO binary path_name by replacing old_dir with new_dir
- or the relative path to spack install root.
- The old install dir in LC_ID_DYLIB is replaced with the new install dir
- using install_name_tool -id newid binary
- The old install dir in LC_LOAD_DYLIB is replaced with the new install dir
- using install_name_tool -change old new binary
- The old install dir in LC_RPATH is replaced with the new install dir using
- install_name_tool -rpath old new binary
+ This function is used to make mach-o buildcaches on macOS by
+ replacing old paths with new paths using install_name_tool
+ Inputs:
+ mach-o binary to be modified
+ original rpaths
+ original dependency paths
+ original id path if a mach-o library
+ dictionary mapping paths in old install layout to new install layout
"""
# avoid error message for libgcc_s
if 'libgcc_' in cur_path:
return
install_name_tool = Executable('install_name_tool')
- if new_idpath and not idpath == new_idpath:
- install_name_tool('-id', new_idpath, str(cur_path))
-
- if len(deps) == len(new_deps):
- for orig, new in zip(deps, new_deps):
- if not orig == new:
- install_name_tool('-change', orig, new, str(cur_path))
-
- if len(rpaths) == len(new_rpaths):
- for orig, new in zip(rpaths, new_rpaths):
- if not orig == new:
- install_name_tool('-rpath', orig, new, str(cur_path))
+ if idpath:
+ new_idpath = paths_to_paths.get(idpath, None)
+ if new_idpath and not idpath == new_idpath:
+ install_name_tool('-id', new_idpath, str(cur_path))
+ for dep in deps:
+ new_dep = paths_to_paths.get(dep)
+ if new_dep and dep != new_dep:
+ install_name_tool('-change', dep, new_dep, str(cur_path))
+
+ for orig_rpath in rpaths:
+ new_rpath = paths_to_paths.get(orig_rpath)
+ if new_rpath and not orig_rpath == new_rpath:
+ install_name_tool('-rpath', orig_rpath, new_rpath, str(cur_path))
return
-def modify_object_macholib(cur_path, old_dir, new_dir):
+def modify_object_macholib(cur_path, paths_to_paths):
"""
- Modify MachO binary path_name by replacing old_dir with new_dir
- or the relative path to spack install root.
- The old install dir in LC_ID_DYLIB is replaced with the new install dir
- using py-macholib
- The old install dir in LC_LOAD_DYLIB is replaced with the new install dir
- using py-macholib
- The old install dir in LC_RPATH is replaced with the new install dir using
- using py-macholib
+ This function is used when installing mach-o buildcaches on linux by
+ rewriting mach-o loader commands for dependency library paths of
+ mach-o binaries and the id path for mach-o libraries.
+ Rewriting of rpaths is handled by replace_prefix_bin.
+ Inputs
+ mach-o binary to be modified
+ dictionary mapping paths in old install layout to new install layout
"""
- if cur_path.endswith('.o'):
- return
- try:
- from macholib.MachO import MachO
- except ImportError as e:
- raise MissingMacholibException(e)
-
- def match_func(cpath):
- rpath = cpath.replace(old_dir, new_dir)
- return rpath
dll = MachO(cur_path)
- dll.rewriteLoadCommands(match_func)
+
+ changedict = paths_to_paths
+
+ def changefunc(path):
+ npath = changedict.get(path, None)
+ return npath
+
+ dll.rewriteLoadCommands(changefunc)
+
try:
f = open(dll.filename, 'rb+')
for header in dll.headers:
@@ -320,14 +319,32 @@ def modify_object_macholib(cur_path, old_dir, new_dir):
return
-def strings_contains_installroot(path_name, root_dir):
+def macholib_get_paths(cur_path):
"""
- Check if the file contain the install root string.
+ Get rpaths, dependencies and id of mach-o objects
+ using python macholib package
"""
- strings = Executable('strings')
- output = strings('%s' % path_name,
- output=str, err=str)
- return (root_dir in output or spack.paths.prefix in output)
+ dll = MachO(cur_path)
+
+ ident = None
+ rpaths = list()
+ deps = list()
+ for header in dll.headers:
+ rpaths = [data.rstrip(b'\0').decode('utf-8')
+ for load_command, dylib_command, data in header.commands if
+ load_command.cmd == macholib.mach_o.LC_RPATH]
+ deps = [data.rstrip(b'\0').decode('utf-8')
+ for load_command, dylib_command, data in header.commands if
+ load_command.cmd == macholib.mach_o.LC_LOAD_DYLIB]
+ idents = [data.rstrip(b'\0').decode('utf-8')
+ for load_command, dylib_command, data in header.commands if
+ load_command.cmd == macholib.mach_o.LC_ID_DYLIB]
+ if len(idents) == 1:
+ ident = idents[0]
+ tty.debug('ident: %s' % ident)
+ tty.debug('deps: %s' % deps)
+ tty.debug('rpaths: %s' % rpaths)
+ return (rpaths, deps, ident)
def modify_elf_object(path_name, new_rpaths):
@@ -338,9 +355,9 @@ def modify_elf_object(path_name, new_rpaths):
new_joined = ':'.join(new_rpaths)
# if we're relocating patchelf itself, use it
+ bak_path = path_name + ".bak"
if path_name[-13:] == "/bin/patchelf":
- bak_path = path_name + ".bak"
shutil.copy(path_name, bak_path)
patchelf = Executable(bak_path)
else:
@@ -350,9 +367,11 @@ def modify_elf_object(path_name, new_rpaths):
patchelf('--force-rpath', '--set-rpath', '%s' % new_joined,
'%s' % path_name, output=str, error=str)
except ProcessError as e:
- tty.die('patchelf --set-rpath %s failed' %
- path_name, e)
- pass
+ msg = 'patchelf --force-rpath --set-rpath %s failed with error %s' % (
+ path_name, e)
+ tty.warn(msg)
+ if os.path.exists(bak_path):
+ os.remove(bak_path)
def needs_binary_relocation(m_type, m_subtype):
@@ -412,13 +431,14 @@ def replace_prefix_bin(path_name, old_dir, new_dir):
if padding < 0:
return data
return match.group().replace(old_dir.encode('utf-8'),
- new_dir.encode('utf-8')) + b'\0' * padding
+ os.sep.encode('utf-8') * padding +
+ new_dir.encode('utf-8'))
with open(path_name, 'rb+') as f:
data = f.read()
f.seek(0)
original_data_len = len(data)
- pat = re.compile(old_dir.encode('utf-8') + b'([^\0]*?)\0')
+ pat = re.compile(old_dir.encode('utf-8'))
if not pat.search(data):
return
ndata = pat.sub(replace, data)
@@ -446,11 +466,15 @@ def replace_prefix_nullterm(path_name, old_dir, new_dir):
return data
return match.group().replace(old_dir.encode('utf-8'),
new_dir.encode('utf-8')) + b'\0' * padding
+
+ if len(new_dir) > len(old_dir):
+ raise BinaryTextReplaceException(old_dir, new_dir)
+
with open(path_name, 'rb+') as f:
data = f.read()
f.seek(0)
original_data_len = len(data)
- pat = re.compile(old_dir.encode('utf-8') + b'([^\0]*?)\0')
+ pat = re.compile(re.escape(old_dir).encode('utf-8') + b'([^\0]*?)\0')
if not pat.search(data):
return
ndata = pat.sub(replace, data)
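
Both replacement helpers preserve the byte length of the rewritten string so the binary's layout is untouched: ``replace_prefix_bin`` left-pads the new prefix with path separators, while ``replace_prefix_nullterm`` pads the result with trailing NUL bytes. A sketch of the separator-padding idea (paths invented):

.. code-block:: python

   import os

   old_dir = '/really/long/old/prefix'
   new_dir = '/short/new'

   # Left-pad with separators so the replacement is exactly as long as
   # the original; '/////short/new' still resolves to '/short/new'.
   padding = len(old_dir) - len(new_dir)
   padded = os.sep * padding + new_dir
   assert len(padded) == len(old_dir)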
@@ -461,80 +485,129 @@ def replace_prefix_nullterm(path_name, old_dir, new_dir):
f.truncate()
-def relocate_macho_binaries(path_names, old_dir, new_dir, allow_root):
+def relocate_macho_binaries(path_names, old_layout_root, new_layout_root,
+ prefix_to_prefix, rel, old_prefix, new_prefix):
"""
- Change old_dir to new_dir in LC_RPATH of mach-o files (on macOS)
- Change old_dir to new_dir in LC_ID and LC_DEP of mach-o files
- Account for the case where old_dir is now a placeholder
+ Use macholib python package to get the rpaths, dependent libraries
+ and library identity for libraries from the MachO object. Modify them
+ with the replacement paths queried from the dictionary mapping old layout
+ prefixes to hashes and the dictionary mapping hashes to the new layout
+ prefixes.
"""
- placeholder = set_placeholder(old_dir)
+
for path_name in path_names:
+ # Corner case where macho object file ended up in the path name list
if path_name.endswith('.o'):
continue
- if new_dir == old_dir:
- continue
- if platform.system().lower() == 'darwin':
- rpaths, deps, idpath = macho_get_paths(path_name)
- # one pass to replace placeholder
- (n_rpaths,
- n_deps,
- n_idpath) = macho_replace_paths(placeholder,
- new_dir,
- rpaths,
- deps,
- idpath)
- # another pass to replace old_dir
- (new_rpaths,
- new_deps,
- new_idpath) = macho_replace_paths(old_dir,
- new_dir,
- n_rpaths,
- n_deps,
- n_idpath)
- modify_macho_object(path_name,
- rpaths, deps, idpath,
- new_rpaths, new_deps, new_idpath)
+ if rel:
+ # get the relativized paths
+ rpaths, deps, idpath = macholib_get_paths(path_name)
+ # get the file path name in the original prefix
+ orig_path_name = re.sub(re.escape(new_prefix), old_prefix,
+ path_name)
+ # get the mapping of the relativized paths to the original
+ # normalized paths
+ rel_to_orig = macho_make_paths_normal(orig_path_name,
+ rpaths, deps,
+ idpath)
+ # replace the relativized paths with normalized paths
+ if platform.system().lower() == 'darwin':
+ modify_macho_object(path_name, rpaths, deps,
+ idpath, rel_to_orig)
+ else:
+ modify_object_macholib(path_name,
+ rel_to_orig)
+ # get the normalized paths in the mach-o binary
+ rpaths, deps, idpath = macholib_get_paths(path_name)
+ # get the mapping of paths in old prefix to path in new prefix
+ paths_to_paths = macho_find_paths(rpaths, deps, idpath,
+ old_layout_root,
+ prefix_to_prefix)
+ # replace the old paths with new paths
+ if platform.system().lower() == 'darwin':
+ modify_macho_object(path_name, rpaths, deps,
+ idpath, paths_to_paths)
+ else:
+ modify_object_macholib(path_name,
+ paths_to_paths)
+ # get the new normalized path in the mach-o binary
+ rpaths, deps, idpath = macholib_get_paths(path_name)
+ # get the mapping of paths to relative paths in the new prefix
+ paths_to_paths = macho_make_paths_relative(path_name,
+ new_layout_root,
+ rpaths, deps, idpath)
+ # replace the new paths with relativized paths in the new prefix
+ if platform.system().lower() == 'darwin':
+ modify_macho_object(path_name, rpaths, deps,
+ idpath, paths_to_paths)
+ else:
+ modify_object_macholib(path_name,
+ paths_to_paths)
else:
- modify_object_macholib(path_name, placeholder, new_dir)
- modify_object_macholib(path_name, old_dir, new_dir)
- if len(new_dir) <= len(old_dir):
- replace_prefix_nullterm(path_name, old_dir, new_dir)
+ # get the paths in the old prefix
+ rpaths, deps, idpath = macholib_get_paths(path_name)
+ # get the mapping of paths in the old prefix to the new prefix
+ paths_to_paths = macho_find_paths(rpaths, deps, idpath,
+ old_layout_root,
+ prefix_to_prefix)
+ # replace the old paths with new paths
+ if platform.system().lower() == 'darwin':
+ modify_macho_object(path_name, rpaths, deps,
+ idpath, paths_to_paths)
+ else:
+ modify_object_macholib(path_name,
+ paths_to_paths)
+
+
+def elf_find_paths(orig_rpaths, old_layout_root, prefix_to_prefix):
+ new_rpaths = list()
+ for orig_rpath in orig_rpaths:
+ if orig_rpath.startswith(old_layout_root):
+ for old_prefix, new_prefix in prefix_to_prefix.items():
+ if orig_rpath.startswith(old_prefix):
+ new_rpaths.append(re.sub(re.escape(old_prefix),
+ new_prefix, orig_rpath))
else:
- tty.warn('Cannot do a binary string replacement'
- ' with padding for %s'
- ' because %s is longer than %s' %
- (path_name, new_dir, old_dir))
+ new_rpaths.append(orig_rpath)
+ return new_rpaths
-def relocate_elf_binaries(path_names, old_dir, new_dir, allow_root):
+def relocate_elf_binaries(path_names, old_layout_root, new_layout_root,
+ prefix_to_prefix, rel, old_prefix, new_prefix):
"""
- Change old_dir to new_dir in RPATHs of elf binaries
- Account for the case where old_dir is now a placeholder
+ Use patchelf to get the original rpaths and then replace them with
+ rpaths in the new directory layout.
+ New rpaths are determined from a dictionary mapping the prefixes in the
+ old directory layout to the prefixes in the new directory layout if the
+ rpath was in the old layout root, i.e. system paths are not replaced.
"""
- placeholder = set_placeholder(old_dir)
for path_name in path_names:
orig_rpaths = get_existing_elf_rpaths(path_name)
- if orig_rpaths:
- # one pass to replace placeholder
- n_rpaths = substitute_rpath(orig_rpaths,
- placeholder, new_dir)
- # one pass to replace old_dir
- new_rpaths = substitute_rpath(n_rpaths,
- old_dir, new_dir)
+ new_rpaths = list()
+ if rel:
+ # get the file path in the old_prefix
+ orig_path_name = re.sub(re.escape(new_prefix), old_prefix,
+ path_name)
+ # get the normalized rpaths in the old prefix using the file path
+ # in the orig prefix
+ orig_norm_rpaths = get_normalized_elf_rpaths(orig_path_name,
+ orig_rpaths)
+ # get the normalized rpaths in the new prefix
+ norm_rpaths = elf_find_paths(orig_norm_rpaths, old_layout_root,
+ prefix_to_prefix)
+ # get the relativized rpaths in the new prefix
+ new_rpaths = get_relative_elf_rpaths(path_name, new_layout_root,
+ norm_rpaths)
+ modify_elf_object(path_name, new_rpaths)
+ else:
+ new_rpaths = elf_find_paths(orig_rpaths, old_layout_root,
+ prefix_to_prefix)
modify_elf_object(path_name, new_rpaths)
- if not new_dir == old_dir:
- if len(new_dir) <= len(old_dir):
- replace_prefix_bin(path_name, old_dir, new_dir)
- else:
- tty.warn('Cannot do a binary string replacement'
- ' with padding for %s'
- ' because %s is longer than %s.' %
- (path_name, new_dir, old_dir))
def make_link_relative(cur_path_names, orig_path_names):
"""
- Change absolute links to be relative.
+ Change absolute links to relative links.
"""
for cur_path, orig_path in zip(cur_path_names, orig_path_names):
target = os.readlink(orig_path)
@@ -544,8 +617,8 @@ def make_link_relative(cur_path_names, orig_path_names):
os.symlink(relative_target, cur_path)
-def make_macho_binaries_relative(cur_path_names, orig_path_names, old_dir,
- allow_root):
+def make_macho_binaries_relative(cur_path_names, orig_path_names,
+ old_layout_root):
"""
Replace old RPATHs with paths relative to old_dir in binary files
"""
@@ -554,33 +627,26 @@ def make_macho_binaries_relative(cur_path_names, orig_path_names, old_dir,
deps = set()
idpath = None
if platform.system().lower() == 'darwin':
- (rpaths, deps, idpath) = macho_get_paths(cur_path)
- (new_rpaths,
- new_deps,
- new_idpath) = macho_make_paths_relative(orig_path, old_dir,
- rpaths, deps, idpath)
+ (rpaths, deps, idpath) = macholib_get_paths(cur_path)
+ paths_to_paths = macho_make_paths_relative(orig_path,
+ old_layout_root,
+ rpaths, deps, idpath)
modify_macho_object(cur_path,
rpaths, deps, idpath,
- new_rpaths, new_deps, new_idpath)
- if (not allow_root and
- not file_is_relocatable(cur_path)):
- raise InstallRootStringException(cur_path, old_dir)
+ paths_to_paths)
-def make_elf_binaries_relative(cur_path_names, orig_path_names, old_dir,
- allow_root):
+def make_elf_binaries_relative(cur_path_names, orig_path_names,
+ old_layout_root):
"""
Replace old RPATHs with paths relative to old_dir in binary files
"""
for cur_path, orig_path in zip(cur_path_names, orig_path_names):
orig_rpaths = get_existing_elf_rpaths(cur_path)
if orig_rpaths:
- new_rpaths = get_relative_rpaths(orig_path, old_dir,
- orig_rpaths)
+ new_rpaths = get_relative_elf_rpaths(orig_path, old_layout_root,
+ orig_rpaths)
modify_elf_object(cur_path, new_rpaths)
- if (not allow_root and
- not file_is_relocatable(cur_path)):
- raise InstallRootStringException(cur_path, old_dir)
def check_files_relocatable(cur_path_names, allow_root):
@@ -594,63 +660,73 @@ def check_files_relocatable(cur_path_names, allow_root):
cur_path, spack.store.layout.root)
-def make_link_placeholder(cur_path_names, cur_dir, old_dir):
- """
- Replace old install path with placeholder in absolute links.
-
- Links in ``cur_path_names`` must link to absolute paths.
- """
- for cur_path in cur_path_names:
- placeholder = set_placeholder(spack.store.layout.root)
- placeholder_prefix = old_dir.replace(spack.store.layout.root,
- placeholder)
- cur_src = os.readlink(cur_path)
- rel_src = os.path.relpath(cur_src, cur_dir)
- new_src = os.path.join(placeholder_prefix, rel_src)
-
- os.unlink(cur_path)
- os.symlink(new_src, cur_path)
-
-
-def relocate_links(path_names, old_dir, new_dir):
- """
- Replace old path with new path in link sources.
-
- Links in ``path_names`` must link to absolute paths or placeholders.
- """
- placeholder = set_placeholder(old_dir)
- for path_name in path_names:
- old_src = os.readlink(path_name)
- # replace either placeholder or old_dir
- new_src = old_src.replace(placeholder, new_dir, 1)
- new_src = new_src.replace(old_dir, new_dir, 1)
-
- os.unlink(path_name)
- os.symlink(new_src, path_name)
+def relocate_links(linknames, old_layout_root, new_layout_root,
+ old_install_prefix, new_install_prefix, prefix_to_prefix):
+ """
+ The symbolic links in linknames are absolute links or placeholder links.
+ The old link target is read and the placeholder is replaced by the old
+ layout root. If the old link target is in the old install prefix, the new
+ link target is created by replacing the old install prefix with the new
+ install prefix.
+ """
+ placeholder = set_placeholder(old_layout_root)
+ link_names = [os.path.join(new_install_prefix, linkname)
+ for linkname in linknames]
+ for link_name in link_names:
+ link_target = os.readlink(link_name)
+ link_target = re.sub(placeholder, old_layout_root, link_target)
+ if link_target.startswith(old_install_prefix):
+ new_link_target = re.sub(
+ old_install_prefix, new_install_prefix, link_target)
+ os.unlink(link_name)
+ os.symlink(new_link_target, link_name)
+ if (os.path.isabs(link_target) and
+ not link_target.startswith(new_install_prefix)):
+ msg = 'Link target %s' % link_target
+ msg += ' for symbolic link %s is outside' % link_name
+ msg += ' of the new install prefix %s.\n' % new_install_prefix
+ tty.warn(msg)
+
+
+def relocate_text(path_names, old_layout_root, new_layout_root,
+ old_install_prefix, new_install_prefix,
+ old_spack_prefix, new_spack_prefix,
+ prefix_to_prefix):
+ """
+ Replace old paths with new paths in text files
+ including the path to the spack sbang script
+ """
+ sbangre = '#!/bin/bash %s/bin/sbang' % old_spack_prefix
+ sbangnew = '#!/bin/bash %s/bin/sbang' % new_spack_prefix
-
-def relocate_text(path_names, oldpath, newpath, oldprefix, newprefix):
- """
- Replace old path with new path in text files
- including the path the the spack sbang script.
- """
- sbangre = '#!/bin/bash %s/bin/sbang' % oldprefix
- sbangnew = '#!/bin/bash %s/bin/sbang' % newprefix
for path_name in path_names:
- replace_prefix_text(path_name, oldpath, newpath)
+ replace_prefix_text(path_name, old_install_prefix, new_install_prefix)
+ for orig_dep_prefix, new_dep_prefix in prefix_to_prefix.items():
+ replace_prefix_text(path_name, orig_dep_prefix, new_dep_prefix)
+ replace_prefix_text(path_name, old_layout_root, new_layout_root)
replace_prefix_text(path_name, sbangre, sbangnew)
- replace_prefix_text(path_name, oldprefix, newprefix)
-def substitute_rpath(orig_rpath, topdir, new_root_path):
- """
- Replace topdir with new_root_path RPATH list orig_rpath
- """
- new_rpaths = []
- for path in orig_rpath:
- new_rpath = path.replace(topdir, new_root_path)
- new_rpaths.append(new_rpath)
- return new_rpaths
+def relocate_text_bin(path_names, old_layout_root, new_layout_root,
+ old_install_prefix, new_install_prefix,
+ old_spack_prefix, new_spack_prefix,
+ prefix_to_prefix):
+ """
+ Replace null terminated path strings hard coded into binaries.
+ Raise an exception when the new path is longer than the old path
+ because this breaks the binary.
+ """
+ if len(new_install_prefix) <= len(old_install_prefix):
+ for path_name in path_names:
+ for old_dep_prefix, new_dep_prefix in prefix_to_prefix.items():
+ if len(new_dep_prefix) <= len(old_dep_prefix):
+ replace_prefix_bin(
+ path_name, old_dep_prefix, new_dep_prefix)
+ replace_prefix_bin(path_name, old_spack_prefix, new_spack_prefix)
+ else:
+ if len(path_names) > 0:
+ raise BinaryTextReplaceException(
+ old_install_prefix, new_install_prefix)
def is_relocatable(spec):
@@ -728,7 +804,7 @@ def file_is_relocatable(file, paths_to_relocate=None):
set_of_strings.discard(rpaths)
if platform.system().lower() == 'darwin':
if m_subtype == 'x-mach-binary':
- rpaths, deps, idpath = macho_get_paths(file)
+ rpaths, deps, idpath = macholib_get_paths(file)
set_of_strings.discard(set(rpaths))
set_of_strings.discard(set(deps))
if idpath is not None:
@@ -778,6 +854,8 @@ def mime_type(file):
file_cmd = Executable('file')
output = file_cmd('-b', '-h', '--mime-type', file, output=str, error=str)
tty.debug('[MIME_TYPE] {0} -> {1}'.format(file, output.strip()))
+ # In corner cases the output does not contain a subtype prefixed with a /
+ # In those cases add the / so the tuple can be formed.
if '/' not in output:
output += '/'
split_by_slash = output.strip().split('/')
diff --git a/lib/spack/spack/schema/config.py b/lib/spack/spack/schema/config.py
index 1378698825..a05af2f438 100644
--- a/lib/spack/spack/schema/config.py
+++ b/lib/spack/spack/schema/config.py
@@ -55,6 +55,7 @@ properties = {
},
'source_cache': {'type': 'string'},
'misc_cache': {'type': 'string'},
+ 'connect_timeout': {'type': 'integer', 'minimum': 0},
'verify_ssl': {'type': 'boolean'},
'suppress_gpg_warnings': {'type': 'boolean'},
'install_missing_compilers': {'type': 'boolean'},
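
With the schema entry in place, the default of 10 seconds set in ``config_defaults`` can be inspected or overridden like any other config value. A sketch:

.. code-block:: python

   import spack.config

   timeout = spack.config.get('config:connect_timeout')  # 10 by default
   spack.config.set('config:connect_timeout', 30)        # session override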
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index 718b5ef14d..c6fe2da762 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -3120,7 +3120,7 @@ class Spec(object):
A copy of this spec.
Examples:
- Deep copy with dependnecies::
+ Deep copy with dependencies::
spec.copy()
spec.copy(deps=True)
diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py
index b445638228..54d370a50d 100644
--- a/lib/spack/spack/stage.py
+++ b/lib/spack/spack/stage.py
@@ -493,8 +493,14 @@ class Stage(object):
spack.caches.fetch_cache.store(
self.fetcher, self.mirror_paths.storage_path)
- def cache_mirror(self, stats):
- """Perform a fetch if the resource is not already cached"""
+ def cache_mirror(self, mirror, stats):
+ """Perform a fetch if the resource is not already cached
+
+ Arguments:
+ mirror (MirrorCache): the mirror to cache this Stage's resource in
+ stats (MirrorStats): this is updated depending on whether the
+ caching operation succeeded or failed
+ """
if isinstance(self.default_fetcher, fs.BundleFetchStrategy):
# BundleFetchStrategy has no source to fetch. The associated
# fetcher does nothing but the associated stage may still exist.
@@ -505,20 +511,23 @@ class Stage(object):
# must examine the type of the fetcher.
return
- dst_root = spack.caches.mirror_cache.root
+ if (mirror.skip_unstable_versions and
+ not fs.stable_target(self.default_fetcher)):
+ return
+
absolute_storage_path = os.path.join(
- dst_root, self.mirror_paths.storage_path)
+ mirror.root, self.mirror_paths.storage_path)
if os.path.exists(absolute_storage_path):
stats.already_existed(absolute_storage_path)
else:
self.fetch()
self.check()
- spack.caches.mirror_cache.store(
+ mirror.store(
self.fetcher, self.mirror_paths.storage_path)
stats.added(absolute_storage_path)
- spack.caches.mirror_cache.symlink(self.mirror_paths)
+ mirror.symlink(self.mirror_paths)
def expand_archive(self):
"""Changes to the stage directory and attempt to expand the downloaded
@@ -743,7 +752,8 @@ def purge():
def get_checksums_for_versions(
- url_dict, name, first_stage_function=None, keep_stage=False):
+ url_dict, name, first_stage_function=None, keep_stage=False,
+ fetch_options=None):
"""Fetches and checksums archives from URLs.
This function is called by both ``spack checksum`` and ``spack
@@ -757,6 +767,8 @@ def get_checksums_for_versions(
first_stage_function (callable): function that takes a Stage and a URL;
this is run on the stage of the first URL downloaded
keep_stage (bool): whether to keep staging area when command completes
+ fetch_options (dict): Options used for the fetcher (such as timeout
+ or cookies)
Returns:
(str): A multi-line string containing versions and corresponding hashes
@@ -790,7 +802,12 @@ def get_checksums_for_versions(
i = 0
for url, version in zip(urls, versions):
try:
- with Stage(url, keep=keep_stage) as stage:
+ if fetch_options:
+ url_or_fs = fs.URLFetchStrategy(
+ url, fetch_options=fetch_options)
+ else:
+ url_or_fs = url
+ with Stage(url_or_fs, keep=keep_stage) as stage:
# Fetch the archive
stage.fetch()
if i == 0 and first_stage_function:
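
When fetch_options is provided, the URL is wrapped in a URLFetchStrategy so
the options reach the fetcher; otherwise the bare URL is handed to Stage,
which builds a default fetcher. A minimal usage sketch (the URL and options
are made up):

    import spack.fetch_strategy as fs
    from spack.stage import Stage

    fetcher = fs.URLFetchStrategy('https://example.com/foo-1.0.tar.gz',
                                  fetch_options={'timeout': 60})
    with Stage(fetcher, keep=False) as stage:
        stage.fetch()  # the fetch now carries the extra options
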
diff --git a/lib/spack/spack/test/build_systems.py b/lib/spack/spack/test/build_systems.py
index 744821a04e..7ede78b7a5 100644
--- a/lib/spack/spack/test/build_systems.py
+++ b/lib/spack/spack/test/build_systems.py
@@ -181,3 +181,41 @@ class TestAutotoolsPackage(object):
assert '--without-bar' in options
assert '--without-baz' in options
assert '--no-fee' in options
+
+
+@pytest.mark.usefixtures('config', 'mock_packages')
+class TestCMakePackage(object):
+
+ def test_define(self):
+ s = Spec('cmake-client')
+ s.concretize()
+ pkg = spack.repo.get(s)
+
+ for cls in (list, tuple):
+ arg = pkg.define('MULTI', cls(['right', 'up']))
+ assert arg == '-DMULTI:STRING=right;up'
+
+ arg = pkg.define('ENABLE_TRUTH', False)
+ assert arg == '-DENABLE_TRUTH:BOOL=OFF'
+ arg = pkg.define('ENABLE_TRUTH', True)
+ assert arg == '-DENABLE_TRUTH:BOOL=ON'
+
+ arg = pkg.define('SINGLE', 'red')
+ assert arg == '-DSINGLE:STRING=red'
+
+ def test_define_from_variant(self):
+ s = Spec('cmake-client multi=up,right ~truthy single=red')
+ s.concretize()
+ pkg = spack.repo.get(s)
+
+ arg = pkg.define_from_variant('MULTI')
+ assert arg == '-DMULTI:STRING=right;up'
+
+ arg = pkg.define_from_variant('ENABLE_TRUTH', 'truthy')
+ assert arg == '-DENABLE_TRUTH:BOOL=OFF'
+
+ arg = pkg.define_from_variant('SINGLE')
+ assert arg == '-DSINGLE:STRING=red'
+
+ with pytest.raises(KeyError, match="not a variant"):
+ pkg.define_from_variant('NONEXISTENT')
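
The two helpers exercised above translate Python values into CMake -D flags:
define() formats an explicit value, while define_from_variant() reads the
value from the concretized spec. A sketch of how a recipe might use them (the
package and variant here are hypothetical):

    class Foo(CMakePackage):
        variant('shared', default=True, description='Build shared libraries')

        def cmake_args(self):
            return [
                self.define('ENABLE_TRUTH', True),      # -DENABLE_TRUTH:BOOL=ON
                self.define('MULTI', ['right', 'up']),  # -DMULTI:STRING=right;up
                self.define_from_variant('BUILD_SHARED_LIBS', 'shared'),
            ]
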
diff --git a/lib/spack/spack/test/cmd/buildcache.py b/lib/spack/spack/test/cmd/buildcache.py
index 064daeb063..03d09b9771 100644
--- a/lib/spack/spack/test/cmd/buildcache.py
+++ b/lib/spack/spack/test/cmd/buildcache.py
@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import errno
import platform
import pytest
@@ -11,6 +12,7 @@ import spack.main
import spack.binary_distribution
buildcache = spack.main.SpackCommand('buildcache')
+install = spack.main.SpackCommand('install')
@pytest.fixture()
@@ -41,3 +43,16 @@ def test_buildcache_list_duplicates(mock_get_specs, capsys):
output = buildcache('list', 'mpileaks', '@2.3')
assert output.count('mpileaks') == 3
+
+
+def test_buildcache_create_fail_on_perm_denied(
+ install_mockery, mock_fetch, monkeypatch, tmpdir):
+ """Ensure that buildcache create fails on permission denied error."""
+ install('trivial-install-test-package')
+
+ tmpdir.chmod(0)
+ with pytest.raises(OSError) as error:
+ buildcache('create', '-d', str(tmpdir),
+ '--unsigned', 'trivial-install-test-package')
+ assert error.value.errno == errno.EACCES
+ tmpdir.chmod(0o700)
diff --git a/lib/spack/spack/test/cmd/dependencies.py b/lib/spack/spack/test/cmd/dependencies.py
index fc47069181..05d0556936 100644
--- a/lib/spack/spack/test/cmd/dependencies.py
+++ b/lib/spack/spack/test/cmd/dependencies.py
@@ -17,7 +17,7 @@ mpis = ['mpich', 'mpich2', 'multi-provider-mpi', 'zmpi']
mpi_deps = ['fake']
-def test_immediate_dependencies(mock_packages):
+def test_direct_dependencies(mock_packages):
out = dependencies('mpileaks')
actual = set(re.split(r'\s+', out.strip()))
expected = set(['callpath'] + mpis)
@@ -47,7 +47,7 @@ def test_transitive_dependencies_with_deptypes(mock_packages):
@pytest.mark.db
-def test_immediate_installed_dependencies(mock_packages, database):
+def test_direct_installed_dependencies(mock_packages, database):
with color_when(False):
out = dependencies('--installed', 'mpileaks^mpich')
diff --git a/lib/spack/spack/test/cmd/mirror.py b/lib/spack/spack/test/cmd/mirror.py
index d62d7df432..4bb4fad224 100644
--- a/lib/spack/spack/test/cmd/mirror.py
+++ b/lib/spack/spack/test/cmd/mirror.py
@@ -66,6 +66,29 @@ def test_mirror_from_env(tmpdir, mock_packages, mock_fetch, config,
assert mirror_res == expected
+@pytest.fixture
+def source_for_pkg_with_hash(mock_packages, tmpdir):
+ pkg = spack.repo.get('trivial-pkg-with-valid-hash')
+ local_url_basename = os.path.basename(pkg.url)
+ local_path = os.path.join(str(tmpdir), local_url_basename)
+ with open(local_path, 'w') as f:
+ f.write(pkg.hashed_content)
+ local_url = "file://" + local_path
+ pkg.versions[spack.version.Version('1.0')]['url'] = local_url
+
+
+def test_mirror_skip_unstable(tmpdir_factory, mock_packages, config,
+ source_for_pkg_with_hash):
+ mirror_dir = str(tmpdir_factory.mktemp('mirror-dir'))
+
+ specs = [spack.spec.Spec(x).concretized() for x in
+ ['git-test', 'trivial-pkg-with-valid-hash']]
+ spack.mirror.create(mirror_dir, specs, skip_unstable_versions=True)
+
+ assert (set(os.listdir(mirror_dir)) - set(['_source-cache']) ==
+ set(['trivial-pkg-with-valid-hash']))
+
+
def test_mirror_crud(tmp_scope, capsys):
with capsys.disabled():
mirror('add', '--scope', tmp_scope, 'mirror', 'http://spack.io')
diff --git a/lib/spack/spack/test/config.py b/lib/spack/spack/test/config.py
index feb2b9cae4..b8598616d5 100644
--- a/lib/spack/spack/test/config.py
+++ b/lib/spack/spack/test/config.py
@@ -46,7 +46,19 @@ config_merge_list = {
config_override_list = {
'config': {
- 'build_stage:': ['patha', 'pathb']}}
+ 'build_stage:': ['pathd', 'pathe']}}
+
+config_merge_dict = {
+ 'config': {
+ 'info': {
+ 'a': 3,
+ 'b': 4}}}
+
+config_override_dict = {
+ 'config': {
+ 'info:': {
+ 'a': 7,
+ 'c': 9}}}
@pytest.fixture()
@@ -382,7 +394,7 @@ def test_read_config_override_list(mock_low_high_config, write_config_file):
write_config_file('config', config_override_list, 'high')
assert spack.config.get('config') == {
'install_tree': 'install_tree_path',
- 'build_stage': ['patha', 'pathb']
+ 'build_stage': config_override_list['config']['build_stage:']
}
@@ -857,3 +869,74 @@ def test_dotkit_in_config_does_not_raise(
# we throw a deprecation warning without raising
assert '_sp_sys_type' in captured[0] # stdout
assert 'Warning' in captured[1] # stderr
+
+
+def test_internal_config_section_override(mock_low_high_config,
+ write_config_file):
+ write_config_file('config', config_merge_list, 'low')
+ wanted_list = config_override_list['config']['build_stage:']
+ mock_low_high_config.push_scope(spack.config.InternalConfigScope
+ ('high', {
+ 'config:': {
+ 'build_stage': wanted_list
+ }
+ }))
+ assert mock_low_high_config.get('config:build_stage') == wanted_list
+
+
+def test_internal_config_dict_override(mock_low_high_config,
+ write_config_file):
+ write_config_file('config', config_merge_dict, 'low')
+ wanted_dict = config_override_dict['config']['info:']
+ mock_low_high_config.push_scope(spack.config.InternalConfigScope
+ ('high', config_override_dict))
+ assert mock_low_high_config.get('config:info') == wanted_dict
+
+
+def test_internal_config_list_override(mock_low_high_config,
+ write_config_file):
+ write_config_file('config', config_merge_list, 'low')
+ wanted_list = config_override_list['config']['build_stage:']
+ mock_low_high_config.push_scope(spack.config.InternalConfigScope
+ ('high', config_override_list))
+ assert mock_low_high_config.get('config:build_stage') == wanted_list
+
+
+def test_set_section_override(mock_low_high_config, write_config_file):
+ write_config_file('config', config_merge_list, 'low')
+ wanted_list = config_override_list['config']['build_stage:']
+ with spack.config.override('config::build_stage', wanted_list):
+ assert mock_low_high_config.get('config:build_stage') == wanted_list
+ assert config_merge_list['config']['build_stage'] == \
+ mock_low_high_config.get('config:build_stage')
+
+
+def test_set_list_override(mock_low_high_config, write_config_file):
+ write_config_file('config', config_merge_list, 'low')
+ wanted_list = config_override_list['config']['build_stage:']
+ with spack.config.override('config:build_stage:', wanted_list):
+ assert wanted_list == mock_low_high_config.get('config:build_stage')
+ assert config_merge_list['config']['build_stage'] == \
+ mock_low_high_config.get('config:build_stage')
+
+
+def test_set_dict_override(mock_low_high_config, write_config_file):
+ write_config_file('config', config_merge_dict, 'low')
+ wanted_dict = config_override_dict['config']['info:']
+ with spack.config.override('config:info:', wanted_dict):
+ assert wanted_dict == mock_low_high_config.get('config:info')
+ assert config_merge_dict['config']['info'] == \
+ mock_low_high_config.get('config:info')
+
+
+def test_set_bad_path(config):
+ with pytest.raises(syaml.SpackYAMLError, match='Illegal leading'):
+ with spack.config.override(':bad:path', ''):
+ pass
+
+
+def test_bad_path_double_override(config):
+ with pytest.raises(syaml.SpackYAMLError,
+ match='Meaningless second override'):
+ with spack.config.override('bad::double:override::directive', ''):
+ pass
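
These tests pin down the override syntax: a trailing ':' on a key (or a '::'
separator) replaces a config section outright instead of merging it with
lower-precedence scopes. A short sketch grounded in the cases above:

    import spack.config

    # 'config:build_stage' would merge; 'config::build_stage' replaces.
    with spack.config.override('config::build_stage', ['/tmp/stage']):
        assert spack.config.get('config:build_stage') == ['/tmp/stage']
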
diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py
index 04e870d336..6703742142 100644
--- a/lib/spack/spack/test/conftest.py
+++ b/lib/spack/spack/test/conftest.py
@@ -525,6 +525,8 @@ def database(mock_store, mock_packages, config):
"""This activates the mock store, packages, AND config."""
with use_store(mock_store):
yield mock_store.db
+ # Force reading the database again between tests
+ mock_store.db.last_seen_verifier = ''
@pytest.fixture(scope='function')
diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py
index 88e6c42693..28311c7501 100644
--- a/lib/spack/spack/test/database.py
+++ b/lib/spack/spack/test/database.py
@@ -13,6 +13,12 @@ import multiprocessing
import os
import pytest
import json
+try:
+ import uuid
+ _use_uuid = True
+except ImportError:
+ _use_uuid = False
+ pass
import llnl.util.lock as lk
from llnl.util.tty.colify import colify
@@ -469,6 +475,21 @@ def test_015_write_and_read(mutable_database):
assert new_rec.installed == rec.installed
+def test_017_write_and_read_without_uuid(mutable_database, monkeypatch):
+ monkeypatch.setattr(spack.database, '_use_uuid', False)
+ # write and read DB
+ with spack.store.db.write_transaction():
+ specs = spack.store.db.query()
+ recs = [spack.store.db.get_record(s) for s in specs]
+
+ for spec, rec in zip(specs, recs):
+ new_rec = spack.store.db.get_record(spec)
+ assert new_rec.ref_count == rec.ref_count
+ assert new_rec.spec == rec.spec
+ assert new_rec.path == rec.path
+ assert new_rec.installed == rec.installed
+
+
def test_020_db_sanity(database):
"""Make sure query() returns what's actually in the db."""
_check_db_sanity(database)
@@ -703,6 +724,9 @@ def test_old_external_entries_prefix(mutable_database):
with open(spack.store.db._index_path, 'w') as f:
f.write(json.dumps(db_obj))
+ if _use_uuid:
+ with open(spack.store.db._verifier_path, 'w') as f:
+ f.write(str(uuid.uuid4()))
record = spack.store.db.get_record(s)
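
The verifier file written above suggests the scheme these tests rely on:
every database write records a fresh UUID, and a reader re-parses the index
only when the on-disk UUID differs from its last_seen_verifier. A speculative
reader-side sketch (the helper is hypothetical):

    def index_changed(db):
        # Compare the on-disk verifier with the last one this process saw.
        with open(db._verifier_path) as f:
            current = f.read()
        changed = current != db.last_seen_verifier
        db.last_seen_verifier = current
        return changed
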
diff --git a/lib/spack/spack/test/mirror.py b/lib/spack/spack/test/mirror.py
index 08b32f74f1..570c329b71 100644
--- a/lib/spack/spack/test/mirror.py
+++ b/lib/spack/spack/test/mirror.py
@@ -213,7 +213,7 @@ def test_mirror_cache_symlinks(tmpdir):
"""
cosmetic_path = 'zlib/zlib-1.2.11.tar.gz'
global_path = '_source-cache/archive/c3/c3e5.tar.gz'
- cache = spack.caches.MirrorCache(str(tmpdir))
+ cache = spack.caches.MirrorCache(str(tmpdir), False)
reference = spack.mirror.MirrorReference(cosmetic_path, global_path)
cache.store(MockFetcher(), reference.storage_path)
diff --git a/lib/spack/spack/test/package_class.py b/lib/spack/spack/test/package_class.py
index b3351ffb49..d540ac663e 100644
--- a/lib/spack/spack/test/package_class.py
+++ b/lib/spack/spack/test/package_class.py
@@ -11,12 +11,17 @@ static DSL metadata for packages.
"""
import pytest
+import spack.package
import spack.repo
-@pytest.fixture
-def mpileaks_possible_deps(mock_packages):
- mpi_names = [spec.name for spec in spack.repo.path.providers_for('mpi')]
+@pytest.fixture(scope="module")
+def mpi_names(mock_repo_path):
+ return [spec.name for spec in mock_repo_path.providers_for('mpi')]
+
+
+@pytest.fixture()
+def mpileaks_possible_deps(mock_packages, mpi_names):
possible = {
'callpath': set(['dyninst'] + mpi_names),
'dyninst': set(['libdwarf', 'libelf']),
@@ -34,47 +39,72 @@ def mpileaks_possible_deps(mock_packages):
def test_possible_dependencies(mock_packages, mpileaks_possible_deps):
mpileaks = spack.repo.get('mpileaks')
- assert (mpileaks.possible_dependencies(expand_virtuals=True) ==
- mpileaks_possible_deps)
+ assert mpileaks_possible_deps == (
+ mpileaks.possible_dependencies(expand_virtuals=True))
- assert mpileaks.possible_dependencies(expand_virtuals=False) == {
- 'callpath': set(['dyninst']),
+ assert {
+ 'callpath': set(['dyninst', 'mpi']),
'dyninst': set(['libdwarf', 'libelf']),
'libdwarf': set(['libelf']),
'libelf': set(),
'mpi': set(),
- 'mpileaks': set(['callpath']),
- }
+ 'mpileaks': set(['callpath', 'mpi']),
+ } == mpileaks.possible_dependencies(expand_virtuals=False)
+
+
+def test_possible_direct_dependencies(mock_packages, mpileaks_possible_deps):
+ mpileaks = spack.repo.get('mpileaks')
+ deps = mpileaks.possible_dependencies(transitive=False,
+ expand_virtuals=False)
+
+ assert {
+ 'callpath': set(),
+ 'mpi': set(),
+ 'mpileaks': set(['callpath', 'mpi']),
+ } == deps
+
+
+def test_possible_dependencies_virtual(mock_packages, mpi_names):
+ expected = dict(
+ (name, set(spack.repo.get(name).dependencies))
+ for name in mpi_names
+ )
+
+ # only one mock MPI has a dependency
+ expected['fake'] = set()
+
+ assert expected == spack.package.possible_dependencies(
+ "mpi", transitive=False)
def test_possible_dependencies_missing(mock_packages):
md = spack.repo.get("missing-dependency")
missing = {}
md.possible_dependencies(transitive=True, missing=missing)
- assert missing["missing-dependency"] == set([
+ assert set([
"this-is-a-missing-dependency"
- ])
+ ]) == missing["missing-dependency"]
def test_possible_dependencies_with_deptypes(mock_packages):
dtbuild1 = spack.repo.get('dtbuild1')
- assert dtbuild1.possible_dependencies(deptype=('link', 'run')) == {
+ assert {
'dtbuild1': set(['dtrun2', 'dtlink2']),
'dtlink2': set(),
'dtrun2': set(),
- }
+ } == dtbuild1.possible_dependencies(deptype=('link', 'run'))
- assert dtbuild1.possible_dependencies(deptype=('build')) == {
+ assert {
'dtbuild1': set(['dtbuild2', 'dtlink2']),
'dtbuild2': set(),
'dtlink2': set(),
- }
+ } == dtbuild1.possible_dependencies(deptype=('build'))
- assert dtbuild1.possible_dependencies(deptype=('link')) == {
+ assert {
'dtbuild1': set(['dtlink2']),
'dtlink2': set(),
- }
+ } == dtbuild1.possible_dependencies(deptype=('link'))
def test_possible_dependencies_with_multiple_classes(
@@ -88,4 +118,4 @@ def test_possible_dependencies_with_multiple_classes(
'dt-diamond-bottom': set(),
})
- assert spack.package.possible_dependencies(*pkgs) == expected
+ assert expected == spack.package.possible_dependencies(*pkgs)
diff --git a/lib/spack/spack/test/packages.py b/lib/spack/spack/test/packages.py
index 299c56481e..ffaad396c1 100644
--- a/lib/spack/spack/test/packages.py
+++ b/lib/spack/spack/test/packages.py
@@ -402,3 +402,24 @@ def test_bundle_patch_directive(mock_directive_bundle,
match="Patches are not allowed"):
patch = spack.directives.patch('mock/patch.txt')
patch(mock_directive_bundle)
+
+
+def test_fetch_options(mock_packages, config):
+ """Test fetch options inference."""
+
+ pkg = spack.repo.get('fetch-options')
+
+ fetcher = spack.fetch_strategy.for_package_version(pkg, '1.0')
+ assert isinstance(fetcher, spack.fetch_strategy.URLFetchStrategy)
+ assert fetcher.digest == 'abc10'
+ assert fetcher.extra_options == {'timeout': 42, 'cookie': 'foobar'}
+
+ fetcher = spack.fetch_strategy.for_package_version(pkg, '1.1')
+ assert isinstance(fetcher, spack.fetch_strategy.URLFetchStrategy)
+ assert fetcher.digest == 'abc11'
+ assert fetcher.extra_options == {'timeout': 65}
+
+ fetcher = spack.fetch_strategy.for_package_version(pkg, '1.2')
+ assert isinstance(fetcher, spack.fetch_strategy.URLFetchStrategy)
+ assert fetcher.digest == 'abc12'
+ assert fetcher.extra_options == {'cookie': 'baz'}
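
The assertions above imply that a package-level fetch_options attribute
applies to every version unless a version supplies its own, which then
replaces the package-wide options. A guess at the shape of the mock recipe
behind this test (the url is made up):

    class FetchOptions(Package):
        url = 'https://example.com/fetch-options-1.0.tar.gz'

        fetch_options = {'timeout': 42, 'cookie': 'foobar'}  # package-wide

        version('1.2', 'abc12', fetch_options={'cookie': 'baz'})
        version('1.1', 'abc11', fetch_options={'timeout': 65})
        version('1.0', 'abc10')  # inherits the package-wide options
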
diff --git a/lib/spack/spack/test/packaging.py b/lib/spack/spack/test/packaging.py
index 39d12df7b7..39da7c3ae5 100644
--- a/lib/spack/spack/test/packaging.py
+++ b/lib/spack/spack/test/packaging.py
@@ -8,10 +8,11 @@ This test checks the binary packaging infrastructure
"""
import os
import stat
-import sys
import shutil
import pytest
import argparse
+import re
+import platform
from llnl.util.filesystem import mkdirp
@@ -19,16 +20,17 @@ import spack.repo
import spack.store
import spack.binary_distribution as bindist
import spack.cmd.buildcache as buildcache
-import spack.util.gpg
from spack.spec import Spec
from spack.paths import mock_gpg_keys_path
from spack.fetch_strategy import URLFetchStrategy, FetchStrategyComposite
from spack.relocate import needs_binary_relocation, needs_text_relocation
-from spack.relocate import strings_contains_installroot
-from spack.relocate import get_patchelf, relocate_text, relocate_links
-from spack.relocate import substitute_rpath, get_relative_rpaths
-from spack.relocate import macho_replace_paths, macho_make_paths_relative
-from spack.relocate import modify_macho_object, macho_get_paths
+from spack.relocate import relocate_text, relocate_links
+from spack.relocate import get_relative_elf_rpaths
+from spack.relocate import get_normalized_elf_rpaths
+from spack.relocate import macho_make_paths_relative
+from spack.relocate import macho_make_paths_normal
+from spack.relocate import set_placeholder, macho_find_paths
+from spack.relocate import file_is_relocatable
def has_gpg():
@@ -50,9 +52,9 @@ def fake_fetchify(url, pkg):
@pytest.mark.usefixtures('install_mockery', 'mock_gnupghome')
def test_buildcache(mock_archive, tmpdir):
# tweak patchelf to only do a download
- spec = Spec("patchelf")
- spec.concretize()
- pkg = spack.repo.get(spec)
+ pspec = Spec("patchelf")
+ pspec.concretize()
+ pkg = spack.repo.get(pspec)
fake_fetchify(pkg.fetcher, pkg)
mkdirp(os.path.join(pkg.prefix, "bin"))
patchelfscr = os.path.join(pkg.prefix, "bin", "patchelf")
@@ -71,7 +73,7 @@ echo $PATH"""
pkg = spec.package
fake_fetchify(mock_archive.url, pkg)
pkg.do_install()
- pkghash = '/' + spec.dag_hash(7)
+ pkghash = '/' + str(spec.dag_hash(7))
# Put some non-relocatable file in there
filename = os.path.join(spec.prefix, "dummy.txt")
@@ -99,88 +101,69 @@ echo $PATH"""
parser = argparse.ArgumentParser()
buildcache.setup_parser(parser)
+ create_args = ['create', '-a', '-f', '-d', mirror_path, pkghash]
# Create a private key to sign package with if gpg2 available
if spack.util.gpg.Gpg.gpg():
spack.util.gpg.Gpg.create(name='test key 1', expires='0',
email='spack@googlegroups.com',
comment='Spack test key')
- # Create build cache with signing
- args = parser.parse_args(['create', '-d', mirror_path, str(spec)])
- buildcache.buildcache(parser, args)
-
- # Uninstall the package
- pkg.do_uninstall(force=True)
-
- # test overwrite install
- args = parser.parse_args(['install', '-f', str(pkghash)])
- buildcache.buildcache(parser, args)
-
- files = os.listdir(spec.prefix)
+ else:
+ create_args.insert(create_args.index('-a'), '-u')
- # create build cache with relative path and signing
- args = parser.parse_args(
- ['create', '-d', mirror_path, '-f', '-r', str(spec)])
- buildcache.buildcache(parser, args)
+ args = parser.parse_args(create_args)
+ buildcache.buildcache(parser, args)
+ # trigger overwrite warning
+ buildcache.buildcache(parser, args)
- # Uninstall the package
- pkg.do_uninstall(force=True)
+ # Uninstall the package
+ pkg.do_uninstall(force=True)
- # install build cache with verification
- args = parser.parse_args(['install', str(spec)])
- buildcache.install_tarball(spec, args)
+ install_args = ['install', '-a', '-f', pkghash]
+ if not spack.util.gpg.Gpg.gpg():
+ install_args.insert(install_args.index('-a'), '-u')
+ args = parser.parse_args(install_args)
+ # Test install
+ buildcache.buildcache(parser, args)
- # test overwrite install
- args = parser.parse_args(['install', '-f', str(pkghash)])
- buildcache.buildcache(parser, args)
+ files = os.listdir(spec.prefix)
- else:
- # create build cache without signing
- args = parser.parse_args(
- ['create', '-d', mirror_path, '-f', '-u', str(spec)])
- buildcache.buildcache(parser, args)
-
- # Uninstall the package
- pkg.do_uninstall(force=True)
-
- # install build cache without verification
- args = parser.parse_args(['install', '-u', str(spec)])
- buildcache.install_tarball(spec, args)
-
- files = os.listdir(spec.prefix)
- assert 'link_to_dummy.txt' in files
- assert 'dummy.txt' in files
- # test overwrite install without verification
- args = parser.parse_args(['install', '-f', '-u', str(pkghash)])
- buildcache.buildcache(parser, args)
-
- # create build cache with relative path
- args = parser.parse_args(
- ['create', '-d', mirror_path, '-f', '-r', '-u', str(pkghash)])
- buildcache.buildcache(parser, args)
-
- # Uninstall the package
- pkg.do_uninstall(force=True)
-
- # install build cache
- args = parser.parse_args(['install', '-u', str(spec)])
- buildcache.install_tarball(spec, args)
-
- # test overwrite install
- args = parser.parse_args(['install', '-f', '-u', str(pkghash)])
- buildcache.buildcache(parser, args)
-
- files = os.listdir(spec.prefix)
- assert 'link_to_dummy.txt' in files
- assert 'dummy.txt' in files
- assert os.path.realpath(
- os.path.join(spec.prefix, 'link_to_dummy.txt')
- ) == os.path.realpath(os.path.join(spec.prefix, 'dummy.txt'))
+ assert 'link_to_dummy.txt' in files
+ assert 'dummy.txt' in files
# Validate the relocation information
buildinfo = bindist.read_buildinfo_file(spec.prefix)
assert(buildinfo['relocate_textfiles'] == ['dummy.txt'])
assert(buildinfo['relocate_links'] == ['link_to_dummy.txt'])
+ # create build cache with relative path
+ create_args.insert(create_args.index('-a'), '-f')
+ create_args.insert(create_args.index('-a'), '-r')
+ args = parser.parse_args(create_args)
+ buildcache.buildcache(parser, args)
+
+ # Uninstall the package
+ pkg.do_uninstall(force=True)
+
+ if not spack.util.gpg.Gpg.gpg():
+ install_args.insert(install_args.index('-a'), '-u')
+ args = parser.parse_args(install_args)
+ buildcache.buildcache(parser, args)
+
+ # test overwrite install
+ install_args.insert(install_args.index('-a'), '-f')
+ args = parser.parse_args(install_args)
+ buildcache.buildcache(parser, args)
+
+ files = os.listdir(spec.prefix)
+ assert 'link_to_dummy.txt' in files
+ assert 'dummy.txt' in files
+# assert os.path.realpath(
+# os.path.join(spec.prefix, 'link_to_dummy.txt')
+# ) == os.path.realpath(os.path.join(spec.prefix, 'dummy.txt'))
+
+ args = parser.parse_args(['keys'])
+ buildcache.buildcache(parser, args)
+
args = parser.parse_args(['list'])
buildcache.buildcache(parser, args)
@@ -200,6 +183,9 @@ echo $PATH"""
args = parser.parse_args(['keys', '-f'])
buildcache.buildcache(parser, args)
+ args = parser.parse_args(['keys', '-i', '-t'])
+ buildcache.buildcache(parser, args)
+
# unregister mirror with spack config
mirrors = {}
spack.config.set('mirrors', mirrors)
@@ -210,7 +196,10 @@ echo $PATH"""
bindist._cached_specs = set()
+@pytest.mark.usefixtures('install_mockery')
def test_relocate_text(tmpdir):
+ spec = Spec('trivial-install-test-package')
+ spec.concretize()
with tmpdir.as_cwd():
# Validate the text path replacement
old_dir = '/home/spack/opt/spack'
@@ -220,24 +209,46 @@ def test_relocate_text(tmpdir):
script.close()
filenames = [filename]
new_dir = '/opt/rh/devtoolset/'
- relocate_text(filenames, oldpath=old_dir, newpath=new_dir,
- oldprefix=old_dir, newprefix=new_dir)
+ relocate_text(filenames, old_dir, new_dir,
+ old_dir, new_dir,
+ old_dir, new_dir,
+ {old_dir: new_dir})
with open(filename, "r")as script:
for line in script:
assert(new_dir in line)
- assert(strings_contains_installroot(filename, old_dir) is False)
+ assert(file_is_relocatable(os.path.realpath(filename)))
+ # Remove cached binary specs since we deleted the mirror
+ bindist._cached_specs = set()
def test_relocate_links(tmpdir):
with tmpdir.as_cwd():
- old_dir = '/home/spack/opt/spack'
- filename = 'link.ln'
- old_src = os.path.join(old_dir, filename)
- os.symlink(old_src, filename)
- filenames = [filename]
- new_dir = '/opt/rh/devtoolset'
- relocate_links(filenames, old_dir, new_dir)
- assert os.path.realpath(filename) == os.path.join(new_dir, filename)
+ old_layout_root = os.path.join(
+ '%s' % tmpdir, 'home', 'spack', 'opt', 'spack')
+ old_install_prefix = os.path.join(
+ '%s' % old_layout_root, 'debian6', 'test')
+ old_binname = os.path.join(old_install_prefix, 'binfile')
+ placeholder = set_placeholder(old_layout_root)
+ re.sub(old_layout_root, placeholder, old_binname)
+ filenames = ['link.ln', 'outsideprefix.ln']
+ new_layout_root = os.path.join(
+ '%s' % tmpdir, 'opt', 'rh', 'devtoolset')
+ new_install_prefix = os.path.join(
+ '%s' % new_layout_root, 'test', 'debian6')
+ new_linkname = os.path.join(new_install_prefix, 'link.ln')
+ new_linkname2 = os.path.join(new_install_prefix, 'outsideprefix.ln')
+ new_binname = os.path.join(new_install_prefix, 'binfile')
+ mkdirp(new_install_prefix)
+ with open(new_binname, 'w') as f:
+ f.write('\n')
+ os.utime(new_binname, None)
+ os.symlink(old_binname, new_linkname)
+ os.symlink('/usr/lib/libc.so', new_linkname2)
+ relocate_links(filenames, old_layout_root, new_layout_root,
+ old_install_prefix, new_install_prefix,
+ {old_install_prefix: new_install_prefix})
+ assert os.readlink(new_linkname) == new_binname
+ assert os.readlink(new_linkname2) == '/usr/lib/libc.so'
def test_needs_relocation():
@@ -246,16 +257,223 @@ def test_needs_relocation():
assert needs_binary_relocation('application', 'x-executable')
assert not needs_binary_relocation('application', 'x-octet-stream')
assert not needs_binary_relocation('text', 'x-')
-
assert needs_text_relocation('text', 'x-')
assert not needs_text_relocation('symbolic link to', 'x-')
assert needs_binary_relocation('application', 'x-mach-binary')
-def test_macho_paths():
-
- out = macho_make_paths_relative('/Users/Shares/spack/pkgC/lib/libC.dylib',
+def test_replace_paths(tmpdir):
+ with tmpdir.as_cwd():
+ suffix = 'dylib' if platform.system().lower() == 'darwin' else 'so'
+ hash_a = '53moz6jwnw3xpiztxwhc4us26klribws'
+ hash_b = 'tk62dzu62kd4oh3h3heelyw23hw2sfee'
+ hash_c = 'hdkhduizmaddpog6ewdradpobnbjwsjl'
+ hash_d = 'hukkosc7ahff7o65h6cdhvcoxm57d4bw'
+ hash_loco = 'zy4oigsc4eovn5yhr2lk4aukwzoespob'
+
+ prefix2hash = dict()
+
+ old_spack_dir = os.path.join('%s' % tmpdir,
+ 'Users', 'developer', 'spack')
+ mkdirp(old_spack_dir)
+
+ oldprefix_a = os.path.join('%s' % old_spack_dir, 'pkgA-%s' % hash_a)
+ oldlibdir_a = os.path.join('%s' % oldprefix_a, 'lib')
+ mkdirp(oldlibdir_a)
+ prefix2hash[str(oldprefix_a)] = hash_a
+
+ oldprefix_b = os.path.join('%s' % old_spack_dir, 'pkgB-%s' % hash_b)
+ oldlibdir_b = os.path.join('%s' % oldprefix_b, 'lib')
+ mkdirp(oldlibdir_b)
+ prefix2hash[str(oldprefix_b)] = hash_b
+
+ oldprefix_c = os.path.join('%s' % old_spack_dir, 'pkgC-%s' % hash_c)
+ oldlibdir_c = os.path.join('%s' % oldprefix_c, 'lib')
+ oldlibdir_cc = os.path.join('%s' % oldlibdir_c, 'C')
+ mkdirp(oldlibdir_c)
+ prefix2hash[str(oldprefix_c)] = hash_c
+
+ oldprefix_d = os.path.join('%s' % old_spack_dir, 'pkgD-%s' % hash_d)
+ oldlibdir_d = os.path.join('%s' % oldprefix_d, 'lib')
+ mkdirp(oldlibdir_d)
+ prefix2hash[str(oldprefix_d)] = hash_d
+
+ oldprefix_local = os.path.join('%s' % tmpdir, 'usr', 'local')
+ oldlibdir_local = os.path.join('%s' % oldprefix_local, 'lib')
+ mkdirp(oldlibdir_local)
+ prefix2hash[str(oldprefix_local)] = hash_loco
+ libfile_a = 'libA.%s' % suffix
+ libfile_b = 'libB.%s' % suffix
+ libfile_c = 'libC.%s' % suffix
+ libfile_d = 'libD.%s' % suffix
+ libfile_loco = 'libloco.%s' % suffix
+ old_libnames = [os.path.join(oldlibdir_a, libfile_a),
+ os.path.join(oldlibdir_b, libfile_b),
+ os.path.join(oldlibdir_c, libfile_c),
+ os.path.join(oldlibdir_d, libfile_d),
+ os.path.join(oldlibdir_local, libfile_loco)]
+
+ for old_libname in old_libnames:
+ with open(old_libname, 'a'):
+ os.utime(old_libname, None)
+
+ hash2prefix = dict()
+
+ new_spack_dir = os.path.join('%s' % tmpdir, 'Users', 'Shared',
+ 'spack')
+ mkdirp(new_spack_dir)
+
+ prefix_a = os.path.join(new_spack_dir, 'pkgA-%s' % hash_a)
+ libdir_a = os.path.join(prefix_a, 'lib')
+ mkdirp(libdir_a)
+ hash2prefix[hash_a] = str(prefix_a)
+
+ prefix_b = os.path.join(new_spack_dir, 'pkgB-%s' % hash_b)
+ libdir_b = os.path.join(prefix_b, 'lib')
+ mkdirp(libdir_b)
+ hash2prefix[hash_b] = str(prefix_b)
+
+ prefix_c = os.path.join(new_spack_dir, 'pkgC-%s' % hash_c)
+ libdir_c = os.path.join(prefix_c, 'lib')
+ libdir_cc = os.path.join(libdir_c, 'C')
+ mkdirp(libdir_cc)
+ hash2prefix[hash_c] = str(prefix_c)
+
+ prefix_d = os.path.join(new_spack_dir, 'pkgD-%s' % hash_d)
+ libdir_d = os.path.join(prefix_d, 'lib')
+ mkdirp(libdir_d)
+ hash2prefix[hash_d] = str(prefix_d)
+
+ prefix_local = os.path.join('%s' % tmpdir, 'usr', 'local')
+ libdir_local = os.path.join(prefix_local, 'lib')
+ mkdirp(libdir_local)
+ hash2prefix[hash_loco] = str(prefix_local)
+
+ new_libnames = [os.path.join(libdir_a, libfile_a),
+ os.path.join(libdir_b, libfile_b),
+ os.path.join(libdir_cc, libfile_c),
+ os.path.join(libdir_d, libfile_d),
+ os.path.join(libdir_local, libfile_loco)]
+
+ for new_libname in new_libnames:
+ with open(new_libname, 'a'):
+ os.utime(new_libname, None)
+
+ prefix2prefix = dict()
+ for prefix, hash in prefix2hash.items():
+ prefix2prefix[prefix] = hash2prefix[hash]
+
+ out_dict = macho_find_paths([oldlibdir_a, oldlibdir_b,
+ oldlibdir_c,
+ oldlibdir_cc, oldlibdir_local],
+ [os.path.join(oldlibdir_a,
+ libfile_a),
+ os.path.join(oldlibdir_b,
+ libfile_b),
+ os.path.join(oldlibdir_local,
+ libfile_loco)],
+ os.path.join(oldlibdir_cc,
+ libfile_c),
+ old_spack_dir,
+ prefix2prefix
+ )
+ assert out_dict == {oldlibdir_a: libdir_a,
+ oldlibdir_b: libdir_b,
+ oldlibdir_c: libdir_c,
+ oldlibdir_cc: libdir_cc,
+ libdir_local: libdir_local,
+ os.path.join(oldlibdir_a, libfile_a):
+ os.path.join(libdir_a, libfile_a),
+ os.path.join(oldlibdir_b, libfile_b):
+ os.path.join(libdir_b, libfile_b),
+ os.path.join(oldlibdir_local, libfile_loco):
+ os.path.join(libdir_local, libfile_loco),
+ os.path.join(oldlibdir_cc, libfile_c):
+ os.path.join(libdir_cc, libfile_c)}
+
+ out_dict = macho_find_paths([oldlibdir_a, oldlibdir_b,
+ oldlibdir_c,
+ oldlibdir_cc,
+ oldlibdir_local],
+ [os.path.join(oldlibdir_a,
+ libfile_a),
+ os.path.join(oldlibdir_b,
+ libfile_b),
+ os.path.join(oldlibdir_cc,
+ libfile_c),
+ os.path.join(oldlibdir_local,
+ libfile_loco)],
+ None,
+ old_spack_dir,
+ prefix2prefix
+ )
+ assert out_dict == {oldlibdir_a: libdir_a,
+ oldlibdir_b: libdir_b,
+ oldlibdir_c: libdir_c,
+ oldlibdir_cc: libdir_cc,
+ libdir_local: libdir_local,
+ os.path.join(oldlibdir_a, libfile_a):
+ os.path.join(libdir_a, libfile_a),
+ os.path.join(oldlibdir_b, libfile_b):
+ os.path.join(libdir_b, libfile_b),
+ os.path.join(oldlibdir_local, libfile_loco):
+ os.path.join(libdir_local, libfile_loco),
+ os.path.join(oldlibdir_cc, libfile_c):
+ os.path.join(libdir_cc, libfile_c)}
+
+ out_dict = macho_find_paths([oldlibdir_a, oldlibdir_b,
+ oldlibdir_c, oldlibdir_cc,
+ oldlibdir_local],
+ ['@rpath/%s' % libfile_a,
+ '@rpath/%s' % libfile_b,
+ '@rpath/%s' % libfile_c,
+ '@rpath/%s' % libfile_loco],
+ None,
+ old_spack_dir,
+ prefix2prefix
+ )
+
+ assert out_dict == {'@rpath/%s' % libfile_a:
+ '@rpath/%s' % libfile_a,
+ '@rpath/%s' % libfile_b:
+ '@rpath/%s' % libfile_b,
+ '@rpath/%s' % libfile_c:
+ '@rpath/%s' % libfile_c,
+ '@rpath/%s' % libfile_loco:
+ '@rpath/%s' % libfile_loco,
+ oldlibdir_a: libdir_a,
+ oldlibdir_b: libdir_b,
+ oldlibdir_c: libdir_c,
+ oldlibdir_cc: libdir_cc,
+ libdir_local: libdir_local,
+ }
+
+ out_dict = macho_find_paths([oldlibdir_a,
+ oldlibdir_b,
+ oldlibdir_d,
+ oldlibdir_local],
+ ['@rpath/%s' % libfile_a,
+ '@rpath/%s' % libfile_b,
+ '@rpath/%s' % libfile_loco],
+ None,
+ old_spack_dir,
+ prefix2prefix)
+ assert out_dict == {'@rpath/%s' % libfile_a:
+ '@rpath/%s' % libfile_a,
+ '@rpath/%s' % libfile_b:
+ '@rpath/%s' % libfile_b,
+ '@rpath/%s' % libfile_loco:
+ '@rpath/%s' % libfile_loco,
+ oldlibdir_a: libdir_a,
+ oldlibdir_b: libdir_b,
+ oldlibdir_d: libdir_d,
+ libdir_local: libdir_local,
+ }
+
+
+def test_macho_make_paths():
+ out = macho_make_paths_relative('/Users/Shared/spack/pkgC/lib/libC.dylib',
'/Users/Shared/spack',
('/Users/Shared/spack/pkgA/lib',
'/Users/Shared/spack/pkgB/lib',
@@ -264,13 +482,43 @@ def test_macho_paths():
'/Users/Shared/spack/pkgB/libB.dylib',
'/usr/local/lib/libloco.dylib'),
'/Users/Shared/spack/pkgC/lib/libC.dylib')
- assert out == (['@loader_path/../../../../Shared/spack/pkgA/lib',
- '@loader_path/../../../../Shared/spack/pkgB/lib',
- '/usr/local/lib'],
- ['@loader_path/../../../../Shared/spack/pkgA/libA.dylib',
- '@loader_path/../../../../Shared/spack/pkgB/libB.dylib',
- '/usr/local/lib/libloco.dylib'],
- '@rpath/libC.dylib')
+ assert out == {'/Users/Shared/spack/pkgA/lib':
+ '@loader_path/../../pkgA/lib',
+ '/Users/Shared/spack/pkgB/lib':
+ '@loader_path/../../pkgB/lib',
+ '/usr/local/lib': '/usr/local/lib',
+ '/Users/Shared/spack/pkgA/libA.dylib':
+ '@loader_path/../../pkgA/libA.dylib',
+ '/Users/Shared/spack/pkgB/libB.dylib':
+ '@loader_path/../../pkgB/libB.dylib',
+ '/usr/local/lib/libloco.dylib':
+ '/usr/local/lib/libloco.dylib',
+ '/Users/Shared/spack/pkgC/lib/libC.dylib':
+ '@rpath/libC.dylib'}
+
+ out = macho_make_paths_normal('/Users/Shared/spack/pkgC/lib/libC.dylib',
+ ('@loader_path/../../pkgA/lib',
+ '@loader_path/../../pkgB/lib',
+ '/usr/local/lib'),
+ ('@loader_path/../../pkgA/libA.dylib',
+ '@loader_path/../../pkgB/libB.dylib',
+ '/usr/local/lib/libloco.dylib'),
+ '@rpath/libC.dylib')
+
+ assert out == {'@rpath/libC.dylib':
+ '/Users/Shared/spack/pkgC/lib/libC.dylib',
+ '@loader_path/../../pkgA/lib':
+ '/Users/Shared/spack/pkgA/lib',
+ '@loader_path/../../pkgB/lib':
+ '/Users/Shared/spack/pkgB/lib',
+ '/usr/local/lib': '/usr/local/lib',
+ '@loader_path/../../pkgA/libA.dylib':
+ '/Users/Shared/spack/pkgA/libA.dylib',
+ '@loader_path/../../pkgB/libB.dylib':
+ '/Users/Shared/spack/pkgB/libB.dylib',
+ '/usr/local/lib/libloco.dylib':
+ '/usr/local/lib/libloco.dylib'
+ }
out = macho_make_paths_relative('/Users/Shared/spack/pkgC/bin/exeC',
'/Users/Shared/spack',
@@ -281,98 +529,47 @@ def test_macho_paths():
'/Users/Shared/spack/pkgB/libB.dylib',
'/usr/local/lib/libloco.dylib'), None)
- assert out == (['@loader_path/../../pkgA/lib',
- '@loader_path/../../pkgB/lib',
- '/usr/local/lib'],
- ['@loader_path/../../pkgA/libA.dylib',
- '@loader_path/../../pkgB/libB.dylib',
- '/usr/local/lib/libloco.dylib'], None)
-
- out = macho_replace_paths('/Users/Shared/spack',
- '/Applications/spack',
- ('/Users/Shared/spack/pkgA/lib',
- '/Users/Shared/spack/pkgB/lib',
- '/usr/local/lib'),
- ('/Users/Shared/spack/pkgA/libA.dylib',
- '/Users/Shared/spack/pkgB/libB.dylib',
- '/usr/local/lib/libloco.dylib'),
- '/Users/Shared/spack/pkgC/lib/libC.dylib')
- assert out == (['/Applications/spack/pkgA/lib',
- '/Applications/spack/pkgB/lib',
- '/usr/local/lib'],
- ['/Applications/spack/pkgA/libA.dylib',
- '/Applications/spack/pkgB/libB.dylib',
- '/usr/local/lib/libloco.dylib'],
- '/Applications/spack/pkgC/lib/libC.dylib')
-
- out = macho_replace_paths('/Users/Shared/spack',
- '/Applications/spack',
- ('/Users/Shared/spack/pkgA/lib',
- '/Users/Shared/spack/pkgB/lib',
- '/usr/local/lib'),
- ('/Users/Shared/spack/pkgA/libA.dylib',
- '/Users/Shared/spack/pkgB/libB.dylib',
- '/usr/local/lib/libloco.dylib'),
- None)
- assert out == (['/Applications/spack/pkgA/lib',
- '/Applications/spack/pkgB/lib',
- '/usr/local/lib'],
- ['/Applications/spack/pkgA/libA.dylib',
- '/Applications/spack/pkgB/libB.dylib',
- '/usr/local/lib/libloco.dylib'],
- None)
+ assert out == {'/Users/Shared/spack/pkgA/lib':
+ '@loader_path/../../pkgA/lib',
+ '/Users/Shared/spack/pkgB/lib':
+ '@loader_path/../../pkgB/lib',
+ '/usr/local/lib': '/usr/local/lib',
+ '/Users/Shared/spack/pkgA/libA.dylib':
+ '@loader_path/../../pkgA/libA.dylib',
+ '/Users/Shared/spack/pkgB/libB.dylib':
+ '@loader_path/../../pkgB/libB.dylib',
+ '/usr/local/lib/libloco.dylib':
+ '/usr/local/lib/libloco.dylib'}
+
+ out = macho_make_paths_normal('/Users/Shared/spack/pkgC/bin/exeC',
+ ('@loader_path/../../pkgA/lib',
+ '@loader_path/../../pkgB/lib',
+ '/usr/local/lib'),
+ ('@loader_path/../../pkgA/libA.dylib',
+ '@loader_path/../../pkgB/libB.dylib',
+ '/usr/local/lib/libloco.dylib'),
+ None)
+
+ assert out == {'@loader_path/../../pkgA/lib':
+ '/Users/Shared/spack/pkgA/lib',
+ '@loader_path/../../pkgB/lib':
+ '/Users/Shared/spack/pkgB/lib',
+ '/usr/local/lib': '/usr/local/lib',
+ '@loader_path/../../pkgA/libA.dylib':
+ '/Users/Shared/spack/pkgA/libA.dylib',
+ '@loader_path/../../pkgB/libB.dylib':
+ '/Users/Shared/spack/pkgB/libB.dylib',
+ '/usr/local/lib/libloco.dylib':
+ '/usr/local/lib/libloco.dylib'}
def test_elf_paths():
- out = get_relative_rpaths(
+ out = get_relative_elf_rpaths(
'/usr/bin/test', '/usr',
('/usr/lib', '/usr/lib64', '/opt/local/lib'))
assert out == ['$ORIGIN/../lib', '$ORIGIN/../lib64', '/opt/local/lib']
- out = substitute_rpath(
- ('/usr/lib', '/usr/lib64', '/opt/local/lib'), '/usr', '/opt')
- assert out == ['/opt/lib', '/opt/lib64', '/opt/local/lib']
-
-
-@pytest.mark.skipif(sys.platform != 'darwin',
- reason="only works with Mach-o objects")
-def test_relocate_macho(tmpdir):
- with tmpdir.as_cwd():
-
- get_patchelf() # this does nothing on Darwin
-
- rpaths, deps, idpath = macho_get_paths('/bin/bash')
- nrpaths, ndeps, nid = macho_make_paths_relative('/bin/bash', '/usr',
- rpaths, deps, idpath)
- shutil.copyfile('/bin/bash', 'bash')
- modify_macho_object('bash',
- rpaths, deps, idpath,
- nrpaths, ndeps, nid)
-
- rpaths, deps, idpath = macho_get_paths('/bin/bash')
- nrpaths, ndeps, nid = macho_replace_paths('/usr', '/opt',
- rpaths, deps, idpath)
- shutil.copyfile('/bin/bash', 'bash')
- modify_macho_object('bash',
- rpaths, deps, idpath,
- nrpaths, ndeps, nid)
-
- path = '/usr/lib/libncurses.5.4.dylib'
- rpaths, deps, idpath = macho_get_paths(path)
- nrpaths, ndeps, nid = macho_make_paths_relative(path, '/usr',
- rpaths, deps, idpath)
- shutil.copyfile(
- '/usr/lib/libncurses.5.4.dylib', 'libncurses.5.4.dylib')
- modify_macho_object('libncurses.5.4.dylib',
- rpaths, deps, idpath,
- nrpaths, ndeps, nid)
-
- rpaths, deps, idpath = macho_get_paths(path)
- nrpaths, ndeps, nid = macho_replace_paths('/usr', '/opt',
- rpaths, deps, idpath)
- shutil.copyfile(
- '/usr/lib/libncurses.5.4.dylib', 'libncurses.5.4.dylib')
- modify_macho_object(
- 'libncurses.5.4.dylib',
- rpaths, deps, idpath,
- nrpaths, ndeps, nid)
+ out = get_normalized_elf_rpaths(
+ '/usr/bin/test',
+ ['$ORIGIN/../lib', '$ORIGIN/../lib64', '/opt/local/lib'])
+ assert out == ['/usr/lib', '/usr/lib64', '/opt/local/lib']
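
The two ELF helpers are inverses of one another, as the test values show:
relative rpaths are expressed against $ORIGIN, and normalizing maps them back
to absolute paths, while paths outside the old root pass through unchanged:

    from spack.relocate import (get_relative_elf_rpaths,
                                get_normalized_elf_rpaths)

    rel = get_relative_elf_rpaths(
        '/usr/bin/test', '/usr', ('/usr/lib', '/usr/lib64', '/opt/local/lib'))
    # rel == ['$ORIGIN/../lib', '$ORIGIN/../lib64', '/opt/local/lib']

    back = get_normalized_elf_rpaths('/usr/bin/test', rel)
    # back == ['/usr/lib', '/usr/lib64', '/opt/local/lib']
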
diff --git a/lib/spack/spack/test/url_fetch.py b/lib/spack/spack/test/url_fetch.py
index 679240049d..20648b4766 100644
--- a/lib/spack/spack/test/url_fetch.py
+++ b/lib/spack/spack/test/url_fetch.py
@@ -26,10 +26,10 @@ def checksum_type(request):
@pytest.fixture
def pkg_factory():
Pkg = collections.namedtuple(
- 'Pkg', ['url_for_version', 'urls', 'url', 'versions']
+ 'Pkg', ['url_for_version', 'urls', 'url', 'versions', 'fetch_options']
)
- def factory(url, urls):
+ def factory(url, urls, fetch_options={}):
def fn(v):
main_url = url or urls[0]
@@ -37,7 +37,8 @@ def pkg_factory():
return Pkg(
url_for_version=fn, url=url, urls=urls,
- versions=collections.defaultdict(dict)
+ versions=collections.defaultdict(dict),
+ fetch_options=fetch_options
)
return factory
@@ -130,6 +131,10 @@ def test_from_list_url(mock_packages, config, spec, url, digest):
assert isinstance(fetch_strategy, fs.URLFetchStrategy)
assert os.path.basename(fetch_strategy.url) == url
assert fetch_strategy.digest == digest
+ assert fetch_strategy.extra_options == {}
+ pkg.fetch_options = {'timeout': 60}
+ fetch_strategy = fs.from_list_url(pkg)
+ assert fetch_strategy.extra_options == {'timeout': 60}
def test_from_list_url_unspecified(mock_packages, config):
@@ -142,6 +147,10 @@ def test_from_list_url_unspecified(mock_packages, config):
assert isinstance(fetch_strategy, fs.URLFetchStrategy)
assert os.path.basename(fetch_strategy.url) == 'foo-2.0.0.tar.gz'
assert fetch_strategy.digest is None
+ assert fetch_strategy.extra_options == {}
+ pkg.fetch_options = {'timeout': 60}
+ fetch_strategy = fs.from_list_url(pkg)
+ assert fetch_strategy.extra_options == {'timeout': 60}
def test_nosource_from_list_url(mock_packages, config):
@@ -191,3 +200,7 @@ def test_candidate_urls(pkg_factory, url, urls, version, expected):
pkg = pkg_factory(url, urls)
f = fs._from_merged_attrs(fs.URLFetchStrategy, pkg, version)
assert f.candidate_urls == expected
+ assert f.extra_options == {}
+ pkg = pkg_factory(url, urls, fetch_options={'timeout': 60})
+ f = fs._from_merged_attrs(fs.URLFetchStrategy, pkg, version)
+ assert f.extra_options == {'timeout': 60}
diff --git a/lib/spack/spack/util/compression.py b/lib/spack/spack/util/compression.py
index d617954ab1..1688b49f1b 100644
--- a/lib/spack/spack/util/compression.py
+++ b/lib/spack/spack/util/compression.py
@@ -32,6 +32,9 @@ def decompressor_for(path, extension=None):
if extension and re.match(r'gz', extension):
gunzip = which('gunzip', required=True)
return gunzip
+ if extension and re.match(r'bz2', extension):
+ bunzip2 = which('bunzip2', required=True)
+ return bunzip2
tar = which('tar', required=True)
tar.add_default_arg('-xf')
return tar
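
With this change a bare .bz2 file gets bunzip2 rather than falling through to
tar. A small usage sketch (the file name is made up):

    from spack.util.compression import decompressor_for

    decompress = decompressor_for('downloads/patch-1.0.bz2', extension='bz2')
    decompress('downloads/patch-1.0.bz2')  # runs bunzip2 on the file
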
diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py
index 4fb8c5a591..8039dc5fda 100644
--- a/lib/spack/spack/util/web.py
+++ b/lib/spack/spack/util/web.py
@@ -205,6 +205,8 @@ def push_to_url(
# needs to be done in separate steps.
shutil.copy2(local_file_path, remote_file_path)
os.remove(local_file_path)
+ else:
+ raise
elif remote_url.scheme == 's3':
if extra_args is None: