summaryrefslogtreecommitdiff
path: root/lib
diff options
context:
space:
mode:
Diffstat (limited to 'lib')
-rw-r--r--lib/spack/docs/build_systems.rst1
-rw-r--r--lib/spack/docs/build_systems/bundlepackage.rst52
-rw-r--r--lib/spack/docs/conf.py20
-rw-r--r--lib/spack/docs/contribution_guide.rst20
-rw-r--r--lib/spack/docs/extensions.rst6
-rw-r--r--lib/spack/docs/getting_started.rst2
-rw-r--r--lib/spack/docs/packaging_guide.rst32
-rw-r--r--lib/spack/docs/requirements.txt6
-rw-r--r--lib/spack/llnl/util/argparsewriter.py5
-rw-r--r--lib/spack/llnl/util/cpu/detect.py57
-rw-r--r--lib/spack/llnl/util/cpu/microarchitectures.json237
-rw-r--r--lib/spack/llnl/util/cpu/schema.py16
-rw-r--r--lib/spack/llnl/util/filesystem.py4
-rw-r--r--lib/spack/spack/architecture.py7
-rw-r--r--lib/spack/spack/binary_distribution.py4
-rw-r--r--lib/spack/spack/build_systems/cuda.py13
-rw-r--r--lib/spack/spack/build_systems/gnu.py37
-rw-r--r--lib/spack/spack/build_systems/intel.py78
-rw-r--r--lib/spack/spack/build_systems/octave.py9
-rw-r--r--lib/spack/spack/cmd/__init__.py3
-rw-r--r--lib/spack/spack/cmd/buildcache.py8
-rw-r--r--lib/spack/spack/cmd/checksum.py1
-rw-r--r--lib/spack/spack/cmd/create.py8
-rw-r--r--lib/spack/spack/cmd/find.py8
-rw-r--r--lib/spack/spack/cmd/install.py56
-rw-r--r--lib/spack/spack/cmd/url.py45
-rw-r--r--lib/spack/spack/compiler.py11
-rw-r--r--lib/spack/spack/compilers/pgi.py1
-rw-r--r--lib/spack/spack/environment.py5
-rw-r--r--lib/spack/spack/fetch_strategy.py142
-rw-r--r--lib/spack/spack/mirror.py5
-rw-r--r--lib/spack/spack/package.py95
-rw-r--r--lib/spack/spack/pkgkit.py1
-rw-r--r--lib/spack/spack/relocate.py30
-rw-r--r--lib/spack/spack/reporters/cdash.py5
-rw-r--r--lib/spack/spack/s3_handler.py4
-rw-r--r--lib/spack/spack/spec.py6
-rw-r--r--lib/spack/spack/stage.py10
-rw-r--r--lib/spack/spack/test/architecture.py2
-rw-r--r--lib/spack/spack/test/build_distribution.py41
-rw-r--r--lib/spack/spack/test/build_environment.py14
-rw-r--r--lib/spack/spack/test/cmd/__init__.py4
-rw-r--r--lib/spack/spack/test/cmd/buildcache.py23
-rw-r--r--lib/spack/spack/test/cmd/common/__init__.py4
-rw-r--r--lib/spack/spack/test/cmd/env.py35
-rw-r--r--lib/spack/spack/test/cmd/find.py16
-rw-r--r--lib/spack/spack/test/cmd/flake8.py2
-rw-r--r--lib/spack/spack/test/cmd/install.py15
-rw-r--r--lib/spack/spack/test/cmd/mirror.py67
-rw-r--r--lib/spack/spack/test/conftest.py3
-rw-r--r--lib/spack/spack/test/data/targets/linux-centos7-cascadelake20
-rw-r--r--lib/spack/spack/test/data/targets/linux-centos7-thunderx28
-rw-r--r--lib/spack/spack/test/fetch_strategy.py17
-rw-r--r--lib/spack/spack/test/llnl/util/__init__.py4
-rw-r--r--lib/spack/spack/test/llnl/util/cpu.py19
-rw-r--r--lib/spack/spack/test/modules/__init__.py4
-rw-r--r--lib/spack/spack/test/package_class.py38
-rw-r--r--lib/spack/spack/test/package_sanity.py23
-rw-r--r--lib/spack/spack/test/packages.py58
-rw-r--r--lib/spack/spack/test/patch.py15
-rw-r--r--lib/spack/spack/test/relocate.py9
-rw-r--r--lib/spack/spack/test/s3_fetch.py29
-rw-r--r--lib/spack/spack/test/spec_semantics.py8
-rw-r--r--lib/spack/spack/test/url_fetch.py59
-rw-r--r--lib/spack/spack/test/url_parse.py138
-rw-r--r--lib/spack/spack/test/util/__init__.py4
-rw-r--r--lib/spack/spack/test/util/executable.py2
-rw-r--r--lib/spack/spack/test/util/util_url.py7
-rw-r--r--lib/spack/spack/test/versions.py2
-rw-r--r--lib/spack/spack/test/web.py72
-rw-r--r--lib/spack/spack/url.py76
-rw-r--r--lib/spack/spack/util/package_hash.py13
-rw-r--r--lib/spack/spack/util/url.py4
-rw-r--r--lib/spack/spack/util/web.py190
-rw-r--r--lib/spack/spack/variant.py15
75 files changed, 1530 insertions, 580 deletions
diff --git a/lib/spack/docs/build_systems.rst b/lib/spack/docs/build_systems.rst
index 93b8e5c7a8..cdb12a8d91 100644
--- a/lib/spack/docs/build_systems.rst
+++ b/lib/spack/docs/build_systems.rst
@@ -56,6 +56,7 @@ on these ideas for each distinct build system that Spack supports:
:maxdepth: 1
:caption: Other
+ build_systems/bundlepackage
build_systems/cudapackage
build_systems/intelpackage
build_systems/custompackage
diff --git a/lib/spack/docs/build_systems/bundlepackage.rst b/lib/spack/docs/build_systems/bundlepackage.rst
new file mode 100644
index 0000000000..5f61b71351
--- /dev/null
+++ b/lib/spack/docs/build_systems/bundlepackage.rst
@@ -0,0 +1,52 @@
+.. Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
+ Spack Project Developers. See the top-level COPYRIGHT file for details.
+
+ SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+.. _bundlepackage:
+
+-------------
+BundlePackage
+-------------
+
+``BundlePackage`` represents a set of packages that are expected to work well
+together, such as a collection of commonly used software libraries. The
+associated software is specified as bundle dependencies.
+
+
+^^^^^^^^
+Creation
+^^^^^^^^
+
+Be sure to specify the ``bundle`` template if you are using ``spack create``
+to generate a package from the template. For example, use the following
+command to create a bundle package whose class name will be ``Mybundle``:
+
+.. code-block:: console
+
+ $ spack create --template bundle --name mybundle
+
+
+
+^^^^^^
+Phases
+^^^^^^
+
+The ``BundlePackage`` base class does not provide any phases by default
+since the bundle does not represent a build system.
+
+
+^^^
+URL
+^^^
+
+The ``url`` property does not have meaning since there is no package-specific
+code to fetch.
+
+
+^^^^^^^
+Version
+^^^^^^^
+
+At least one ``version`` must be specified in order for the package to
+build.
diff --git a/lib/spack/docs/conf.py b/lib/spack/docs/conf.py
index e5305a9317..06b993efb5 100644
--- a/lib/spack/docs/conf.py
+++ b/lib/spack/docs/conf.py
@@ -176,7 +176,25 @@ exclude_patterns = ['_build', '_spack_root', '.spack-env']
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
+# We use our own extension of the default style with a few modifications
+from pygments.style import Style
+from pygments.styles.default import DefaultStyle
+from pygments.token import Generic, Comment, Text
+
+class SpackStyle(DefaultStyle):
+ styles = DefaultStyle.styles.copy()
+ background_color = "#f4f4f8"
+ styles[Generic.Output] = "#355"
+ styles[Generic.Prompt] = "bold #346ec9"
+
+import pkg_resources
+dist = pkg_resources.Distribution(__file__)
+sys.path.append('.') # make 'conf' module findable
+ep = pkg_resources.EntryPoint.parse('spack = conf:SpackStyle', dist=dist)
+dist._ep_map = {'pygments.styles': {'plugin1': ep}}
+pkg_resources.working_set.add(dist)
+
+pygments_style = 'spack'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
diff --git a/lib/spack/docs/contribution_guide.rst b/lib/spack/docs/contribution_guide.rst
index 73f25c6f03..17e37eb92b 100644
--- a/lib/spack/docs/contribution_guide.rst
+++ b/lib/spack/docs/contribution_guide.rst
@@ -223,8 +223,7 @@ documentation. In order to prevent things like broken links and missing imports,
we added documentation tests that build the documentation and fail if there
are any warning or error messages.
-Building the documentation requires several dependencies, all of which can be
-installed with Spack:
+Building the documentation requires several dependencies:
* sphinx
* sphinxcontrib-programoutput
@@ -234,11 +233,18 @@ installed with Spack:
* mercurial
* subversion
+All of these can be installed with Spack, e.g.
+
+.. code-block:: console
+
+ $ spack install py-sphinx py-sphinxcontrib-programoutput py-sphinx-rtd-theme graphviz git mercurial subversion
+
.. warning::
Sphinx has `several required dependencies <https://github.com/spack/spack/blob/develop/var/spack/repos/builtin/packages/py-sphinx/package.py>`_.
- If you installed ``py-sphinx`` with Spack, make sure to add all of these
- dependencies to your ``PYTHONPATH``. The easiest way to do this is to run:
+ If you're using a ``python`` from Spack and you installed
+ ``py-sphinx`` and friends, you need to make them available to your
+ ``python``. The easiest way to do this is to run:
.. code-block:: console
@@ -246,8 +252,10 @@ installed with Spack:
$ spack activate py-sphinx-rtd-theme
$ spack activate py-sphinxcontrib-programoutput
- so that all of the dependencies are symlinked to a central location.
- If you see an error message like:
+ so that all of the dependencies are symlinked into that Python's
+ tree. Alternatively, you could arrange for their library
+ directories to be added to PYTHONPATH. If you see an error message
+ like:
.. code-block:: console
diff --git a/lib/spack/docs/extensions.rst b/lib/spack/docs/extensions.rst
index 8f4c54b435..18b4e4b41c 100644
--- a/lib/spack/docs/extensions.rst
+++ b/lib/spack/docs/extensions.rst
@@ -9,12 +9,6 @@
Custom Extensions
=================
-.. warning::
-
- The support for extending Spack with custom commands is still experimental.
- Users should expect APIs or prescribed directory structures to
- change at any time.
-
*Spack extensions* permit you to extend Spack capabilities by deploying your
own custom commands or logic in an arbitrary location on your filesystem.
This might be extremely useful e.g. to develop and maintain a command whose purpose is
diff --git a/lib/spack/docs/getting_started.rst b/lib/spack/docs/getting_started.rst
index 7f3a34b62b..4d316b3bfa 100644
--- a/lib/spack/docs/getting_started.rst
+++ b/lib/spack/docs/getting_started.rst
@@ -97,7 +97,7 @@ Check Installation
With Spack installed, you should be able to run some basic Spack
commands. For example:
-.. command-output:: spack spec netcdf
+.. command-output:: spack spec netcdf-c
^^^^^^^^^^^^^^^^^^^^^^^^^^
diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst
index 7fc9d09e2d..f3b9295f61 100644
--- a/lib/spack/docs/packaging_guide.rst
+++ b/lib/spack/docs/packaging_guide.rst
@@ -553,6 +553,34 @@ version. This is useful for packages that have an easy to extrapolate URL, but
keep changing their URL format every few releases. With this method, you only
need to specify the ``url`` when the URL changes.
+"""""""""""""""""""""""
+Mirrors of the main URL
+"""""""""""""""""""""""
+
+Spack supports listing mirrors of the main URL in a package by defining
+the ``urls`` attribute:
+
+.. code-block:: python
+
+ class Foo(Package):
+
+ urls = [
+ 'http://example.com/foo-1.0.tar.gz',
+ 'http://mirror.com/foo-1.0.tar.gz'
+ ]
+
+instead of just a single ``url``. This attribute is a list of possible URLs that
+will be tried in order when fetching packages. Notice that either one of ``url``
+or ``urls`` can be present in a package, but not both at the same time.
+
+A well-known case of packages that can be fetched from multiple mirrors is that
+of GNU. For that, Spack goes a step further and defines a mixin class that
+takes care of all of the plumbing and requires packagers to just define a proper
+``gnu_mirror_path`` attribute:
+
+.. literalinclude:: _spack_root/var/spack/repos/builtin/packages/autoconf/package.py
+ :lines: 9-18
+
^^^^^^^^^^^^^^^^^^^^^^^^
Skipping the expand step
^^^^^^^^^^^^^^^^^^^^^^^^
@@ -1479,8 +1507,8 @@ that the same package with different patches applied will have different
hash identifiers. To ensure that the hashing scheme is consistent, you
must use a ``sha256`` checksum for the patch. Patches will be fetched
from their URLs, checked, and applied to your source code. You can use
-the ``spack sha256`` command to generate a checksum for a patch file or
-URL.
+the GNU utils ``sha256sum`` or the macOS ``shasum -a 256`` commands to
+generate a checksum for a patch file.
Spack can also handle compressed patches. If you use these, Spack needs
a little more help. Specifically, it needs *two* checksums: the
diff --git a/lib/spack/docs/requirements.txt b/lib/spack/docs/requirements.txt
index 190b018a0b..f5c50b0199 100644
--- a/lib/spack/docs/requirements.txt
+++ b/lib/spack/docs/requirements.txt
@@ -1,7 +1,7 @@
# These dependencies should be installed using pip in order
# to build the documentation.
-sphinx==2.0.1
-sphinxcontrib-programoutput==0.14
-sphinx-rtd-theme==0.4.3
+sphinx
+sphinxcontrib-programoutput
+sphinx-rtd-theme
python-levenshtein
diff --git a/lib/spack/llnl/util/argparsewriter.py b/lib/spack/llnl/util/argparsewriter.py
index 16bb570a77..a942f511f2 100644
--- a/lib/spack/llnl/util/argparsewriter.py
+++ b/lib/spack/llnl/util/argparsewriter.py
@@ -10,9 +10,10 @@ import errno
import sys
-class ArgparseWriter(object):
+class ArgparseWriter(argparse.HelpFormatter):
"""Analyzes an argparse ArgumentParser for easy generation of help."""
def __init__(self, out=sys.stdout):
+ super(ArgparseWriter, self).__init__(out)
self.level = 0
self.out = out
@@ -48,7 +49,7 @@ class ArgparseWriter(object):
def action_group(function, actions):
for action in actions:
arg = fmt._format_action_invocation(action)
- help = action.help if action.help else ''
+ help = self._expand_help(action) if action.help else ''
function(arg, re.sub('\n', ' ', help))
if root:
diff --git a/lib/spack/llnl/util/cpu/detect.py b/lib/spack/llnl/util/cpu/detect.py
index c89f67c852..76465037bf 100644
--- a/lib/spack/llnl/util/cpu/detect.py
+++ b/lib/spack/llnl/util/cpu/detect.py
@@ -13,6 +13,7 @@ import warnings
import six
from .microarchitecture import generic_microarchitecture, targets
+from .schema import targets_json
#: Mapping from operating systems to chain of commands
#: to obtain a dictionary of raw info on the current cpu
@@ -108,21 +109,37 @@ def sysctl_info_dict():
'model': sysctl('-n', 'machdep.cpu.model'),
'model name': sysctl('-n', 'machdep.cpu.brand_string')
}
+ return info
- # Super hacky way to deal with slight representation differences
- # Would be better to somehow consider these "identical"
- if 'sse4.1' in info['flags']:
- info['flags'] += ' sse4_1'
- if 'sse4.2' in info['flags']:
- info['flags'] += ' sse4_2'
- if 'avx1.0' in info['flags']:
- info['flags'] += ' avx'
- if 'clfsopt' in info['flags']:
- info['flags'] += ' clflushopt'
- if 'xsave' in info['flags']:
- info['flags'] += ' xsavec xsaveopt'
- return info
+def adjust_raw_flags(info):
+ """Adjust the flags detected on the system to homogenize
+ slightly different representations.
+ """
+ # Flags detected on Darwin turned to their linux counterpart
+ flags = info.get('flags', [])
+ d2l = targets_json['conversions']['darwin_flags']
+ for darwin_flag, linux_flag in d2l.items():
+ if darwin_flag in flags:
+ info['flags'] += ' ' + linux_flag
+
+
+def adjust_raw_vendor(info):
+ """Adjust the vendor field to make it human readable"""
+ if 'CPU implementer' not in info:
+ return
+
+ # Mapping numeric codes to vendor (ARM). This list is a merge from
+ # different sources:
+ #
+ # https://github.com/karelzak/util-linux/blob/master/sys-utils/lscpu-arm.c
+ # https://developer.arm.com/docs/ddi0487/latest/arm-architecture-reference-manual-armv8-for-armv8-a-architecture-profile
+ # https://github.com/gcc-mirror/gcc/blob/master/gcc/config/aarch64/aarch64-cores.def
+ # https://patchwork.kernel.org/patch/10524949/
+ arm_vendors = targets_json['conversions']['arm_vendors']
+ arm_code = info['CPU implementer']
+ if arm_code in arm_vendors:
+ info['CPU implementer'] = arm_vendors[arm_code]
def raw_info_dictionary():
@@ -139,6 +156,8 @@ def raw_info_dictionary():
warnings.warn(str(e))
if info:
+ adjust_raw_flags(info)
+ adjust_raw_vendor(info)
break
return info
@@ -223,3 +242,15 @@ def compatibility_check_for_x86_64(info, target):
return (target == arch_root or arch_root in target.ancestors) \
and (target.vendor == vendor or target.vendor == 'generic') \
and target.features.issubset(features)
+
+
+@compatibility_check(architecture_family='aarch64')
+def compatibility_check_for_aarch64(info, target):
+ basename = 'aarch64'
+ features = set(info.get('Features', '').split())
+ vendor = info.get('CPU implementer', 'generic')
+
+ arch_root = targets[basename]
+ return (target == arch_root or arch_root in target.ancestors) \
+ and (target.vendor == vendor or target.vendor == 'generic') \
+ and target.features.issubset(features)
diff --git a/lib/spack/llnl/util/cpu/microarchitectures.json b/lib/spack/llnl/util/cpu/microarchitectures.json
index bb3b4db21a..55b40d65ae 100644
--- a/lib/spack/llnl/util/cpu/microarchitectures.json
+++ b/lib/spack/llnl/util/cpu/microarchitectures.json
@@ -64,13 +64,13 @@
"clang": [
{
"versions": "0.0.0-apple:",
- "family": "x86-64",
- "flags": "-march={family}"
+ "name": "x86-64",
+ "flags": "-march={name}"
},
{
"versions": ":",
- "family": "x86-64",
- "flags": "-march={family} -mcpu=generic"
+ "name": "x86-64",
+ "flags": "-march={name} -mtune=generic"
}
],
"intel": {
@@ -96,8 +96,7 @@
},
"clang": {
"versions": "3.9:",
- "family": "x86-64",
- "flags": "-march={family} -mcpu={name}"
+ "flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "16.0:",
@@ -122,8 +121,7 @@
},
"clang": {
"versions": "3.9:",
- "family": "x86-64",
- "flags": "-march={family} -mcpu={name}"
+ "flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "16.0:",
@@ -157,8 +155,7 @@
],
"clang": {
"versions": "3.9:",
- "family": "x86-64",
- "flags": "-march={family} -mcpu={name}"
+ "flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "16.0:",
@@ -188,8 +185,7 @@
},
"clang": {
"versions": "3.9:",
- "family": "x86-64",
- "flags": "-march={family} -mcpu={name}"
+ "flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "16.0:",
@@ -227,8 +223,7 @@
],
"clang": {
"versions": "3.9:",
- "family": "x86-64",
- "flags": "-march={family} -mcpu={name}"
+ "flags": "-march={name} -mtune={name}"
},
"intel": [
{
@@ -274,8 +269,7 @@
],
"clang": {
"versions": "3.9:",
- "family": "x86-64",
- "flags": "-march={family} -mcpu={name}"
+ "flags": "-march={name} -mtune={name}"
},
"intel": [
{
@@ -326,8 +320,7 @@
],
"clang": {
"versions": "3.9:",
- "family": "x86-64",
- "flags": "-march={family} -mcpu={name}"
+ "flags": "-march={name} -mtune={name}"
},
"intel": [
{
@@ -373,8 +366,7 @@
},
"clang": {
"versions": "3.9:",
- "family": "x86-64",
- "flags": "-march={family} -mcpu={name}"
+ "flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "18.0:",
@@ -416,8 +408,7 @@
},
"clang": {
"versions": "3.9:",
- "family": "x86-64",
- "flags": "-march={family} -mcpu={name}"
+ "flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "18.0:",
@@ -463,8 +454,7 @@
"clang": {
"versions": "3.9:",
"name": "knl",
- "family": "x86-64",
- "flags": "-march={family} -mcpu={name}"
+ "flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "18.0:",
@@ -515,8 +505,7 @@
"clang": {
"versions": "3.9:",
"name": "skylake-avx512",
- "family": "x86-64",
- "flags": "-march={family} -mcpu={name}"
+ "flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "18.0:",
@@ -568,8 +557,7 @@
},
"clang": {
"versions": "3.9:",
- "family": "x86-64",
- "flags": "-march={family} -mcpu={name}"
+ "flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "18.0:",
@@ -609,7 +597,7 @@
"avx512bw",
"avx512dq",
"avx512cd",
- "avx512vnni"
+ "avx512_vnni"
],
"compilers": {
"gcc": {
@@ -618,8 +606,7 @@
},
"clang": {
"versions": "8.0:",
- "family": "x86-64",
- "flags": "-march={family} -mcpu={name}"
+ "flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "19.0:",
@@ -685,13 +672,11 @@
{
"versions": "7.0:",
"name": "icelake-client",
- "family": "x86-64",
- "flags": "-march={family} -mcpu={name}"
+ "flags": "-march={name} -mtune={name}"
},
{
"versions": "6.0:6.9",
- "family": "x86-64",
- "flags": "-march={family} -mcpu={name}"
+ "flags": "-march={name} -mtune={name}"
}
],
"intel": {
@@ -723,8 +708,7 @@
"clang": {
"versions": "3.9:",
"name": "amdfam10",
- "family": "x86-64",
- "flags": "-march={family} -mcpu={name}"
+ "flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "16.0:",
@@ -761,8 +745,7 @@
"clang": {
"versions": "3.9:",
"name": "bdver1",
- "family": "x86-64",
- "flags": "-march={family} -mcpu={name}"
+ "flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "16.0:",
@@ -803,8 +786,7 @@
"clang": {
"versions": "3.9:",
"name": "bdver2",
- "family": "x86-64",
- "flags": "-march={family} -mcpu={name}"
+ "flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "16.0:",
@@ -846,8 +828,7 @@
"clang": {
"versions": "3.9:",
"name": "bdver3",
- "family": "x86-64",
- "flags": "-march={family} -mcpu={name}"
+ "flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "16.0:",
@@ -892,8 +873,7 @@
"clang": {
"versions": "3.9:",
"name": "bdver4",
- "family": "x86-64",
- "flags": "-march={family} -mcpu={name}"
+ "flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "16.0:",
@@ -942,8 +922,7 @@
"clang": {
"versions": "4.0:",
"name": "znver1",
- "family": "x86-64",
- "flags": "-march={family} -mcpu={name}"
+ "flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "16.0:",
@@ -993,8 +972,7 @@
"clang": {
"versions": "9.0:",
"name": "znver2",
- "family": "x86-64",
- "flags": "-march={family} -mcpu={name}"
+ "flags": "-march={name} -mtune={name}"
},
"intel": {
"versions": "16.0:",
@@ -1016,8 +994,7 @@
},
"clang": {
"versions": ":",
- "family": "ppc64",
- "flags": "-march={family} -mcpu=generic"
+ "flags": "-mcpu={name} -mtune={name}"
}
}
},
@@ -1033,9 +1010,7 @@
},
"clang": {
"versions": "3.9:",
- "family": "ppc64",
- "name": "pwr7",
- "flags": "-march={family} -mcpu={name}"
+ "flags": "-mcpu={name} -mtune={name}"
}
}
},
@@ -1058,9 +1033,7 @@
],
"clang": {
"versions": "3.9:",
- "family": "ppc64",
- "name": "pwr8",
- "flags": "-march={family} -mcpu={name}"
+ "flags": "-mcpu={name} -mtune={name}"
}
}
},
@@ -1076,9 +1049,7 @@
},
"clang": {
"versions": "3.9:",
- "family": "ppc64",
- "name": "pwr9",
- "flags": "-march={family} -mcpu={name}"
+ "flags": "-mcpu={name} -mtune={name}"
}
}
},
@@ -1094,8 +1065,7 @@
},
"clang": {
"versions": ":",
- "family": "ppc64le",
- "flags": "-march={family} -mcpu=generic"
+ "flags": "-mcpu={name} -mtune={name}"
}
}
},
@@ -1121,8 +1091,8 @@
"clang": {
"versions": "3.9:",
"family": "ppc64le",
- "name": "pwr8",
- "flags": "-march={family} -mcpu={name}"
+ "name": "power8",
+ "flags": "-mcpu={name} -mtune={name}"
}
}
},
@@ -1140,8 +1110,8 @@
"clang": {
"versions": "3.9:",
"family": "ppc64le",
- "name": "pwr9",
- "flags": "-march={family} -mcpu={name}"
+ "name": "power9",
+ "flags": "-mcpu={name} -mtune={name}"
}
}
},
@@ -1156,11 +1126,113 @@
},
"clang": {
"versions": ":",
- "family": "aarch64",
- "flags": "-march={family} -mcpu=generic"
+ "flags": "-march=armv8-a -mtune=generic"
}
}
},
+ "thunderx2": {
+ "from": "aarch64",
+ "vendor": "Cavium",
+ "features": [
+ "fp",
+ "asimd",
+ "evtstrm",
+ "aes",
+ "pmull",
+ "sha1",
+ "sha2",
+ "crc32",
+ "atomics",
+ "cpuid",
+ "asimdrdm"
+ ],
+ "compilers": {
+ "gcc": [
+ {
+ "versions": "4.8:4.8.9",
+ "flags": "-march=armv8-a"
+ },
+ {
+ "versions": "4.9:5.9",
+ "flags": "-march=armv8-a+crc+crypto"
+ },
+ {
+ "versions": "6:6.9",
+ "flags": "-march=armv8.1-a+crc+crypto"
+ },
+ {
+ "versions": "7:",
+ "flags": "-mcpu=thunderx2t99"
+ }
+ ],
+ "clang": [
+ {
+ "versions": "3.9:4.9",
+ "flags": "-march=armv8.1-a+crc+crypto"
+ },
+ {
+ "versions": "5:",
+ "flags": "-mcpu=thunderx2t99"
+ }
+ ]
+ }
+ },
+ "a64fx": {
+ "from": "aarch64",
+ "vendor": "Fujitsu",
+ "features": [
+ "fp",
+ "asimd",
+ "evtstrm",
+ "aes",
+ "pmull",
+ "sha1",
+ "sha2",
+ "crc32",
+ "atomics",
+ "cpuid",
+ "asimdrdm",
+ "fphp",
+ "asimdhp",
+ "fcma",
+ "dcpop",
+ "sve"
+ ],
+ "compilers": {
+ "gcc": [
+ {
+ "versions": "4.8:4.8.9",
+ "flags": "-march=armv8-a"
+ },
+ {
+ "versions": "4.9:5.9",
+ "flags": "-march=armv8-a+crc+crypto"
+ },
+ {
+ "versions": "6:6.9",
+ "flags": "-march=armv8.1-a+crc+crypto"
+ },
+ {
+ "versions": "7:7.9",
+ "flags": "-march=armv8.2a+crc+crypto+fp16"
+ },
+ {
+ "versions": "8:",
+ "flags": "-march=armv8.2a+crc+aes+sha2+fp16+sve -msve-vector-bits=512"
+ }
+ ],
+ "clang": [
+ {
+ "versions": "3.9:4.9",
+ "flags": "-march=armv8.2-a+crc+crypto+fp16"
+ },
+ {
+ "versions": "5:",
+ "flags": "-march=armv8.2-a+crc+crypto+fp16+sve"
+ }
+ ]
+ }
+ },
"arm": {
"from": null,
"vendor": "generic",
@@ -1244,5 +1316,34 @@
"aarch64"
]
}
+ },
+ "conversions": {
+ "description": "Conversions that map some platform specific values to canonical values",
+ "arm_vendors": {
+ "0x41": "ARM",
+ "0x42": "Broadcom",
+ "0x43": "Cavium",
+ "0x44": "DEC",
+ "0x46": "Fujitsu",
+ "0x48": "HiSilicon",
+ "0x49": "Infineon Technologies AG",
+ "0x4d": "Motorola",
+ "0x4e": "Nvidia",
+ "0x50": "APM",
+ "0x51": "Qualcomm",
+ "0x53": "Samsung",
+ "0x56": "Marvell",
+ "0x61": "Apple",
+ "0x66": "Faraday",
+ "0x68": "HXT",
+ "0x69": "Intel"
+ },
+ "darwin_flags": {
+ "sse4.1": "sse4_1",
+ "sse4.2": "sse4_2",
+ "avx1.0": "avx",
+ "clfsopt": "clflushopt",
+ "xsave": "xsavec xsaveopt"
+ }
}
}
diff --git a/lib/spack/llnl/util/cpu/schema.py b/lib/spack/llnl/util/cpu/schema.py
index d13d014c01..cc15cb64ba 100644
--- a/lib/spack/llnl/util/cpu/schema.py
+++ b/lib/spack/llnl/util/cpu/schema.py
@@ -72,7 +72,21 @@ properties = {
'additionalProperties': False
}
},
-
+ },
+ 'conversions': {
+ 'type': 'object',
+ 'properties': {
+ 'description': {
+ 'type': 'string'
+ },
+ 'arm_vendors': {
+ 'type': 'object',
+ },
+ 'darwin_flags': {
+ 'type': 'object'
+ }
+ },
+ 'additionalProperties': False
}
}
diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py
index f6c8d161d7..e9dd88ffdb 100644
--- a/lib/spack/llnl/util/filesystem.py
+++ b/lib/spack/llnl/util/filesystem.py
@@ -1154,7 +1154,9 @@ class HeaderList(FileList):
# Make sure to only match complete words, otherwise path components such
# as "xinclude" will cause false matches.
- include_regex = re.compile(r'(.*)(\binclude\b)(.*)')
+ # Avoid matching paths such as <prefix>/include/something/detail/include,
+ # e.g. in the CUDA Toolkit which ships internal libc++ headers.
+ include_regex = re.compile(r'(.*?)(\binclude\b)(.*)')
def __init__(self, files):
super(HeaderList, self).__init__(files)
diff --git a/lib/spack/spack/architecture.py b/lib/spack/spack/architecture.py
index 7552795cd2..494563b0cb 100644
--- a/lib/spack/spack/architecture.py
+++ b/lib/spack/spack/architecture.py
@@ -69,6 +69,7 @@ from llnl.util.lang import memoized, list_modules, key_ordering
import spack.compiler
import spack.paths
import spack.error as serr
+import spack.util.executable
import spack.version
from spack.util.naming import mod_to_class
from spack.util.spack_yaml import syaml_dict
@@ -214,7 +215,11 @@ class Target(object):
import spack.spec
if isinstance(compiler, spack.spec.CompilerSpec):
compiler = spack.compilers.compilers_for_spec(compiler).pop()
- compiler_version = compiler.cc_version(compiler.cc)
+ try:
+ compiler_version = compiler.cc_version(compiler.cc)
+ except spack.util.executable.ProcessError as e:
+ # log this and just return compiler.version instead
+ tty.debug(str(e))
return self.microarchitecture.optimization_flags(
compiler.name, str(compiler_version)
diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py
index 3b10cca180..6d9d45e12a 100644
--- a/lib/spack/spack/binary_distribution.py
+++ b/lib/spack/spack/binary_distribution.py
@@ -272,7 +272,7 @@ def generate_package_index(cache_prefix):
Creates (or replaces) the "index.html" page at the location given in
cache_prefix. This page contains a link for each binary package (*.yaml)
- and signing key (*.key) under cache_prefix.
+ and public key (*.key) under cache_prefix.
"""
tmpdir = tempfile.mkdtemp()
try:
@@ -679,7 +679,7 @@ def get_specs(force=False):
return _cached_specs
if not spack.mirror.MirrorCollection():
- tty.warn("No Spack mirrors are currently configured")
+ tty.debug("No Spack mirrors are currently configured")
return {}
urls = set()
diff --git a/lib/spack/spack/build_systems/cuda.py b/lib/spack/spack/build_systems/cuda.py
index b75807fd06..79c16114f9 100644
--- a/lib/spack/spack/build_systems/cuda.py
+++ b/lib/spack/spack/build_systems/cuda.py
@@ -61,12 +61,13 @@ class CudaPackage(PackageBase):
conflicts('%gcc@6:', when='+cuda ^cuda@:8' + arch_platform)
conflicts('%gcc@7:', when='+cuda ^cuda@:9.1' + arch_platform)
conflicts('%gcc@8:', when='+cuda ^cuda@:10.0.130' + arch_platform)
- conflicts('%gcc@9:', when='+cuda ^cuda@:10.1.243' + arch_platform)
+ conflicts('%gcc@9:', when='+cuda ^cuda@:10.2.89' + arch_platform)
conflicts('%pgi@:14.8', when='+cuda ^cuda@:7.0.27' + arch_platform)
conflicts('%pgi@:15.3,15.5:', when='+cuda ^cuda@7.5' + arch_platform)
conflicts('%pgi@:16.2,16.0:16.3', when='+cuda ^cuda@8' + arch_platform)
conflicts('%pgi@:15,18:', when='+cuda ^cuda@9.0:9.1' + arch_platform)
conflicts('%pgi@:16', when='+cuda ^cuda@9.2.88:10' + arch_platform)
+ conflicts('%pgi@:17', when='+cuda ^cuda@10.2.89' + arch_platform)
conflicts('%clang@:3.4', when='+cuda ^cuda@:7.5' + arch_platform)
conflicts('%clang@:3.7,4:',
when='+cuda ^cuda@8.0:9.0' + arch_platform)
@@ -74,6 +75,10 @@ class CudaPackage(PackageBase):
when='+cuda ^cuda@9.1' + arch_platform)
conflicts('%clang@:3.7,5.1:', when='+cuda ^cuda@9.2' + arch_platform)
conflicts('%clang@:3.7,6.1:', when='+cuda ^cuda@10.0.130' + arch_platform)
+ conflicts('%clang@:3.7,7.1:', when='+cuda ^cuda@10.1.105' + arch_platform)
+ conflicts('%clang@:3.7,8.1:',
+ when='+cuda ^cuda@10.1.105:10.1.243' + arch_platform)
+ conflicts('%clang@:3.2,9.0:', when='+cuda ^cuda@10.2.89' + arch_platform)
# x86_64 vs. ppc64le differ according to NVidia docs
# Linux ppc64le compiler conflicts from Table from the docs below:
@@ -95,6 +100,8 @@ class CudaPackage(PackageBase):
conflicts('%clang@5:', when='+cuda ^cuda@:9.1' + arch_platform)
conflicts('%clang@6:', when='+cuda ^cuda@:9.2' + arch_platform)
conflicts('%clang@7:', when='+cuda ^cuda@10.0.130' + arch_platform)
+ conflicts('%clang@7.1:', when='+cuda ^cuda@:10.1.105' + arch_platform)
+ conflicts('%clang@8.1:', when='+cuda ^cuda@:10.2.89' + arch_platform)
# Intel is mostly relevant for x86_64 Linux, even though it also
# exists for Mac OS X. No information prior to CUDA 3.2 or Intel 11.1
@@ -107,12 +114,12 @@ class CudaPackage(PackageBase):
conflicts('%intel@16.0:', when='+cuda ^cuda@:8.0.43')
conflicts('%intel@17.0:', when='+cuda ^cuda@:8.0.60')
conflicts('%intel@18.0:', when='+cuda ^cuda@:9.9')
- conflicts('%intel@19.0:', when='+cuda ^cuda@:10.0')
+ conflicts('%intel@19.0:', when='+cuda ^cuda@:10.2.89')
# XL is mostly relevant for ppc64le Linux
conflicts('%xl@:12,14:', when='+cuda ^cuda@:9.1')
conflicts('%xl@:12,14:15,17:', when='+cuda ^cuda@9.2')
- conflicts('%xl@17:', when='+cuda ^cuda@10.0.130')
+ conflicts('%xl@17:', when='+cuda ^cuda@10.0.130:10.2.89')
# Mac OS X
# platform = ' platform=darwin'
diff --git a/lib/spack/spack/build_systems/gnu.py b/lib/spack/spack/build_systems/gnu.py
new file mode 100644
index 0000000000..0fe6f5f780
--- /dev/null
+++ b/lib/spack/spack/build_systems/gnu.py
@@ -0,0 +1,37 @@
+# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import os.path
+
+import spack.package
+
+
+class GNUMirrorPackage(spack.package.PackageBase):
+ """Mixin that takes care of setting url and mirrors for GNU packages."""
+ #: Path of the package in a GNU mirror
+ gnu_mirror_path = None
+
+ #: List of GNU mirrors used by Spack
+ base_mirrors = [
+ 'https://ftp.gnu.org/gnu',
+ 'https://ftpmirror.gnu.org/',
+ # Fall back to http if https didn't work (for instance because
+ # Spack is bootstrapping curl)
+ 'http://ftpmirror.gnu.org/'
+ ]
+
+ @property
+ def urls(self):
+ self._ensure_gnu_mirror_path_is_set_or_raise()
+ return [
+ os.path.join(m, self.gnu_mirror_path) for m in self.base_mirrors
+ ]
+
+ def _ensure_gnu_mirror_path_is_set_or_raise(self):
+ if self.gnu_mirror_path is None:
+ cls_name = type(self).__name__
+ msg = ('{0} must define a `gnu_mirror_path` attribute'
+ ' [none defined]')
+ raise AttributeError(msg.format(cls_name))
diff --git a/lib/spack/spack/build_systems/intel.py b/lib/spack/spack/build_systems/intel.py
index 858b4e37fb..0ec81f3537 100644
--- a/lib/spack/spack/build_systems/intel.py
+++ b/lib/spack/spack/build_systems/intel.py
@@ -89,7 +89,7 @@ class IntelPackage(PackageBase):
2. :py:meth:`~.IntelPackage.install`
They both have sensible defaults and for many packages the
- only thing necessary will be to override setup_environment
+ only thing necessary will be to override setup_run_environment
to set the appropriate environment variables.
"""
#: Phases of an Intel package
@@ -455,9 +455,7 @@ class IntelPackage(PackageBase):
break
if not matching_dirs:
- # No match -- this *will* happen during pre-build call to
- # setup_environment() when the destination dir is still empty.
- # Return a sensible value anyway.
+ # No match -- return a sensible value anyway.
d = unversioned_dirname
debug_print(d)
@@ -786,7 +784,8 @@ class IntelPackage(PackageBase):
debug_print(mkl_libs)
if len(mkl_libs) < 3:
- raise_lib_error('Cannot locate core MKL libraries:', mkl_libnames)
+ raise_lib_error('Cannot locate core MKL libraries:', mkl_libnames,
+ 'in:', self.component_lib_dir('mkl'))
# The Intel MKL link line advisor recommends these system libraries
system_libs = find_system_libraries(
@@ -888,15 +887,15 @@ class IntelPackage(PackageBase):
# debug_print("wrapper_vars =", wrapper_vars)
return wrapper_vars
- def mpi_setup_dependent_environment(
- self, spack_env, run_env, dependent_spec, compilers_of_client={}):
- '''Unified back-end for setup_dependent_environment() of Intel packages
- that provide 'mpi'.
+ def mpi_setup_dependent_build_environment(
+ self, env, dependent_spec, compilers_of_client={}):
+ '''Unified back-end for setup_dependent_build_environment() of
+ Intel packages that provide 'mpi'.
Parameters:
- spack_env, run_env, dependent_spec: same as in
- setup_dependent_environment().
+ env, dependent_spec: same as in
+ setup_dependent_build_environment().
compilers_of_client (dict): Conveys spack_cc, spack_cxx, etc.,
from the scope of dependent packages; constructed in caller.
@@ -938,12 +937,12 @@ class IntelPackage(PackageBase):
# Ensure that the directory containing the compiler wrappers is in the
# PATH. Spack packages add `prefix.bin` to their dependents' paths,
# but because of the intel directory hierarchy that is insufficient.
- spack_env.prepend_path('PATH', os.path.dirname(wrapper_vars['MPICC']))
+ env.prepend_path('PATH', os.path.dirname(wrapper_vars['MPICC']))
for key, value in wrapper_vars.items():
- spack_env.set(key, value)
+ env.set(key, value)
- debug_print("adding to spack_env:", wrapper_vars)
+ debug_print("adding to build env:", wrapper_vars)
# ---------------------------------------------------------------------
# General support for child packages
@@ -994,7 +993,7 @@ class IntelPackage(PackageBase):
debug_print(result)
return result
- def setup_environment(self, spack_env, run_env):
+ def setup_run_environment(self, env):
"""Adds environment variables to the generated module file.
These environment variables come from running:
@@ -1004,24 +1003,7 @@ class IntelPackage(PackageBase):
$ source parallel_studio_xe_2017/bin/psxevars.sh intel64
[and likewise for MKL, MPI, and other components]
"""
- # https://spack.readthedocs.io/en/latest/spack.html#spack.package.PackageBase.setup_environment
- #
- # spack_env -> Applied when dependent is built within Spack.
- # Not used here.
- # run_env -> Applied to the modulefile of dependent.
- #
- # NOTE: Spack runs setup_environment twice, once pre-build to set up
- # the build environment, and once post-installation to determine
- # the environment variables needed at run-time to add to the module
- # file. The script we need to source is only present post-installation,
- # so check for its existence before sourcing.
- # TODO: At some point we should split setup_environment into
- # setup_build_environment and setup_run_environment to get around
- # this problem.
f = self.file_to_source
- if not f or not os.path.isfile(f):
- return
-
tty.debug("sourcing " + f)
# All Intel packages expect at least the architecture as argument.
@@ -1033,15 +1015,9 @@ class IntelPackage(PackageBase):
# if sys.platform == 'darwin':
# args = ()
- run_env.extend(EnvironmentModifications.from_sourcing_file(f, *args))
+ env.extend(EnvironmentModifications.from_sourcing_file(f, *args))
- def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
- # https://spack.readthedocs.io/en/latest/spack.html#spack.package.PackageBase.setup_dependent_environment
- #
- # spack_env -> Applied when dependent is built within Spack.
- # run_env -> Applied to the modulefile of dependent.
- # Not used here.
- #
+ def setup_dependent_build_environment(self, env, dependent_spec):
# NB: This function is overwritten by 'mpi' provider packages:
#
# var/spack/repos/builtin/packages/intel-mpi/package.py
@@ -1051,18 +1027,20 @@ class IntelPackage(PackageBase):
# dictionary kwarg compilers_of_client{} present and populated.
# Handle everything in a callback version.
- self._setup_dependent_env_callback(spack_env, run_env, dependent_spec)
+ self._setup_dependent_env_callback(env, dependent_spec)
def _setup_dependent_env_callback(
- self, spack_env, run_env, dependent_spec, compilers_of_client={}):
- # Expected to be called from a client's setup_dependent_environment(),
+ self, env, dependent_spec, compilers_of_client={}):
+ # Expected to be called from a client's
+ # setup_dependent_build_environment(),
# with args extended to convey the client's compilers as needed.
if '+mkl' in self.spec or self.provides('mkl'):
# Spack's env philosophy demands that we replicate some of the
# settings normally handled by file_to_source ...
#
- # TODO: Why is setup_environment() [which uses file_to_source()]
+ # TODO: Why is setup_run_environment()
+ # [which uses file_to_source()]
# not called as a matter of course upon entering the current
# function? (guarding against multiple calls notwithstanding)
#
@@ -1072,16 +1050,16 @@ class IntelPackage(PackageBase):
'SPACK_COMPILER_EXTRA_RPATHS': self.component_lib_dir('mkl'),
}
- spack_env.set('MKLROOT', env_mods['MKLROOT'])
- spack_env.append_path('SPACK_COMPILER_EXTRA_RPATHS',
- env_mods['SPACK_COMPILER_EXTRA_RPATHS'])
+ env.set('MKLROOT', env_mods['MKLROOT'])
+ env.append_path('SPACK_COMPILER_EXTRA_RPATHS',
+ env_mods['SPACK_COMPILER_EXTRA_RPATHS'])
- debug_print("adding/modifying spack_env:", env_mods)
+ debug_print("adding/modifying build env:", env_mods)
if '+mpi' in self.spec or self.provides('mpi'):
if compilers_of_client:
- self.mpi_setup_dependent_environment(
- spack_env, run_env, dependent_spec, compilers_of_client)
+ self.mpi_setup_dependent_build_environment(
+ env, dependent_spec, compilers_of_client)
# We could forego this nonce function and inline its code here,
# but (a) it sisters mpi_compiler_wrappers() [needed twice]
# which performs dizzyingly similar but necessarily different
diff --git a/lib/spack/spack/build_systems/octave.py b/lib/spack/spack/build_systems/octave.py
index 677e623bd6..5535cd4d9f 100644
--- a/lib/spack/spack/build_systems/octave.py
+++ b/lib/spack/spack/build_systems/octave.py
@@ -29,12 +29,11 @@ class OctavePackage(PackageBase):
extends('octave')
depends_on('octave', type=('build', 'run'))
- def setup_environment(self, spack_env, run_env):
- """Set up the compile and runtime environments for a package."""
+ def setup_build_environment(self, env):
# octave does not like those environment variables to be set:
- spack_env.unset('CC')
- spack_env.unset('CXX')
- spack_env.unset('FC')
+ env.unset('CC')
+ env.unset('CXX')
+ env.unset('FC')
def install(self, spec, prefix):
"""Install the package from the archive file"""
diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py
index 0d2e92940b..87526e32d9 100644
--- a/lib/spack/spack/cmd/__init__.py
+++ b/lib/spack/spack/cmd/__init__.py
@@ -212,6 +212,9 @@ def disambiguate_spec(spec, env, local=False, installed=True):
def gray_hash(spec, length):
+ if not length:
+ # default to maximum hash length
+ length = 32
h = spec.dag_hash(length) if spec.concrete else '-' * length
return colorize('@K{%s}' % h)
diff --git a/lib/spack/spack/cmd/buildcache.py b/lib/spack/spack/cmd/buildcache.py
index ee09a33f39..112949c338 100644
--- a/lib/spack/spack/cmd/buildcache.py
+++ b/lib/spack/spack/cmd/buildcache.py
@@ -416,11 +416,9 @@ def listspecs(args):
"""list binary packages available from mirrors"""
specs = bindist.get_specs(args.force)
if args.packages:
- pkgs = set(args.packages)
- specs = [s for s in specs for p in pkgs if s.satisfies(p)]
- display_specs(specs, args, all_headers=True)
- else:
- display_specs(specs, args, all_headers=True)
+ constraints = set(args.packages)
+ specs = [s for s in specs if any(s.satisfies(c) for c in constraints)]
+ display_specs(specs, args, all_headers=True)
def getkeys(args):
diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py
index 2518dfef9f..d8407651b6 100644
--- a/lib/spack/spack/cmd/checksum.py
+++ b/lib/spack/spack/cmd/checksum.py
@@ -61,3 +61,4 @@ def checksum(parser, args):
print()
print(version_lines)
+ print()
diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py
index ee0fb3a347..a64d12539b 100644
--- a/lib/spack/spack/cmd/create.py
+++ b/lib/spack/spack/cmd/create.py
@@ -246,6 +246,7 @@ class PythonPackageTemplate(PackageTemplate):
dependencies = """\
# FIXME: Add dependencies if required.
+ # depends_on('python@2.X:2.Y,3.Z:', type=('build', 'run'))
# depends_on('py-setuptools', type='build')
# depends_on('py-foo', type=('build', 'run'))"""
@@ -426,7 +427,8 @@ def setup_parser(subparser):
'-n', '--name',
help="name of the package to create")
subparser.add_argument(
- '-t', '--template', metavar='TEMPLATE', choices=templates.keys(),
+ '-t', '--template', metavar='TEMPLATE',
+ choices=sorted(templates.keys()),
help="build system template to use. options: %(choices)s")
subparser.add_argument(
'-r', '--repo',
@@ -458,7 +460,7 @@ class BuildSystemGuesser:
the contents of its archive or the URL it was downloaded from."""
# Most octave extensions are hosted on Octave-Forge:
- # http://octave.sourceforge.net/index.html
+ # https://octave.sourceforge.net/index.html
# They all have the same base URL.
if url is not None and 'downloads.sourceforge.net/octave/' in url:
self.build_system = 'octave'
@@ -571,7 +573,7 @@ def get_url(args):
"""
# Default URL
- url = 'http://www.example.com/example-1.2.3.tar.gz'
+ url = 'https://www.example.com/example-1.2.3.tar.gz'
if args.url:
# Use a user-supplied URL if one is present
diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py
index 37cfcd30d5..65946e2a4c 100644
--- a/lib/spack/spack/cmd/find.py
+++ b/lib/spack/spack/cmd/find.py
@@ -4,6 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from __future__ import print_function
+import copy
import llnl.util.tty as tty
import llnl.util.tty.color as color
@@ -166,11 +167,16 @@ def display_env(env, args, decorator):
else:
tty.msg('Root specs')
+ # Root specs cannot be displayed with prefixes, since those are not
+ # set for abstract specs. Same for hashes
+ root_args = copy.copy(args)
+ root_args.paths = False
+
# Roots are displayed with variants, etc. so that we can see
# specifically what the user asked for.
cmd.display_specs(
env.user_specs,
- args,
+ root_args,
decorator=lambda s, f: color.colorize('@*{%s}' % f),
namespace=True,
show_flags=True,
diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py
index ab012eaead..f39522ff59 100644
--- a/lib/spack/spack/cmd/install.py
+++ b/lib/spack/spack/cmd/install.py
@@ -72,7 +72,7 @@ the dependencies"""
subparser.add_argument(
'-u', '--until', type=str, dest='until', default=None,
help="phase to stop after when installing (default None)")
- arguments.add_common_arguments(subparser, ['jobs', 'install_status'])
+ arguments.add_common_arguments(subparser, ['jobs'])
subparser.add_argument(
'--overwrite', action='store_true',
help="reinstall an existing spec, even if it has dependents")
@@ -151,38 +151,62 @@ packages. If neither are chosen, don't run tests for any packages."""
help="filename for the log file. if not passed a default will be used"
)
subparser.add_argument(
+ '--help-cdash',
+ action='store_true',
+ help="Show usage instructions for CDash reporting"
+ )
+ add_cdash_args(subparser, False)
+ arguments.add_common_arguments(subparser, ['yes_to_all'])
+
+
+def add_cdash_args(subparser, add_help):
+ cdash_help = {}
+ if add_help:
+ cdash_help['upload-url'] = "CDash URL where reports will be uploaded"
+ cdash_help['build'] = """The name of the build that will be reported to CDash.
+Defaults to spec of the package to install."""
+ cdash_help['site'] = """The site name that will be reported to CDash.
+Defaults to current system hostname."""
+ cdash_help['track'] = """Results will be reported to this group on CDash.
+Defaults to Experimental."""
+ cdash_help['buildstamp'] = """Instead of letting the CDash reporter prepare the
+buildstamp which, when combined with build name, site and project,
+uniquely identifies the build, provide this argument to identify
+the build yourself. Format: %%Y%%m%%d-%%H%%M-[cdash-track]"""
+ else:
+ cdash_help['upload-url'] = argparse.SUPPRESS
+ cdash_help['build'] = argparse.SUPPRESS
+ cdash_help['site'] = argparse.SUPPRESS
+ cdash_help['track'] = argparse.SUPPRESS
+ cdash_help['buildstamp'] = argparse.SUPPRESS
+
+ subparser.add_argument(
'--cdash-upload-url',
default=None,
- help="CDash URL where reports will be uploaded"
+ help=cdash_help['upload-url']
)
subparser.add_argument(
'--cdash-build',
default=None,
- help="""The name of the build that will be reported to CDash.
-Defaults to spec of the package to install."""
+ help=cdash_help['build']
)
subparser.add_argument(
'--cdash-site',
default=None,
- help="""The site name that will be reported to CDash.
-Defaults to current system hostname."""
+ help=cdash_help['site']
)
+
cdash_subgroup = subparser.add_mutually_exclusive_group()
cdash_subgroup.add_argument(
'--cdash-track',
default='Experimental',
- help="""Results will be reported to this group on CDash.
-Defaults to Experimental."""
+ help=cdash_help['track']
)
cdash_subgroup.add_argument(
'--cdash-buildstamp',
default=None,
- help="""Instead of letting the CDash reporter prepare the
-buildstamp which, when combined with build name, site and project,
-uniquely identifies the build, provide this argument to identify
-the build yourself. Format: %%Y%%m%%d-%%H%%M-[cdash-track]"""
+ help=cdash_help['buildstamp']
)
- arguments.add_common_arguments(subparser, ['yes_to_all'])
def default_log_file(spec):
@@ -221,6 +245,12 @@ def install_spec(cli_args, kwargs, abstract_spec, spec):
def install(parser, args, **kwargs):
+ if args.help_cdash:
+ parser = argparse.ArgumentParser()
+ add_cdash_args(parser, True)
+ parser.print_help()
+ return
+
if not args.package and not args.specfiles:
# if there are no args but an active environment or spack.yaml file
# then install the packages from it.
diff --git a/lib/spack/spack/cmd/url.py b/lib/spack/spack/cmd/url.py
index a24fcc575c..0c105c65c3 100644
--- a/lib/spack/spack/cmd/url.py
+++ b/lib/spack/spack/cmd/url.py
@@ -135,7 +135,7 @@ def url_list(args):
# Gather set of URLs from all packages
for pkg in spack.repo.path.all_packages():
- url = getattr(pkg.__class__, 'url', None)
+ url = getattr(pkg, 'url', None)
urls = url_list_parsing(args, urls, url, pkg)
for params in pkg.versions.values():
@@ -174,7 +174,7 @@ def url_summary(args):
for pkg in spack.repo.path.all_packages():
urls = set()
- url = getattr(pkg.__class__, 'url', None)
+ url = getattr(pkg, 'url', None)
if url:
urls.add(url)
@@ -434,23 +434,10 @@ def name_parsed_correctly(pkg, name):
Returns:
bool: True if the name was correctly parsed, else False
"""
- pkg_name = pkg.name
+ pkg_name = remove_prefix(pkg.name)
name = simplify_name(name)
- # After determining a name, `spack create` determines a build system.
- # Some build systems prepend a special string to the front of the name.
- # Since this can't be guessed from the URL, it would be unfair to say
- # that these names are incorrectly parsed, so we remove them.
- if pkg_name.startswith('r-'):
- pkg_name = pkg_name[2:]
- elif pkg_name.startswith('py-'):
- pkg_name = pkg_name[3:]
- elif pkg_name.startswith('perl-'):
- pkg_name = pkg_name[5:]
- elif pkg_name.startswith('octave-'):
- pkg_name = pkg_name[7:]
-
return name == pkg_name
@@ -475,8 +462,32 @@ def version_parsed_correctly(pkg, version):
return False
+def remove_prefix(pkg_name):
+ """Remove build system prefix ('py-', 'perl-', etc.) from a package name.
+
+ After determining a name, `spack create` determines a build system.
+ Some build systems prepend a special string to the front of the name.
+ Since this can't be guessed from the URL, it would be unfair to say
+ that these names are incorrectly parsed, so we remove them.
+
+ Args:
+ pkg_name (str): the name of the package
+
+ Returns:
+ str: the name of the package with any build system prefix removed
+ """
+ prefixes = [
+ 'r-', 'py-', 'tcl-', 'lua-', 'perl-', 'ruby-', 'llvm-',
+ 'intel-', 'votca-', 'octave-', 'gtkorvo-'
+ ]
+
+ prefix = next((p for p in prefixes if pkg_name.startswith(p)), '')
+
+ return pkg_name[len(prefix):]
+
+
def remove_separators(version):
- """Removes separator characters ('.', '_', and '-') from a version.
+ """Remove separator characters ('.', '_', and '-') from a version.
A version like 1.2.3 may be displayed as 1_2_3 in the URL.
Make sure 1.2.3, 1-2-3, 1_2_3, and 123 are considered equal.
diff --git a/lib/spack/spack/compiler.py b/lib/spack/spack/compiler.py
index f2b62dc3f9..1a6b33af0c 100644
--- a/lib/spack/spack/compiler.py
+++ b/lib/spack/spack/compiler.py
@@ -32,7 +32,7 @@ def _verify_executables(*paths):
@llnl.util.lang.memoized
-def get_compiler_version_output(compiler_path, version_arg):
+def get_compiler_version_output(compiler_path, version_arg, ignore_errors=()):
"""Invokes the compiler at a given path passing a single
version argument and returns the output.
@@ -41,7 +41,8 @@ def get_compiler_version_output(compiler_path, version_arg):
version_arg (str): the argument used to extract version information
"""
compiler = spack.util.executable.Executable(compiler_path)
- output = compiler(version_arg, output=str, error=str)
+ output = compiler(
+ version_arg, output=str, error=str, ignore_errors=ignore_errors)
return output
@@ -199,6 +200,9 @@ class Compiler(object):
#: Compiler argument that produces version information
version_argument = '-dumpversion'
+ #: Return values to ignore when invoking the compiler to get its version
+ ignore_version_errors = ()
+
#: Regex used to extract version from compiler's output
version_regex = '(.*)'
@@ -412,7 +416,8 @@ class Compiler(object):
@classmethod
def default_version(cls, cc):
"""Override just this to override all compiler version functions."""
- output = get_compiler_version_output(cc, cls.version_argument)
+ output = get_compiler_version_output(
+ cc, cls.version_argument, tuple(cls.ignore_version_errors))
return cls.extract_version_from_output(output)
@classmethod
diff --git a/lib/spack/spack/compilers/pgi.py b/lib/spack/spack/compilers/pgi.py
index 90560f7c63..2798210bca 100644
--- a/lib/spack/spack/compilers/pgi.py
+++ b/lib/spack/spack/compilers/pgi.py
@@ -30,6 +30,7 @@ class Pgi(Compiler):
PrgEnv_compiler = 'pgi'
version_argument = '-V'
+ ignore_version_errors = [2] # `pgcc -V` on PowerPC annoyingly returns 2
version_regex = r'pg[^ ]* ([0-9.]+)-[0-9]+ (LLVM )?[^ ]+ target on '
@classmethod
diff --git a/lib/spack/spack/environment.py b/lib/spack/spack/environment.py
index bf9af075ce..b0d17877dc 100644
--- a/lib/spack/spack/environment.py
+++ b/lib/spack/spack/environment.py
@@ -825,7 +825,10 @@ class Environment(object):
raise SpackEnvironmentError(
'cannot add anonymous specs to an environment!')
elif not spack.repo.path.exists(spec.name):
- raise SpackEnvironmentError('no such package: %s' % spec.name)
+ virtuals = spack.repo.path.provider_index.providers.keys()
+ if spec.name not in virtuals:
+ msg = 'no such package: %s' % spec.name
+ raise SpackEnvironmentError(msg)
list_to_change = self.spec_lists[list_name]
existing = str(spec) in list_to_change.yaml_list
diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py
index 5a57703d27..f1ea0d35b6 100644
--- a/lib/spack/spack/fetch_strategy.py
+++ b/lib/spack/spack/fetch_strategy.py
@@ -22,33 +22,30 @@ in order to build it. They need to define the following methods:
* archive()
Archive a source directory, e.g. for creating a mirror.
"""
+import copy
+import functools
import os
import os.path
-import sys
import re
import shutil
-import copy
+import sys
import xml.etree.ElementTree
-from functools import wraps
-from six import string_types, with_metaclass
-import six.moves.urllib.parse as urllib_parse
import llnl.util.tty as tty
-from llnl.util.filesystem import (
- working_dir, mkdirp, temp_rename, temp_cwd, get_single_file)
-
+import six
+import six.moves.urllib.parse as urllib_parse
import spack.config
import spack.error
import spack.util.crypto as crypto
import spack.util.pattern as pattern
-import spack.util.web as web_util
import spack.util.url as url_util
-
+import spack.util.web as web_util
+from llnl.util.filesystem import (
+ working_dir, mkdirp, temp_rename, temp_cwd, get_single_file)
+from spack.util.compression import decompressor_for, extension
from spack.util.executable import which
from spack.util.string import comma_and, quote
from spack.version import Version, ver
-from spack.util.compression import decompressor_for, extension
-
#: List of all fetch strategies, created by FetchStrategy metaclass.
all_strategies = []
@@ -69,7 +66,7 @@ def _needs_stage(fun):
"""Many methods on fetch strategies require a stage to be set
using set_stage(). This decorator adds a check for self.stage."""
- @wraps(fun)
+ @functools.wraps(fun)
def wrapper(self, *args, **kwargs):
if not self.stage:
raise NoStageError(fun)
@@ -85,18 +82,14 @@ def _ensure_one_stage_entry(stage_path):
return os.path.join(stage_path, stage_entries[0])
-class FSMeta(type):
- """This metaclass registers all fetch strategies in a list."""
- def __init__(cls, name, bases, dict):
- type.__init__(cls, name, bases, dict)
- if cls.enabled:
- all_strategies.append(cls)
+def fetcher(cls):
+ """Decorator used to register fetch strategies."""
+ all_strategies.append(cls)
+ return cls
-class FetchStrategy(with_metaclass(FSMeta, object)):
+class FetchStrategy(object):
"""Superclass of all fetch strategies."""
- enabled = False # Non-abstract subclasses should be enabled.
-
#: The URL attribute must be specified either at the package class
#: level, or as a keyword argument to ``version()``. It is used to
#: distinguish fetchers for different versions in the package DSL.
@@ -113,16 +106,7 @@ class FetchStrategy(with_metaclass(FSMeta, object)):
self.stage = None
# Enable or disable caching for this strategy based on
# 'no_cache' option from version directive.
- self._cache_enabled = not kwargs.pop('no_cache', False)
-
- def set_stage(self, stage):
- """This is called by Stage before any of the fetching
- methods are called on the stage."""
- self.stage = stage
-
- @property
- def cache_enabled(self):
- return self._cache_enabled
+ self.cache_enabled = not kwargs.pop('no_cache', False)
# Subclasses need to implement these methods
def fetch(self):
@@ -186,13 +170,18 @@ class FetchStrategy(with_metaclass(FSMeta, object)):
def __str__(self): # Should be human readable URL.
return "FetchStrategy.__str___"
- # This method is used to match fetch strategies to version()
- # arguments in packages.
@classmethod
def matches(cls, args):
+ """Predicate that matches fetch strategies to arguments of
+ the version directive.
+
+ Args:
+ args: arguments of the version directive
+ """
return cls.url_attr in args
+@fetcher
class BundleFetchStrategy(FetchStrategy):
"""
Fetch strategy associated with bundle, or no-code, packages.
@@ -204,9 +193,6 @@ class BundleFetchStrategy(FetchStrategy):
TODO: Remove this class by refactoring resource handling and the link
between composite stages and composite fetch strategies (see #11981).
"""
- #: This is a concrete fetch strategy for no-code packages.
- enabled = True
-
#: There is no associated URL keyword in ``version()`` for no-code
#: packages but this property is required for some strategy-related
#: functions (e.g., check_pkg_attributes).
@@ -225,6 +211,9 @@ class BundleFetchStrategy(FetchStrategy):
"""BundlePackages don't have a source id."""
return ''
+ def mirror_id(self):
+ """BundlePackages don't have a mirror id."""
+
@pattern.composite(interface=FetchStrategy)
class FetchStrategyComposite(object):
@@ -233,7 +222,6 @@ class FetchStrategyComposite(object):
Implements the GoF composite pattern.
"""
matches = FetchStrategy.matches
- set_stage = FetchStrategy.set_stage
def source_id(self):
component_ids = tuple(i.source_id() for i in self)
@@ -241,13 +229,13 @@ class FetchStrategyComposite(object):
return component_ids
+@fetcher
class URLFetchStrategy(FetchStrategy):
+    """URLFetchStrategy pulls source code from a URL for an archive, checks the
+ archive against a checksum, and decompresses the archive.
+
+ The destination for the resulting file(s) is the standard stage path.
"""
- FetchStrategy that pulls source code from a URL for an archive, check the
- archive against a checksum, and decompresses the archive. The destination
- for the resulting file(s) is the standard stage source path.
- """
- enabled = True
url_attr = 'url'
# these are checksum types. The generic 'checksum' is deprecated for
@@ -259,6 +247,7 @@ class URLFetchStrategy(FetchStrategy):
# Prefer values in kwargs to the positionals.
self.url = kwargs.get('url', url)
+ self.mirrors = kwargs.get('mirrors', [])
# digest can be set as the first argument, or from an explicit
# kwarg by the hash name.
@@ -294,20 +283,36 @@ class URLFetchStrategy(FetchStrategy):
return os.path.sep.join(
['archive', self.digest[:2], self.digest])
+ @property
+ def candidate_urls(self):
+ return [self.url] + (self.mirrors or [])
+
@_needs_stage
def fetch(self):
if self.archive_file:
tty.msg("Already downloaded %s" % self.archive_file)
return
+ for url in self.candidate_urls:
+ try:
+ partial_file, save_file = self._fetch_from_url(url)
+ if save_file:
+ os.rename(partial_file, save_file)
+ break
+ except FetchError as e:
+ tty.msg(str(e))
+ pass
+
+ if not self.archive_file:
+ raise FailedDownloadError(self.url)
+
+ def _fetch_from_url(self, url):
save_file = None
partial_file = None
if self.stage.save_filename:
save_file = self.stage.save_filename
partial_file = self.stage.save_filename + '.part'
-
- tty.msg("Fetching %s" % self.url)
-
+ tty.msg("Fetching %s" % url)
if partial_file:
save_args = ['-C',
'-', # continue partial downloads
@@ -321,7 +326,9 @@ class URLFetchStrategy(FetchStrategy):
'-D',
'-', # print out HTML headers
'-L', # resolve 3xx redirects
- self.url,
+ # Timeout if can't establish a connection after 10 sec.
+ '--connect-timeout', '10',
+ url,
]
if not spack.config.get('config:verify_ssl'):
@@ -377,12 +384,7 @@ class URLFetchStrategy(FetchStrategy):
flags=re.IGNORECASE)
if content_types and 'text/html' in content_types[-1]:
warn_content_type_mismatch(self.archive_file or "the archive")
-
- if save_file:
- os.rename(partial_file, save_file)
-
- if not self.archive_file:
- raise FailedDownloadError(self.url)
+ return partial_file, save_file
@property
@_needs_stage
@@ -392,7 +394,7 @@ class URLFetchStrategy(FetchStrategy):
@property
def cachable(self):
- return self._cache_enabled and bool(self.digest)
+ return self.cache_enabled and bool(self.digest)
@_needs_stage
def expand(self):
@@ -519,6 +521,7 @@ class URLFetchStrategy(FetchStrategy):
return "[no url]"
+@fetcher
class CacheURLFetchStrategy(URLFetchStrategy):
"""The resource associated with a cache URL may be out of date."""
@@ -594,7 +597,7 @@ class VCSFetchStrategy(FetchStrategy):
patterns = kwargs.get('exclude', None)
if patterns is not None:
- if isinstance(patterns, string_types):
+ if isinstance(patterns, six.string_types):
patterns = [patterns]
for p in patterns:
tar.add_default_arg('--exclude=%s' % p)
@@ -618,6 +621,7 @@ class VCSFetchStrategy(FetchStrategy):
return "%s<%s>" % (self.__class__, self.url)
+@fetcher
class GoFetchStrategy(VCSFetchStrategy):
"""Fetch strategy that employs the `go get` infrastructure.
@@ -631,7 +635,6 @@ class GoFetchStrategy(VCSFetchStrategy):
The fetched source will be moved to the standard stage sourcepath directory
during the expand step.
"""
- enabled = True
url_attr = 'go'
def __init__(self, **kwargs):
@@ -688,6 +691,7 @@ class GoFetchStrategy(VCSFetchStrategy):
return "[go] %s" % self.url
+@fetcher
class GitFetchStrategy(VCSFetchStrategy):
"""
@@ -709,7 +713,6 @@ class GitFetchStrategy(VCSFetchStrategy):
Repositories are cloned into the standard stage source path directory.
"""
- enabled = True
url_attr = 'git'
optional_attrs = ['tag', 'branch', 'commit', 'submodules', 'get_full_repo']
@@ -743,7 +746,7 @@ class GitFetchStrategy(VCSFetchStrategy):
@property
def cachable(self):
- return self._cache_enabled and bool(self.commit or self.tag)
+ return self.cache_enabled and bool(self.commit or self.tag)
def source_id(self):
return self.commit or self.tag
@@ -889,6 +892,7 @@ class GitFetchStrategy(VCSFetchStrategy):
return '[git] {0}'.format(self._repo_info())
+@fetcher
class SvnFetchStrategy(VCSFetchStrategy):
"""Fetch strategy that gets source code from a subversion repository.
@@ -903,7 +907,6 @@ class SvnFetchStrategy(VCSFetchStrategy):
Repositories are checked out into the standard stage source path directory.
"""
- enabled = True
url_attr = 'svn'
optional_attrs = ['revision']
@@ -926,7 +929,7 @@ class SvnFetchStrategy(VCSFetchStrategy):
@property
def cachable(self):
- return self._cache_enabled and bool(self.revision)
+ return self.cache_enabled and bool(self.revision)
def source_id(self):
return self.revision
@@ -988,6 +991,7 @@ class SvnFetchStrategy(VCSFetchStrategy):
return "[svn] %s" % self.url
+@fetcher
class HgFetchStrategy(VCSFetchStrategy):
"""
@@ -1010,7 +1014,6 @@ class HgFetchStrategy(VCSFetchStrategy):
Repositories are cloned into the standard stage source path directory.
"""
- enabled = True
url_attr = 'hg'
optional_attrs = ['revision']
@@ -1040,7 +1043,7 @@ class HgFetchStrategy(VCSFetchStrategy):
@property
def cachable(self):
- return self._cache_enabled and bool(self.revision)
+ return self.cache_enabled and bool(self.revision)
def source_id(self):
return self.revision
@@ -1105,9 +1108,9 @@ class HgFetchStrategy(VCSFetchStrategy):
return "[hg] %s" % self.url
+@fetcher
class S3FetchStrategy(URLFetchStrategy):
"""FetchStrategy that pulls from an S3 bucket."""
- enabled = True
url_attr = 's3'
def __init__(self, *args, **kwargs):
@@ -1139,7 +1142,7 @@ class S3FetchStrategy(URLFetchStrategy):
with open(basename, 'wb') as f:
shutil.copyfileobj(stream, f)
- content_type = headers['Content-type']
+ content_type = web_util.get_header(headers, 'Content-type')
if content_type == 'text/html':
warn_content_type_mismatch(self.archive_file or "the archive")
@@ -1241,10 +1244,15 @@ def _from_merged_attrs(fetcher, pkg, version):
"""Create a fetcher from merged package and version attributes."""
if fetcher.url_attr == 'url':
url = pkg.url_for_version(version)
+ # TODO: refactor this logic into its own method or function
+ # TODO: to avoid duplication
+ mirrors = [spack.url.substitute_version(u, version)
+ for u in getattr(pkg, 'urls', [])]
+ attrs = {fetcher.url_attr: url, 'mirrors': mirrors}
else:
url = getattr(pkg, fetcher.url_attr)
+ attrs = {fetcher.url_attr: url}
- attrs = {fetcher.url_attr: url}
attrs.update(pkg.versions[version])
return fetcher(**attrs)
@@ -1308,7 +1316,9 @@ def from_url_scheme(url, *args, **kwargs):
{
'file': 'url',
'http': 'url',
- 'https': 'url'
+ 'https': 'url',
+ 'ftp': 'url',
+ 'ftps': 'url',
})
scheme = parsed_url.scheme
diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py
index da9b20472a..ceb53801ce 100644
--- a/lib/spack/spack/mirror.py
+++ b/lib/spack/spack/mirror.py
@@ -21,6 +21,8 @@ import six
import ruamel.yaml.error as yaml_error
+from ordereddict_backport import OrderedDict
+
try:
from collections.abc import Mapping
except ImportError:
@@ -166,7 +168,7 @@ class MirrorCollection(Mapping):
"""A mapping of mirror names to mirrors."""
def __init__(self, mirrors=None, scope=None):
- self._mirrors = dict(
+ self._mirrors = OrderedDict(
(name, Mirror.from_dict(mirror, name))
for name, mirror in (
mirrors.items() if mirrors is not None else
@@ -178,6 +180,7 @@ class MirrorCollection(Mapping):
def to_yaml(self, stream=None):
return syaml.dump(self.to_dict(True), stream)
+ # TODO: this isn't called anywhere
@staticmethod
def from_yaml(stream, name=None):
try:
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index 26335ed2ff..f48f296548 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -510,8 +510,8 @@ class PackageBase(with_metaclass(PackageMeta, PackageViewMixin, object)):
maintainers = []
#: List of attributes to be excluded from a package's hash.
- metadata_attrs = ['homepage', 'url', 'list_url', 'extendable', 'parallel',
- 'make_jobs']
+ metadata_attrs = ['homepage', 'url', 'urls', 'list_url', 'extendable',
+ 'parallel', 'make_jobs']
def __init__(self, spec):
# this determines how the package should be built.
@@ -524,6 +524,12 @@ class PackageBase(with_metaclass(PackageMeta, PackageViewMixin, object)):
# a binary cache.
self.installed_from_binary_cache = False
+ # Ensure that only one of these two attributes are present
+ if getattr(self, 'url', None) and getattr(self, 'urls', None):
+ msg = "a package can have either a 'url' or a 'urls' attribute"
+ msg += " [package '{0.name}' defines both]"
+ raise ValueError(msg.format(self))
+
# Set a default list URL (place to find available versions)
if not hasattr(self, 'list_url'):
self.list_url = None
@@ -556,16 +562,19 @@ class PackageBase(with_metaclass(PackageMeta, PackageViewMixin, object)):
@classmethod
def possible_dependencies(
cls, transitive=True, expand_virtuals=True, deptype='all',
- visited=None):
+ visited=None, missing=None):
"""Return dict of possible dependencies of this package.
Args:
- transitive (bool): return all transitive dependencies if True,
- only direct dependencies if False.
- expand_virtuals (bool): expand virtual dependencies into all
- possible implementations.
- deptype (str or tuple): dependency types to consider
- visited (set): set of names of dependencies visited so far.
+ transitive (bool, optional): return all transitive dependencies if
+ True, only direct dependencies if False (default True).
+ expand_virtuals (bool, optional): expand virtual dependencies into
+ all possible implementations (default True)
+ deptype (str or tuple, optional): dependency types to consider
+ visited (dict, optional): dict of names of dependencies visited so
+ far, mapped to their immediate dependencies' names.
+ missing (dict, optional): dict to populate with packages and their
+ *missing* dependencies.
Returns:
(dict): dictionary mapping dependency names to *their*
@@ -576,7 +585,12 @@ class PackageBase(with_metaclass(PackageMeta, PackageViewMixin, object)):
*immediate* dependencies. If ``expand_virtuals`` is ``False``,
virtual package names wil be inserted as keys mapped to empty
sets of dependencies. Virtuals, if not expanded, are treated as
- though they have no immediate dependencies
+ though they have no immediate dependencies.
+
+ Missing dependencies by default are ignored, but if a
+ missing dict is provided, it will be populated with package names
+ mapped to any dependencies they have that are not in any
+ known repository. This is only populated if transitive is True.
Note: the returned dict *includes* the package itself.
@@ -586,6 +600,9 @@ class PackageBase(with_metaclass(PackageMeta, PackageViewMixin, object)):
if visited is None:
visited = {cls.name: set()}
+ if missing is None:
+ missing = {cls.name: set()}
+
for name, conditions in cls.dependencies.items():
# check whether this dependency could be of the type asked for
types = [dep.type for cond, dep in conditions.items()]
@@ -609,12 +626,24 @@ class PackageBase(with_metaclass(PackageMeta, PackageViewMixin, object)):
# recursively traverse dependencies
for dep_name in dep_names:
- if dep_name not in visited:
- visited.setdefault(dep_name, set())
- if transitive:
- dep_cls = spack.repo.path.get_pkg_class(dep_name)
- dep_cls.possible_dependencies(
- transitive, expand_virtuals, deptype, visited)
+ if dep_name in visited:
+ continue
+
+ visited.setdefault(dep_name, set())
+
+ # skip the rest if not transitive
+ if not transitive:
+ continue
+
+ try:
+ dep_cls = spack.repo.path.get_pkg_class(dep_name)
+ except spack.repo.UnknownPackageError:
+ # log unknown packages
+ missing.setdefault(cls.name, set()).add(dep_name)
+ continue
+
+ dep_cls.possible_dependencies(
+ transitive, expand_virtuals, deptype, visited, missing)
return visited
@@ -727,7 +756,9 @@ class PackageBase(with_metaclass(PackageMeta, PackageViewMixin, object)):
return version_urls[version]
# If no specific URL, use the default, class-level URL
- default_url = getattr(self, 'url', None)
+ url = getattr(self, 'url', None)
+ urls = getattr(self, 'urls', [None])
+ default_url = url or urls.pop(0)
# if no exact match AND no class-level default, use the nearest URL
if not default_url:
@@ -1509,6 +1540,7 @@ class PackageBase(with_metaclass(PackageMeta, PackageViewMixin, object)):
)
if not compilers:
dep = spack.compilers.pkg_spec_for_compiler(self.spec.compiler)
+ dep.architecture = self.spec.architecture
# concrete CompilerSpec has less info than concrete Spec
# concretize as Spec to add that information
dep.concretize()
@@ -2662,6 +2694,35 @@ def dump_packages(spec, path):
spack.repo.path.dump_provenance(node, dest_pkg_dir)
+def possible_dependencies(*pkg_or_spec, **kwargs):
+ """Get the possible dependencies of a number of packages.
+
+ See ``PackageBase.possible_dependencies`` for details.
+ """
+ transitive = kwargs.get('transitive', True)
+ expand_virtuals = kwargs.get('expand_virtuals', True)
+ deptype = kwargs.get('deptype', 'all')
+ missing = kwargs.get('missing')
+
+ packages = []
+ for pos in pkg_or_spec:
+ if isinstance(pos, PackageMeta):
+ pkg = pos
+ elif isinstance(pos, spack.spec.Spec):
+ pkg = pos.package
+ else:
+ pkg = spack.spec.Spec(pos).package
+
+ packages.append(pkg)
+
+ visited = {}
+ for pkg in packages:
+ pkg.possible_dependencies(
+ transitive, expand_virtuals, deptype, visited, missing)
+
+ return visited
+
+
def print_pkg(message):
"""Outputs a message with a package icon."""
from llnl.util.tty.color import cwrite
diff --git a/lib/spack/spack/pkgkit.py b/lib/spack/spack/pkgkit.py
index 7ad7279e73..2ed16cff0a 100644
--- a/lib/spack/spack/pkgkit.py
+++ b/lib/spack/spack/pkgkit.py
@@ -30,6 +30,7 @@ from spack.build_systems.perl import PerlPackage
from spack.build_systems.intel import IntelPackage
from spack.build_systems.meson import MesonPackage
from spack.build_systems.sip import SIPPackage
+from spack.build_systems.gnu import GNUMirrorPackage
from spack.mixins import filter_compiler_wrappers
diff --git a/lib/spack/spack/relocate.py b/lib/spack/spack/relocate.py
index 434d681035..d899b372e7 100644
--- a/lib/spack/spack/relocate.py
+++ b/lib/spack/spack/relocate.py
@@ -6,6 +6,7 @@
import os
import re
+import shutil
import platform
import spack.repo
import spack.cmd
@@ -86,7 +87,14 @@ def get_existing_elf_rpaths(path_name):
Return the RPATHS returned by patchelf --print-rpath path_name
as a list of strings.
"""
- patchelf = Executable(get_patchelf())
+
+ # if we're relocating patchelf itself, use it
+
+ if path_name[-13:] == "/bin/patchelf":
+ patchelf = Executable(path_name)
+ else:
+ patchelf = Executable(get_patchelf())
+
try:
output = patchelf('--print-rpath', '%s' %
path_name, output=str, error=str)
@@ -326,8 +334,18 @@ def modify_elf_object(path_name, new_rpaths):
"""
Replace orig_rpath with new_rpath in RPATH of elf object path_name
"""
+
new_joined = ':'.join(new_rpaths)
- patchelf = Executable(get_patchelf())
+
+ # if we're relocating patchelf itself, use it
+
+ if path_name[-13:] == "/bin/patchelf":
+ bak_path = path_name + ".bak"
+ shutil.copy(path_name, bak_path)
+ patchelf = Executable(bak_path)
+ else:
+ patchelf = Executable(get_patchelf())
+
try:
patchelf('--force-rpath', '--set-rpath', '%s' % new_joined,
'%s' % path_name, output=str, error=str)
@@ -665,7 +683,13 @@ def file_is_relocatable(file, paths_to_relocate=None):
raise ValueError('{0} is not an absolute path'.format(file))
strings = Executable('strings')
- patchelf = Executable(get_patchelf())
+
+ # if we're relocating patchelf itself, use it
+
+ if file[-13:] == "/bin/patchelf":
+ patchelf = Executable(file)
+ else:
+ patchelf = Executable(get_patchelf())
# Remove the RPATHS from the strings in the executable
set_of_strings = set(strings(file, output=str).split())
diff --git a/lib/spack/spack/reporters/cdash.py b/lib/spack/spack/reporters/cdash.py
index 58095b10ba..592209aca4 100644
--- a/lib/spack/spack/reporters/cdash.py
+++ b/lib/spack/spack/reporters/cdash.py
@@ -302,7 +302,10 @@ class CDash(Reporter):
request.get_method = lambda: 'PUT'
response = opener.open(request)
if self.current_package_name not in self.buildIds:
- match = self.buildid_regexp.search(response.read())
+ resp_value = response.read()
+ if isinstance(resp_value, bytes):
+ resp_value = resp_value.decode('utf-8')
+ match = self.buildid_regexp.search(resp_value)
if match:
buildid = match.group(1)
self.buildIds[self.current_package_name] = buildid
diff --git a/lib/spack/spack/s3_handler.py b/lib/spack/spack/s3_handler.py
index 2a54b9ecb1..ea7f0673ff 100644
--- a/lib/spack/spack/s3_handler.py
+++ b/lib/spack/spack/s3_handler.py
@@ -11,7 +11,6 @@ import six.moves.urllib.error as urllib_error
import spack.util.s3 as s3_util
import spack.util.url as url_util
-import spack.util.web as web_util
# NOTE(opadron): Workaround issue in boto where its StreamingBody
@@ -54,8 +53,7 @@ def _s3_open(url):
# NOTE(opadron): Apply workaround here (see above)
stream = WrapStream(obj['Body'])
- headers = web_util.standardize_header_names(
- obj['ResponseMetadata']['HTTPHeaders'])
+ headers = obj['ResponseMetadata']['HTTPHeaders']
return url, headers, stream
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index c553da796d..57d05d6166 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -2182,7 +2182,7 @@ class Spec(object):
# Add any patches from the package to the spec.
patches = []
for cond, patch_list in s.package_class.patches.items():
- if s.satisfies(cond):
+ if s.satisfies(cond, strict=True):
for patch in patch_list:
patches.append(patch)
if patches:
@@ -2201,7 +2201,7 @@ class Spec(object):
patches = []
for cond, dependency in pkg_deps[dspec.spec.name].items():
- if dspec.parent.satisfies(cond):
+ if dspec.parent.satisfies(cond, strict=True):
for pcond, patch_list in dependency.patches.items():
if dspec.spec.satisfies(pcond):
for patch in patch_list:
@@ -2663,7 +2663,7 @@ class Spec(object):
not_existing = set(spec.variants) - (
set(pkg_variants) | set(spack.directives.reserved_names))
if not_existing:
- raise UnknownVariantError(spec.name, not_existing)
+ raise UnknownVariantError(spec, not_existing)
substitute_abstract_variants(spec)
diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py
index bf65ee0b01..6f98edc674 100644
--- a/lib/spack/spack/stage.py
+++ b/lib/spack/spack/stage.py
@@ -3,6 +3,8 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+from __future__ import print_function
+
import os
import stat
import sys
@@ -269,7 +271,7 @@ class Stage(object):
else:
raise ValueError(
"Can't construct Stage without url or fetch strategy")
- self.fetcher.set_stage(self)
+ self.fetcher.stage = self
# self.fetcher can change with mirrors.
self.default_fetcher = self.fetcher
self.search_fn = search_fn
@@ -456,7 +458,7 @@ class Stage(object):
for fetcher in generate_fetchers():
try:
- fetcher.set_stage(self)
+ fetcher.stage = self
self.fetcher = fetcher
self.fetcher.fetch()
break
@@ -771,7 +773,7 @@ def get_checksums_for_versions(
*spack.cmd.elide_list(
["{0:{1}} {2}".format(str(v), max_len, url_dict[v])
for v in sorted_versions]))
- tty.msg('')
+ print()
archives_to_fetch = tty.get_number(
"How many would you like to checksum?", default=1, abort='q')
@@ -818,7 +820,7 @@ def get_checksums_for_versions(
])
num_hash = len(version_hashes)
- tty.msg("Checksummed {0} version{1} of {2}".format(
+ tty.msg("Checksummed {0} version{1} of {2}:".format(
num_hash, '' if num_hash == 1 else 's', name))
return version_lines
diff --git a/lib/spack/spack/test/architecture.py b/lib/spack/spack/test/architecture.py
index 0a37ef9558..af8e92809d 100644
--- a/lib/spack/spack/test/architecture.py
+++ b/lib/spack/spack/test/architecture.py
@@ -176,7 +176,7 @@ def test_arch_spec_container_semantic(item, architecture_str):
('gcc@4.7.2', 'ivybridge', '-march=core-avx-i -mtune=core-avx-i'),
# Check mixed toolchains
('clang@8.0.0', 'broadwell', ''),
- ('clang@3.5', 'x86_64', '-march=x86-64 -mcpu=generic'),
+ ('clang@3.5', 'x86_64', '-march=x86-64 -mtune=generic'),
# Check clang compilers with 'apple' suffix
('clang@9.1.0-apple', 'x86_64', '-march=x86-64')
])
diff --git a/lib/spack/spack/test/build_distribution.py b/lib/spack/spack/test/build_distribution.py
new file mode 100644
index 0000000000..9d127ddf45
--- /dev/null
+++ b/lib/spack/spack/test/build_distribution.py
@@ -0,0 +1,41 @@
+# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import pytest
+
+import os
+import os.path
+
+import spack.spec
+import spack.binary_distribution
+
+install = spack.main.SpackCommand('install')
+
+
+def test_build_tarball_overwrite(
+ install_mockery, mock_fetch, monkeypatch, tmpdir):
+
+ with tmpdir.as_cwd():
+ spec = spack.spec.Spec('trivial-install-test-package').concretized()
+ install(str(spec))
+
+ # Runs fine the first time, throws the second time
+ spack.binary_distribution.build_tarball(spec, '.', unsigned=True)
+ with pytest.raises(spack.binary_distribution.NoOverwriteException):
+ spack.binary_distribution.build_tarball(spec, '.', unsigned=True)
+
+ # Should work fine with force=True
+ spack.binary_distribution.build_tarball(
+ spec, '.', force=True, unsigned=True)
+
+ # Remove the tarball and try again.
+ # This must *also* throw, because of the existing .spec.yaml file
+ os.remove(os.path.join(
+ spack.binary_distribution.build_cache_prefix('.'),
+ spack.binary_distribution.tarball_directory_name(spec),
+ spack.binary_distribution.tarball_name(spec, '.spack')))
+
+ with pytest.raises(spack.binary_distribution.NoOverwriteException):
+ spack.binary_distribution.build_tarball(spec, '.', unsigned=True)
diff --git a/lib/spack/spack/test/build_environment.py b/lib/spack/spack/test/build_environment.py
index b6a0f4b441..f2427827fe 100644
--- a/lib/spack/spack/test/build_environment.py
+++ b/lib/spack/spack/test/build_environment.py
@@ -17,7 +17,7 @@ from spack.util.executable import Executable
from spack.util.spack_yaml import syaml_dict, syaml_str
from spack.util.environment import EnvironmentModifications
-from llnl.util.filesystem import LibraryList
+from llnl.util.filesystem import LibraryList, HeaderList
@pytest.fixture
@@ -243,6 +243,18 @@ def test_set_build_environment_variables(
variables.
"""
+ # https://github.com/spack/spack/issues/13969
+ cuda_headers = HeaderList([
+ 'prefix/include/cuda_runtime.h',
+ 'prefix/include/cuda/atomic',
+ 'prefix/include/cuda/std/detail/libcxx/include/ctype.h'])
+ cuda_include_dirs = cuda_headers.directories
+ assert(os.path.join('prefix', 'include')
+ in cuda_include_dirs)
+ assert(os.path.join('prefix', 'include', 'cuda', 'std', 'detail',
+ 'libcxx', 'include')
+ not in cuda_include_dirs)
+
root = spack.spec.Spec('dt-diamond')
root.concretize()
diff --git a/lib/spack/spack/test/cmd/__init__.py b/lib/spack/spack/test/cmd/__init__.py
new file mode 100644
index 0000000000..94f8ac4d9e
--- /dev/null
+++ b/lib/spack/spack/test/cmd/__init__.py
@@ -0,0 +1,4 @@
+# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
diff --git a/lib/spack/spack/test/cmd/buildcache.py b/lib/spack/spack/test/cmd/buildcache.py
index 2c4f351d86..28f0fb9aec 100644
--- a/lib/spack/spack/test/cmd/buildcache.py
+++ b/lib/spack/spack/test/cmd/buildcache.py
@@ -8,11 +8,19 @@ import platform
import pytest
import spack.main
-
+import spack.binary_distribution
buildcache = spack.main.SpackCommand('buildcache')
+@pytest.fixture()
+def mock_get_specs(database, monkeypatch):
+ specs = database.query_local()
+ monkeypatch.setattr(
+ spack.binary_distribution, 'get_specs', lambda x: specs
+ )
+
+
@pytest.mark.skipif(
platform.system().lower() != 'linux',
reason='implementation for MacOS still missing'
@@ -20,3 +28,16 @@ buildcache = spack.main.SpackCommand('buildcache')
@pytest.mark.db
def test_buildcache_preview_just_runs(database):
buildcache('preview', 'mpileaks')
+
+
+@pytest.mark.skipif(
+ platform.system().lower() != 'linux',
+ reason='implementation for MacOS still missing'
+)
+@pytest.mark.db
+@pytest.mark.regression('13757')
+def test_buildcache_list_duplicates(mock_get_specs, capsys):
+ with capsys.disabled():
+ output = buildcache('list', 'mpileaks', '@2.3')
+
+ assert output.count('mpileaks') == 3
diff --git a/lib/spack/spack/test/cmd/common/__init__.py b/lib/spack/spack/test/cmd/common/__init__.py
new file mode 100644
index 0000000000..94f8ac4d9e
--- /dev/null
+++ b/lib/spack/spack/test/cmd/common/__init__.py
@@ -0,0 +1,4 @@
+# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py
index 9b3ca6e66e..6de1546e09 100644
--- a/lib/spack/spack/test/cmd/env.py
+++ b/lib/spack/spack/test/cmd/env.py
@@ -63,6 +63,27 @@ def test_add():
assert Spec('mpileaks') in e.user_specs
+def test_env_add_virtual():
+ env('create', 'test')
+
+ e = ev.read('test')
+ e.add('mpi')
+ e.concretize()
+
+ hashes = e.concretized_order
+ assert len(hashes) == 1
+ spec = e.specs_by_hash[hashes[0]]
+ assert spec.satisfies('mpi')
+
+
+def test_env_add_nonexistant_fails():
+ env('create', 'test')
+
+ e = ev.read('test')
+ with pytest.raises(ev.SpackEnvironmentError, match=r'no such package'):
+ e.add('thispackagedoesnotexist')
+
+
def test_env_list(mutable_mock_env_path):
env('create', 'foo')
env('create', 'bar')
@@ -765,13 +786,13 @@ def test_indirect_build_dep():
@pytest.mark.usefixtures('config')
def test_store_different_build_deps():
r"""Ensure that an environment can store two instances of a build-only
-Dependency:
+ dependency::
- x y
- /| (l) | (b)
- (b) | y z2
- \| (b) # noqa: W605
- z1
+ x y
+ /| (l) | (b)
+ (b) | y z2
+ \| (b)
+ z1
"""
default = ('build', 'link')
@@ -1777,7 +1798,7 @@ def test_duplicate_packages_raise_when_concretizing_together():
def test_env_write_only_non_default():
- print(env('create', 'test'))
+ env('create', 'test')
e = ev.read('test')
with open(e.manifest_path, 'r') as f:
diff --git a/lib/spack/spack/test/cmd/find.py b/lib/spack/spack/test/cmd/find.py
index 45b065fce9..fe29d12d59 100644
--- a/lib/spack/spack/test/cmd/find.py
+++ b/lib/spack/spack/test/cmd/find.py
@@ -12,9 +12,12 @@ import spack.cmd.find
from spack.main import SpackCommand
from spack.spec import Spec
from spack.util.pattern import Bunch
+import spack.environment as ev
find = SpackCommand('find')
+env = SpackCommand('env')
+install = SpackCommand('install')
base32_alphabet = 'abcdefghijklmnopqrstuvwxyz234567'
@@ -302,3 +305,16 @@ def test_find_no_sections(database, config):
def test_find_command_basic_usage(database):
output = find()
assert 'mpileaks' in output
+
+
+@pytest.mark.regression('9875')
+def test_find_prefix_in_env(mutable_mock_env_path, install_mockery, mock_fetch,
+ mock_packages, mock_archive, config):
+ """Test `find` formats requiring concrete specs work in environments."""
+ env('create', 'test')
+ with ev.read('test'):
+ install('mpileaks')
+ find('-p')
+ find('-l')
+ find('-L')
+ # Would throw error on regression
diff --git a/lib/spack/spack/test/cmd/flake8.py b/lib/spack/spack/test/cmd/flake8.py
index b95db93364..6cad6950ba 100644
--- a/lib/spack/spack/test/cmd/flake8.py
+++ b/lib/spack/spack/test/cmd/flake8.py
@@ -45,7 +45,7 @@ def flake8_package():
def test_changed_files(parser, flake8_package):
- args = parser.parse_args()
+ args = parser.parse_args([])
# changed_files returns file paths relative to the root
# directory of Spack. Convert to absolute file paths.
diff --git a/lib/spack/spack/test/cmd/install.py b/lib/spack/spack/test/cmd/install.py
index db8cf01f48..0d3d5f5de1 100644
--- a/lib/spack/spack/test/cmd/install.py
+++ b/lib/spack/spack/test/cmd/install.py
@@ -664,3 +664,18 @@ def test_install_only_dependencies_of_all_in_env(
assert not os.path.exists(root.prefix)
for dep in root.traverse(root=False):
assert os.path.exists(dep.prefix)
+
+
+def test_install_help_does_not_show_cdash_options(capsys):
+ """Make sure `spack install --help` does not describe CDash arguments"""
+ with pytest.raises(SystemExit):
+ install('--help')
+ captured = capsys.readouterr()
+ assert 'CDash URL' not in captured.out
+
+
+def test_install_help_cdash(capsys):
+ """Make sure `spack install --help-cdash` describes CDash arguments"""
+ install_cmd = SpackCommand('install')
+ out = install_cmd('--help-cdash')
+ assert 'CDash URL' in out
diff --git a/lib/spack/spack/test/cmd/mirror.py b/lib/spack/spack/test/cmd/mirror.py
index 889d81f98b..f29e135d82 100644
--- a/lib/spack/spack/test/cmd/mirror.py
+++ b/lib/spack/spack/test/cmd/mirror.py
@@ -6,7 +6,7 @@
import pytest
import os
-from spack.main import SpackCommand
+from spack.main import SpackCommand, SpackCommandError
import spack.environment as ev
import spack.config
@@ -16,6 +16,25 @@ add = SpackCommand('add')
concretize = SpackCommand('concretize')
+@pytest.fixture
+def tmp_scope():
+ """Creates a temporary configuration scope"""
+
+ base_name = 'internal-testing-scope'
+ current_overrides = set(
+ x.name for x in
+ spack.config.config.matching_scopes(r'^{0}'.format(base_name)))
+
+ num_overrides = 0
+ scope_name = base_name
+ while scope_name in current_overrides:
+ scope_name = '{0}{1}'.format(base_name, num_overrides)
+ num_overrides += 1
+
+ with spack.config.override(spack.config.InternalConfigScope(scope_name)):
+ yield scope_name
+
+
@pytest.mark.disable_clean_stage_check
@pytest.mark.regression('8083')
def test_regression_8083(tmpdir, capfd, mock_packages, mock_fetch, config):
@@ -45,3 +64,49 @@ def test_mirror_from_env(tmpdir, mock_packages, mock_fetch, config,
mirror_res = os.listdir(os.path.join(mirror_dir, spec.name))
expected = ['%s.tar.gz' % spec.format('{name}-{version}')]
assert mirror_res == expected
+
+
+def test_mirror_crud(tmp_scope, capsys):
+ with capsys.disabled():
+ mirror('add', '--scope', tmp_scope, 'mirror', 'http://spack.io')
+
+ output = mirror('remove', '--scope', tmp_scope, 'mirror')
+ assert 'Removed mirror' in output
+
+ mirror('add', '--scope', tmp_scope, 'mirror', 'http://spack.io')
+
+ # no-op
+ output = mirror('set-url', '--scope', tmp_scope,
+ 'mirror', 'http://spack.io')
+ assert 'Url already set' in output
+
+ output = mirror('set-url', '--scope', tmp_scope,
+ '--push', 'mirror', 's3://spack-public')
+ assert 'Changed (push) url' in output
+
+ # no-op
+ output = mirror('set-url', '--scope', tmp_scope,
+ '--push', 'mirror', 's3://spack-public')
+ assert 'Url already set' in output
+
+ output = mirror('remove', '--scope', tmp_scope, 'mirror')
+ assert 'Removed mirror' in output
+
+ output = mirror('list', '--scope', tmp_scope)
+ assert 'No mirrors configured' in output
+
+
+def test_mirror_nonexisting(tmp_scope):
+ with pytest.raises(SpackCommandError):
+ mirror('remove', '--scope', tmp_scope, 'not-a-mirror')
+
+ with pytest.raises(SpackCommandError):
+ mirror('set-url', '--scope', tmp_scope,
+ 'not-a-mirror', 'http://spack.io')
+
+
+def test_mirror_name_collision(tmp_scope):
+ mirror('add', '--scope', tmp_scope, 'first', '1')
+
+ with pytest.raises(SpackCommandError):
+ mirror('add', '--scope', tmp_scope, 'first', '1')
diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py
index 969e2471e4..3ac4d893af 100644
--- a/lib/spack/spack/test/conftest.py
+++ b/lib/spack/spack/test/conftest.py
@@ -240,9 +240,6 @@ def mock_fetch_cache(monkeypatch):
return MockCacheFetcher()
class MockCacheFetcher(object):
- def set_stage(self, stage):
- pass
-
def fetch(self):
raise FetchError('Mock cache always fails for tests')
diff --git a/lib/spack/spack/test/data/targets/linux-centos7-cascadelake b/lib/spack/spack/test/data/targets/linux-centos7-cascadelake
new file mode 100644
index 0000000000..e409c3d07a
--- /dev/null
+++ b/lib/spack/spack/test/data/targets/linux-centos7-cascadelake
@@ -0,0 +1,20 @@
+processor : 0
+vendor_id : GenuineIntel
+cpu family : 6
+model : 85
+model name : Intel(R) Xeon(R) Platinum 8260M CPU @ 2.40GHz
+stepping : 7
+microcode : 0x5000024
+cpu MHz : 2400.000
+cache size : 36608 KB
+physical id : 0
+siblings : 48
+core id : 0
+cpu cores : 24
+apicid : 0
+initial apicid : 0
+fpu : yes
+fpu_exception : yes
+cpuid level : 22
+wp : yes
+flags : fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge mca cmov pat pse36 clflush dts acpi mmx fxsr sse sse2 ss ht tm pbe syscall nx pdpe1gb rdtscp lm constant_tsc art arch_perfmon pebs bts rep_good nopl xtopology nonstop_tsc aperfmperf eagerfpu pni pclmulqdq dtes64 monitor ds_cpl vmx smx est tm2 ssse3 sdbg fma cx16 xtpr pdcm pcid dca sse4_1 sse4_2 x2apic movbe popcnt tsc_deadline_timer aes xsave avx f16c rdrand lahf_lm abm 3dnowprefetch epb cat_l3 cdp_l3 intel_ppin intel_pt ssbd mba ibrs ibpb stibp ibrs_enhanced tpr_shadow vnmi flexpriority ept vpid fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms invpcid rtm cqm mpx rdt_a avx512f avx512dq rdseed adx smap clflushopt clwb avx512cd avx512bw avx512vl xsaveopt xsavec xgetbv1 cqm_llc cqm_occup_llc cqm_mbm_total cqm_mbm_local dtherm ida arat pln pts hwp hwp_act_window hwp_epp hwp_pkg_req pku ospke avx512_vnni md_clear spec_ctrl intel_stibp flush_l1d arch_capabilities \ No newline at end of file
diff --git a/lib/spack/spack/test/data/targets/linux-centos7-thunderx2 b/lib/spack/spack/test/data/targets/linux-centos7-thunderx2
new file mode 100644
index 0000000000..2447306bac
--- /dev/null
+++ b/lib/spack/spack/test/data/targets/linux-centos7-thunderx2
@@ -0,0 +1,8 @@
+processor : 0
+BogoMIPS : 400.00
+Features : fp asimd evtstrm aes pmull sha1 sha2 crc32 atomics cpuid asimdrdm
+CPU implementer : 0x43
+CPU architecture: 8
+CPU variant : 0x1
+CPU part : 0x0af
+CPU revision : 1
diff --git a/lib/spack/spack/test/fetch_strategy.py b/lib/spack/spack/test/fetch_strategy.py
new file mode 100644
index 0000000000..ab1aa35408
--- /dev/null
+++ b/lib/spack/spack/test/fetch_strategy.py
@@ -0,0 +1,17 @@
+# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import pytest
+
+from spack.fetch_strategy import from_url_scheme
+
+
+def test_fetchstrategy_bad_url_scheme():
+ """Ensure that trying to make a fetch strategy from a URL with an
+ unsupported scheme fails as expected."""
+
+ with pytest.raises(ValueError):
+ fetcher = from_url_scheme( # noqa: F841
+ 'bogus-scheme://example.com/a/b/c')
diff --git a/lib/spack/spack/test/llnl/util/__init__.py b/lib/spack/spack/test/llnl/util/__init__.py
new file mode 100644
index 0000000000..94f8ac4d9e
--- /dev/null
+++ b/lib/spack/spack/test/llnl/util/__init__.py
@@ -0,0 +1,4 @@
+# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
diff --git a/lib/spack/spack/test/llnl/util/cpu.py b/lib/spack/spack/test/llnl/util/cpu.py
index f75c02f4b0..2bf29c9ff8 100644
--- a/lib/spack/spack/test/llnl/util/cpu.py
+++ b/lib/spack/spack/test/llnl/util/cpu.py
@@ -32,6 +32,8 @@ from llnl.util.cpu import Microarchitecture # noqa
'linux-scientific7-piledriver',
'linux-rhel6-piledriver',
'linux-centos7-power8le',
+ 'linux-centos7-thunderx2',
+ 'linux-centos7-cascadelake',
'darwin-mojave-ivybridge',
'darwin-mojave-haswell',
'darwin-mojave-skylake',
@@ -86,6 +88,7 @@ def supported_target(request):
return request.param
+@pytest.mark.regression('13803')
def test_target_detection(expected_target):
detected_target = llnl.util.cpu.host()
assert detected_target == expected_target
@@ -121,6 +124,8 @@ def test_equality(supported_target):
('piledriver <= steamroller', True),
('zen2 >= zen', True),
('zen >= zen', True),
+ ('aarch64 <= thunderx2', True),
+ ('aarch64 <= a64fx', True),
# Test unrelated microarchitectures
('power8 < skylake', False),
('power8 <= skylake', False),
@@ -205,12 +210,16 @@ def test_target_json_schema():
('nehalem', 'gcc', '4.9.3', '-march=nehalem -mtune=nehalem'),
('nehalem', 'gcc', '4.8.5', '-march=corei7 -mtune=corei7'),
('sandybridge', 'gcc', '4.8.5', '-march=corei7-avx -mtune=corei7-avx'),
+ ('thunderx2', 'gcc', '4.8.5', '-march=armv8-a'),
+ ('thunderx2', 'gcc', '4.9.3', '-march=armv8-a+crc+crypto'),
# Test Clang / LLVM
- ('sandybridge', 'clang', '3.9.0', '-march=x86-64 -mcpu=sandybridge'),
- ('icelake', 'clang', '6.0.0', '-march=x86-64 -mcpu=icelake'),
- ('icelake', 'clang', '8.0.0', '-march=x86-64 -mcpu=icelake-client'),
- ('zen2', 'clang', '9.0.0', '-march=x86-64 -mcpu=znver2'),
- ('power9le', 'clang', '8.0.0', '-march=ppc64le -mcpu=pwr9'),
+ ('sandybridge', 'clang', '3.9.0', '-march=sandybridge -mtune=sandybridge'),
+ ('icelake', 'clang', '6.0.0', '-march=icelake -mtune=icelake'),
+ ('icelake', 'clang', '8.0.0',
+ '-march=icelake-client -mtune=icelake-client'),
+ ('zen2', 'clang', '9.0.0', '-march=znver2 -mtune=znver2'),
+ ('power9le', 'clang', '8.0.0', '-mcpu=power9 -mtune=power9'),
+ ('thunderx2', 'clang', '6.0.0', '-mcpu=thunderx2t99'),
# Test Intel on Intel CPUs
('sandybridge', 'intel', '17.0.2', '-march=corei7-avx -mtune=corei7-avx'),
('sandybridge', 'intel', '18.0.5',
diff --git a/lib/spack/spack/test/modules/__init__.py b/lib/spack/spack/test/modules/__init__.py
new file mode 100644
index 0000000000..94f8ac4d9e
--- /dev/null
+++ b/lib/spack/spack/test/modules/__init__.py
@@ -0,0 +1,4 @@
+# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
diff --git a/lib/spack/spack/test/package_class.py b/lib/spack/spack/test/package_class.py
index 9502be08c9..25c9258759 100644
--- a/lib/spack/spack/test/package_class.py
+++ b/lib/spack/spack/test/package_class.py
@@ -10,14 +10,14 @@ etc.). Only methods like ``possible_dependencies()`` that deal with the
static DSL metadata for packages.
"""
+import pytest
import spack.repo
-def test_possible_dependencies(mock_packages):
- mpileaks = spack.repo.get('mpileaks')
+@pytest.fixture
+def mpileaks_possible_deps(mock_packages):
mpi_names = [spec.name for spec in spack.repo.path.providers_for('mpi')]
-
- assert mpileaks.possible_dependencies(expand_virtuals=True) == {
+ possible = {
'callpath': set(['dyninst'] + mpi_names),
'dyninst': set(['libdwarf', 'libelf']),
'fake': set(),
@@ -29,6 +29,13 @@ def test_possible_dependencies(mock_packages):
'multi-provider-mpi': set(),
'zmpi': set(['fake']),
}
+ return possible
+
+
+def test_possible_dependencies(mock_packages, mpileaks_possible_deps):
+ mpileaks = spack.repo.get('mpileaks')
+ assert (mpileaks.possible_dependencies(expand_virtuals=True) ==
+ mpileaks_possible_deps)
assert mpileaks.possible_dependencies(expand_virtuals=False) == {
'callpath': set(['dyninst']),
@@ -40,6 +47,15 @@ def test_possible_dependencies(mock_packages):
}
+def test_possible_dependencies_missing(mock_packages):
+ md = spack.repo.get("missing-dependency")
+ missing = {}
+ md.possible_dependencies(transitive=True, missing=missing)
+ assert missing["missing-dependency"] == set([
+ "this-is-a-missing-dependency"
+ ])
+
+
def test_possible_dependencies_with_deptypes(mock_packages):
dtbuild1 = spack.repo.get('dtbuild1')
@@ -59,3 +75,17 @@ def test_possible_dependencies_with_deptypes(mock_packages):
'dtbuild1': set(['dtlink2']),
'dtlink2': set(),
}
+
+
+def test_possible_dependencies_with_multiple_classes(
+ mock_packages, mpileaks_possible_deps):
+ pkgs = ['dt-diamond', 'mpileaks']
+ expected = mpileaks_possible_deps.copy()
+ expected.update({
+ 'dt-diamond': set(['dt-diamond-left', 'dt-diamond-right']),
+ 'dt-diamond-left': set(['dt-diamond-bottom']),
+ 'dt-diamond-right': set(['dt-diamond-bottom']),
+ 'dt-diamond-bottom': set(),
+ })
+
+ assert spack.package.possible_dependencies(*pkgs) == expected
diff --git a/lib/spack/spack/test/package_sanity.py b/lib/spack/spack/test/package_sanity.py
index bbf63881a7..1764d8ac25 100644
--- a/lib/spack/spack/test/package_sanity.py
+++ b/lib/spack/spack/test/package_sanity.py
@@ -10,6 +10,7 @@ import re
import pytest
import spack.fetch_strategy
+import spack.package
import spack.paths
import spack.repo
import spack.util.executable as executable
@@ -141,7 +142,6 @@ def test_all_packages_use_sha256_checksums():
assert [] == errors
-@pytest.mark.xfail
def test_api_for_build_and_run_environment():
"""Ensure that every package uses the correct API to set build and
run environment, and not the old one.
@@ -154,7 +154,7 @@ def test_api_for_build_and_run_environment():
failing.append(pkg)
msg = ('there are {0} packages using the old API to set build '
- 'and run environment [{1}], for further information see'
+ 'and run environment [{1}], for further information see '
'https://github.com/spack/spack/pull/11115')
assert not failing, msg.format(
len(failing), ','.join(x.name for x in failing)
@@ -182,7 +182,24 @@ def test_prs_update_old_api():
if failed:
failing.append(name)
- msg = 'there are {0} packages still using old APIs in this PR [{1}]'
+ msg = ('there are {0} packages using the old API to set build '
+ 'and run environment [{1}], for further information see '
+ 'https://github.com/spack/spack/pull/11115')
assert not failing, msg.format(
len(failing), ','.join(failing)
)
+
+
+def test_all_dependencies_exist():
+ """Make sure no packages have nonexisting dependencies."""
+ missing = {}
+ pkgs = [pkg for pkg in spack.repo.path.all_package_names()]
+ spack.package.possible_dependencies(
+ *pkgs, transitive=True, missing=missing)
+
+ lines = [
+ "%s: [%s]" % (name, ", ".join(deps)) for name, deps in missing.items()
+ ]
+ assert not missing, "These packages have missing dependencies:\n" + (
+ "\n".join(lines)
+ )
diff --git a/lib/spack/spack/test/packages.py b/lib/spack/spack/test/packages.py
index 186b0d0007..bd4ba95053 100644
--- a/lib/spack/spack/test/packages.py
+++ b/lib/spack/spack/test/packages.py
@@ -16,6 +16,11 @@ from spack.version import VersionChecksumError
import spack.directives
+def _generate_content_strip_name(spec):
+ content = package_content(spec)
+ return content.replace(spec.package.__class__.__name__, '')
+
+
@pytest.mark.usefixtures('config', 'mock_packages')
class TestPackage(object):
def test_load_package(self):
@@ -53,38 +58,43 @@ class TestPackage(object):
assert '_3db' == mod_to_class('3db')
def test_content_hash_all_same_but_patch_contents(self):
- spec1 = Spec("hash-test1@1.1")
- spec2 = Spec("hash-test2@1.1")
- spec1.concretize()
- spec2.concretize()
- content1 = package_content(spec1)
- content1 = content1.replace(spec1.package.__class__.__name__, '')
- content2 = package_content(spec2)
- content2 = content2.replace(spec2.package.__class__.__name__, '')
+ spec1 = Spec("hash-test1@1.1").concretized()
+ spec2 = Spec("hash-test2@1.1").concretized()
+ content1 = _generate_content_strip_name(spec1)
+ content2 = _generate_content_strip_name(spec2)
assert spec1.package.content_hash(content=content1) != \
spec2.package.content_hash(content=content2)
def test_content_hash_different_variants(self):
- spec1 = Spec("hash-test1@1.2 +variantx")
- spec2 = Spec("hash-test2@1.2 ~variantx")
- spec1.concretize()
- spec2.concretize()
- content1 = package_content(spec1)
- content1 = content1.replace(spec1.package.__class__.__name__, '')
- content2 = package_content(spec2)
- content2 = content2.replace(spec2.package.__class__.__name__, '')
+ spec1 = Spec("hash-test1@1.2 +variantx").concretized()
+ spec2 = Spec("hash-test2@1.2 ~variantx").concretized()
+ content1 = _generate_content_strip_name(spec1)
+ content2 = _generate_content_strip_name(spec2)
assert spec1.package.content_hash(content=content1) == \
spec2.package.content_hash(content=content2)
+ def test_content_hash_cannot_get_details_from_ast(self):
+ """Packages hash-test1 and hash-test3 would be considered the same
+ except that hash-test3 conditionally executes a phase based on
+ a "when" directive that Spack cannot evaluate by examining the
+ AST. This test ensures that Spack can compute a content hash
+ for hash-test3. If Spack cannot determine when a phase applies,
+ it adds it by default, so the test also ensures that the hashes
+ differ where Spack includes a phase on account of AST-examination
+ failure.
+ """
+ spec3 = Spec("hash-test1@1.7").concretized()
+ spec4 = Spec("hash-test3@1.7").concretized()
+ content3 = _generate_content_strip_name(spec3)
+ content4 = _generate_content_strip_name(spec4)
+ assert(spec3.package.content_hash(content=content3) !=
+ spec4.package.content_hash(content=content4))
+
def test_all_same_but_archive_hash(self):
- spec1 = Spec("hash-test1@1.3")
- spec2 = Spec("hash-test2@1.3")
- spec1.concretize()
- spec2.concretize()
- content1 = package_content(spec1)
- content1 = content1.replace(spec1.package.__class__.__name__, '')
- content2 = package_content(spec2)
- content2 = content2.replace(spec2.package.__class__.__name__, '')
+ spec1 = Spec("hash-test1@1.3").concretized()
+ spec2 = Spec("hash-test2@1.3").concretized()
+ content1 = _generate_content_strip_name(spec1)
+ content2 = _generate_content_strip_name(spec2)
assert spec1.package.content_hash(content=content1) != \
spec2.package.content_hash(content=content2)
diff --git a/lib/spack/spack/test/patch.py b/lib/spack/spack/test/patch.py
index b705d01e42..5a4b6dba34 100644
--- a/lib/spack/spack/test/patch.py
+++ b/lib/spack/spack/test/patch.py
@@ -24,6 +24,7 @@ from spack.spec import Spec
foo_sha256 = 'b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c'
bar_sha256 = '7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730'
baz_sha256 = 'bf07a7fbb825fc0aae7bf4a1177b2b31fcf8a3feeaf7092761e18c859ee52a9c'
+biz_sha256 = 'a69b288d7393261e613c276c6d38a01461028291f6e381623acc58139d01f54d'
# url patches
url1_sha256 = 'abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234'
@@ -105,6 +106,20 @@ def test_patch_in_spec(mock_packages, config):
tuple(spec.variants['patches']._patches_in_order_of_appearance))
+def test_patch_mixed_versions_subset_constraint(mock_packages, config):
+ """If we have a package with mixed x.y and x.y.z versions, make sure that
+ a patch applied to a version range of x.y.z versions is not applied to
+ an x.y version.
+ """
+ spec1 = Spec('patch@1.0.1')
+ spec1.concretize()
+ assert biz_sha256 in spec1.variants['patches'].value
+
+ spec2 = Spec('patch@1.0')
+ spec2.concretize()
+ assert biz_sha256 not in spec2.variants['patches'].value
+
+
def test_patch_order(mock_packages, config):
spec = Spec('dep-diamond-patch-top')
spec.concretize()
diff --git a/lib/spack/spack/test/relocate.py b/lib/spack/spack/test/relocate.py
index f070e150c7..4d7dc5b942 100644
--- a/lib/spack/spack/test/relocate.py
+++ b/lib/spack/spack/test/relocate.py
@@ -60,6 +60,15 @@ def test_file_is_relocatable(source_file, is_relocatable):
assert spack.relocate.file_is_relocatable(executable) is is_relocatable
+@pytest.mark.requires_executables(
+ 'patchelf', 'strings', 'file'
+)
+def test_patchelf_is_relocatable():
+ patchelf = spack.relocate.get_patchelf()
+ assert spack.relocate.is_binary(patchelf)
+ assert spack.relocate.file_is_relocatable(patchelf)
+
+
@pytest.mark.skipif(
platform.system().lower() != 'linux',
reason='implementation for MacOS still missing'
diff --git a/lib/spack/spack/test/s3_fetch.py b/lib/spack/spack/test/s3_fetch.py
new file mode 100644
index 0000000000..d904417ed0
--- /dev/null
+++ b/lib/spack/spack/test/s3_fetch.py
@@ -0,0 +1,29 @@
+# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import pytest
+
+import spack.fetch_strategy as spack_fs
+import spack.stage as spack_stage
+
+
+def test_s3fetchstrategy_sans_url():
+ """Ensure constructor with no URL fails."""
+ with pytest.raises(ValueError):
+ spack_fs.S3FetchStrategy(None)
+
+
+def test_s3fetchstrategy_bad_url(tmpdir):
+ """Ensure fetch with bad URL fails as expected."""
+ testpath = str(tmpdir)
+
+ fetcher = spack_fs.S3FetchStrategy(url='file:///does-not-exist')
+ assert fetcher is not None
+
+ with spack_stage.Stage(fetcher, path=testpath) as stage:
+ assert stage is not None
+ assert fetcher.archive_file is None
+ with pytest.raises(spack_fs.FetchError):
+ fetcher.fetch()
diff --git a/lib/spack/spack/test/spec_semantics.py b/lib/spack/spack/test/spec_semantics.py
index a183742e65..9d8b9de647 100644
--- a/lib/spack/spack/test/spec_semantics.py
+++ b/lib/spack/spack/test/spec_semantics.py
@@ -9,7 +9,7 @@ import pytest
from spack.spec import Spec, UnsatisfiableSpecError, SpecError
from spack.spec import substitute_abstract_variants
from spack.spec import SpecFormatSigilError, SpecFormatStringError
-from spack.variant import InvalidVariantValueError
+from spack.variant import InvalidVariantValueError, UnknownVariantError
from spack.variant import MultipleValuesInExclusiveVariantError
import spack.architecture
@@ -981,3 +981,9 @@ class TestSpecSematics(object):
def test_target_constraints(self, spec, constraint, expected_result):
s = Spec(spec)
assert s.satisfies(constraint) is expected_result
+
+ @pytest.mark.regression('13124')
+ def test_error_message_unknown_variant(self):
+ s = Spec('mpileaks +unknown')
+ with pytest.raises(UnknownVariantError, match=r'package has no such'):
+ s.concretize()
diff --git a/lib/spack/spack/test/url_fetch.py b/lib/spack/spack/test/url_fetch.py
index 8047d5e26e..b4df27336e 100644
--- a/lib/spack/spack/test/url_fetch.py
+++ b/lib/spack/spack/test/url_fetch.py
@@ -3,6 +3,7 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import collections
import os
import pytest
@@ -10,8 +11,7 @@ from llnl.util.filesystem import working_dir, is_exe
import spack.repo
import spack.config
-from spack.fetch_strategy import FailedDownloadError
-from spack.fetch_strategy import from_list_url, URLFetchStrategy
+import spack.fetch_strategy as fs
from spack.spec import Spec
from spack.stage import Stage
from spack.version import ver
@@ -23,10 +23,30 @@ def checksum_type(request):
return request.param
+@pytest.fixture
+def pkg_factory():
+ Pkg = collections.namedtuple(
+ 'Pkg', ['url_for_version', 'urls', 'url', 'versions']
+ )
+
+ def factory(url, urls):
+
+ def fn(v):
+ main_url = url or urls.pop(0)
+ return spack.url.substitute_version(main_url, v)
+
+ return Pkg(
+ url_for_version=fn, url=url, urls=urls,
+ versions=collections.defaultdict(dict)
+ )
+
+ return factory
+
+
def test_urlfetchstrategy_sans_url():
"""Ensure constructor with no URL fails."""
with pytest.raises(ValueError):
- with URLFetchStrategy(None):
+ with fs.URLFetchStrategy(None):
pass
@@ -34,8 +54,8 @@ def test_urlfetchstrategy_bad_url(tmpdir):
"""Ensure fetch with bad URL fails as expected."""
testpath = str(tmpdir)
- with pytest.raises(FailedDownloadError):
- fetcher = URLFetchStrategy(url='file:///does-not-exist')
+ with pytest.raises(fs.FailedDownloadError):
+ fetcher = fs.URLFetchStrategy(url='file:///does-not-exist')
assert fetcher is not None
with Stage(fetcher, path=testpath) as stage:
@@ -106,8 +126,8 @@ def test_from_list_url(mock_packages, config, spec, url, digest):
"""
specification = Spec(spec).concretized()
pkg = spack.repo.get(specification)
- fetch_strategy = from_list_url(pkg)
- assert isinstance(fetch_strategy, URLFetchStrategy)
+ fetch_strategy = fs.from_list_url(pkg)
+ assert isinstance(fetch_strategy, fs.URLFetchStrategy)
assert os.path.basename(fetch_strategy.url) == url
assert fetch_strategy.digest == digest
@@ -118,8 +138,8 @@ def test_from_list_url_unspecified(mock_packages, config):
spec = Spec('url-list-test @2.0.0').concretized()
pkg = spack.repo.get(spec)
- fetch_strategy = from_list_url(pkg)
- assert isinstance(fetch_strategy, URLFetchStrategy)
+ fetch_strategy = fs.from_list_url(pkg)
+ assert isinstance(fetch_strategy, fs.URLFetchStrategy)
assert os.path.basename(fetch_strategy.url) == 'foo-2.0.0.tar.gz'
assert fetch_strategy.digest is None
@@ -128,7 +148,7 @@ def test_nosource_from_list_url(mock_packages, config):
"""This test confirms BundlePackages do not have list url."""
pkg = spack.repo.get('nosource')
- fetch_strategy = from_list_url(pkg)
+ fetch_strategy = fs.from_list_url(pkg)
assert fetch_strategy is None
@@ -148,9 +168,26 @@ def test_url_extra_fetch(tmpdir, mock_archive):
"""Ensure a fetch after downloading is effectively a no-op."""
testpath = str(tmpdir)
- fetcher = URLFetchStrategy(mock_archive.url)
+ fetcher = fs.URLFetchStrategy(mock_archive.url)
with Stage(fetcher, path=testpath) as stage:
assert fetcher.archive_file is None
stage.fetch()
assert fetcher.archive_file is not None
fetcher.fetch()
+
+
+@pytest.mark.parametrize('url,urls,version,expected', [
+ (None,
+ ['https://ftpmirror.gnu.org/autoconf/autoconf-2.69.tar.gz',
+ 'https://ftp.gnu.org/gnu/autoconf/autoconf-2.69.tar.gz'],
+ '2.62',
+ ['https://ftpmirror.gnu.org/autoconf/autoconf-2.62.tar.gz',
+ 'https://ftp.gnu.org/gnu/autoconf/autoconf-2.62.tar.gz'])
+])
+def test_candidate_urls(pkg_factory, url, urls, version, expected):
+ """Tests that candidate urls include mirrors and that they go through
+ pattern matching and substitution for versions.
+ """
+ pkg = pkg_factory(url, urls)
+ f = fs._from_merged_attrs(fs.URLFetchStrategy, pkg, version)
+ assert f.candidate_urls == expected
diff --git a/lib/spack/spack/test/url_parse.py b/lib/spack/spack/test/url_parse.py
index c557f78405..7c292938d2 100644
--- a/lib/spack/spack/test/url_parse.py
+++ b/lib/spack/spack/test/url_parse.py
@@ -60,6 +60,8 @@ from spack.version import Version
('cppad-20170114.gpl', 'cppad-20170114'),
# Arch
('pcraster-4.1.0_x86-64', 'pcraster-4.1.0'),
+ ('dislin-11.0.linux.i586_64', 'dislin-11.0'),
+ ('PAGIT.V1.01.64bit', 'PAGIT.V1.01'),
# OS - linux
('astyle_2.04_linux', 'astyle_2.04'),
# OS - unix
@@ -85,20 +87,31 @@ from spack.version import Version
# Combinations of multiple patterns - darwin
('ghc-7.0.4-x86_64-apple-darwin', 'ghc-7.0.4'),
('ghc-7.0.4-i386-apple-darwin', 'ghc-7.0.4'),
+ # Combinations of multiple patterns - centos
+ ('sratoolkit.2.8.2-1-centos_linux64', 'sratoolkit.2.8.2-1'),
# Combinations of multiple patterns - arch
('VizGlow_v2.2alpha17-R21November2016-Linux-x86_64-Install',
'VizGlow_v2.2alpha17-R21November2016'),
('jdk-8u92-linux-x64', 'jdk-8u92'),
('cuda_6.5.14_linux_64.run', 'cuda_6.5.14'),
+ ('Mathematica_12.0.0_LINUX.sh', 'Mathematica_12.0.0'),
+ ('trf407b.linux64', 'trf407b'),
# Combinations of multiple patterns - with
('mafft-7.221-with-extensions-src', 'mafft-7.221'),
('spark-2.0.0-bin-without-hadoop', 'spark-2.0.0'),
+ ('conduit-v0.3.0-src-with-blt', 'conduit-v0.3.0'),
+ # Combinations of multiple patterns - rock
+ ('bitlib-23-2.src.rock', 'bitlib-23-2'),
# Combinations of multiple patterns - public
('dakota-6.3-public.src', 'dakota-6.3'),
# Combinations of multiple patterns - universal
('synergy-1.3.6p2-MacOSX-Universal', 'synergy-1.3.6p2'),
# Combinations of multiple patterns - dynamic
('snptest_v2.5.2_linux_x86_64_dynamic', 'snptest_v2.5.2'),
+ # Combinations of multiple patterns - other
+ ('alglib-3.11.0.cpp.gpl', 'alglib-3.11.0'),
+ ('hpcviewer-2019.08-linux.gtk.x86_64', 'hpcviewer-2019.08'),
+ ('apache-mxnet-src-1.3.0-incubating', 'apache-mxnet-src-1.3.0'),
])
def test_url_strip_version_suffixes(url, expected):
stripped = strip_version_suffixes(url)
@@ -109,24 +122,40 @@ def test_url_strip_version_suffixes(url, expected):
# No suffix
('rgb-1.0.6', '1.0.6', 'rgb'),
('nauty26r7', '26r7', 'nauty'),
+ ('PAGIT.V1.01', '1.01', 'PAGIT'),
+ ('AmpliconNoiseV1.29', '1.29', 'AmpliconNoise'),
# Download type - install
('converge_install_2.3.16', '2.3.16', 'converge'),
# Download type - src
('jpegsrc.v9b', '9b', 'jpeg'),
+ ('blatSrc35', '35', 'blat'),
+ # Download type - open
+ ('RepeatMasker-open-4-0-7', '4-0-7', 'RepeatMasker'),
# Download type - archive
('coinhsl-archive-2014.01.17', '2014.01.17', 'coinhsl'),
# Download type - std
('ghostscript-fonts-std-8.11', '8.11', 'ghostscript-fonts'),
+ # Download type - bin
+ ('GapCloser-bin-v1.12-r6', '1.12-r6', 'GapCloser'),
+ # Download type - software
+ ('orthomclSoftware-v2.0.9', '2.0.9', 'orthomcl'),
# Download version - release
('cbench_release_1.3.0.tar.gz', '1.3.0', 'cbench'),
# Download version - snapshot
('gts-snapshot-121130', '121130', 'gts'),
# Download version - distrib
('zoltan_distrib_v3.83', '3.83', 'zoltan'),
+ # Download version - latest
+ ('Platypus-latest', 'N/A', 'Platypus'),
+ # Download version - complex
+ ('qt-everywhere-opensource-src-5.7.0', '5.7.0', 'qt'),
+ # Arch
+ ('VESTA-x86_64', '3.4.6', 'VESTA'),
# VCS - bazaar
('libvterm-0+bzr681', '681', 'libvterm'),
# License - gpl
- ('PyQt-x11-gpl-4.11.3', '4.11.3', 'PyQt-x11')
+ ('PyQt-x11-gpl-4.11.3', '4.11.3', 'PyQt'),
+ ('PyQt4_gpl_x11-4.12.3', '4.12.3', 'PyQt4'),
])
def test_url_strip_name_suffixes(url, version, expected):
stripped = strip_name_suffixes(url, version)
@@ -182,6 +211,7 @@ def test_url_parse_offset(name, noffset, ver, voffset, path):
@pytest.mark.parametrize('name,version,url', [
# Common Repositories - github downloads
+ # name/archive/ver.ver
('nco', '4.6.2', 'https://github.com/nco/nco/archive/4.6.2.tar.gz'),
# name/archive/vver.ver
('vim', '8.0.0134', 'https://github.com/vim/vim/archive/v8.0.0134.tar.gz'),
@@ -257,6 +287,15 @@ def test_url_parse_offset(name, noffset, ver, voffset, path):
# Common Tarball Formats
+ # 1st Pass: Simplest case
+ # Assume name contains no digits and version contains no letters
+
+ # name-ver.ver
+ ('libpng', '1.6.37', 'http://download.sourceforge.net/libpng/libpng-1.6.37.tar.gz'),
+
+ # 2nd Pass: Version only
+ # Assume version contains no letters
+
# ver.ver
('eigen', '3.2.7', 'https://bitbucket.org/eigen/eigen/get/3.2.7.tar.bz2'),
# ver.ver-ver
@@ -266,10 +305,17 @@ def test_url_parse_offset(name, noffset, ver, voffset, path):
# vver_ver
('luafilesystem', '1_6_3', 'https://github.com/keplerproject/luafilesystem/archive/v1_6_3.tar.gz'),
- # No separators
+ # 3rd Pass: No separator characters are used
+ # Assume name contains no digits
+
+ # namever
('turbolinux', '702', 'file://{0}/turbolinux702.tar.gz'.format(os.getcwd())),
('nauty', '26r7', 'http://pallini.di.uniroma1.it/nauty26r7.tar.gz'),
- # Dashes only
+
+ # 4th Pass: A single separator character is used
+ # Assume name contains no digits
+
+ # name-name-ver-ver
('Trilinos', '12-10-1',
'https://github.com/trilinos/Trilinos/archive/trilinos-release-12-10-1.tar.gz'),
('panda', '2016-03-07',
@@ -278,7 +324,7 @@ def test_url_parse_offset(name, noffset, ver, voffset, path):
'http://gts.sourceforge.net/tarballs/gts-snapshot-121130.tar.gz'),
('cdd', '061a',
'http://www.cs.mcgill.ca/~fukuda/download/cdd/cdd-061a.tar.gz'),
- # Only underscores
+ # name_name_ver_ver
('tinyxml', '2_6_2',
'https://sourceforge.net/projects/tinyxml/files/tinyxml/2.6.2/tinyxml_2_6_2.tar.gz'),
('boost', '1_55_0',
@@ -287,9 +333,6 @@ def test_url_parse_offset(name, noffset, ver, voffset, path):
'https://github.com/dhmunro/yorick/archive/y_2_2_04.tar.gz'),
('tbb', '44_20160413',
'https://www.threadingbuildingblocks.org/sites/default/files/software_releases/source/tbb44_20160413oss_src.tgz'),
-
- # Only dots
-
# name.name.ver.ver
('prank', '150803', 'http://wasabiapp.org/download/prank/prank.source.150803.tgz'),
('jpeg', '9b', 'http://www.ijg.org/files/jpegsrc.v9b.tar.gz'),
@@ -302,61 +345,51 @@ def test_url_parse_offset(name, noffset, ver, voffset, path):
('geant', '4.10.01.p03', 'http://geant4.cern.ch/support/source/geant4.10.01.p03.tar.gz'),
('tcl', '8.6.5', 'http://prdownloads.sourceforge.net/tcl/tcl8.6.5-src.tar.gz'),
- # Dash and dots
+ # 5th Pass: Two separator characters are used
+ # Name may contain digits, version may contain letters
# name-name-ver.ver
- # digit in name
('m4', '1.4.17', 'https://ftp.gnu.org/gnu/m4/m4-1.4.17.tar.gz'),
- # letter in version
('gmp', '6.0.0a', 'https://gmplib.org/download/gmp/gmp-6.0.0a.tar.bz2'),
- # version starts with 'v'
('LaunchMON', '1.0.2',
'https://github.com/LLNL/LaunchMON/releases/download/v1.0.2/launchmon-v1.0.2.tar.gz'),
# name-ver-ver.ver
('libedit', '20150325-3.1', 'http://thrysoee.dk/editline/libedit-20150325-3.1.tar.gz'),
-
- # Dash and unserscores
-
# name-name-ver_ver
('icu4c', '57_1', 'http://download.icu-project.org/files/icu4c/57.1/icu4c-57_1-src.tgz'),
-
- # Underscores and dots
-
# name_name_ver.ver
('superlu_dist', '4.1', 'http://crd-legacy.lbl.gov/~xiaoye/SuperLU/superlu_dist_4.1.tar.gz'),
('pexsi', '0.9.0', 'https://math.berkeley.edu/~linlin/pexsi/download/pexsi_v0.9.0.tar.gz'),
# name_name.ver.ver
('fer', '696', 'ftp://ftp.pmel.noaa.gov/ferret/pub/source/fer_source.v696.tar.gz'),
-
- # Dash dot dah dot
-
+ # name_name_ver-ver
+ ('Bridger', '2014-12-01',
+ 'https://downloads.sourceforge.net/project/rnaseqassembly/Bridger_r2014-12-01.tar.gz'),
# name-name-ver.ver-ver.ver
('sowing', '1.1.23-p1', 'http://ftp.mcs.anl.gov/pub/petsc/externalpackages/sowing-1.1.23-p1.tar.gz'),
('bib2xhtml', '3.0-15-gf506', 'http://www.spinellis.gr/sw/textproc/bib2xhtml/bib2xhtml-v3.0-15-gf506.tar.gz'),
# namever.ver-ver.ver
('go', '1.4-bootstrap-20161024', 'https://storage.googleapis.com/golang/go1.4-bootstrap-20161024.tar.gz'),
- # Underscore dash dot
+ # 6th Pass: All three separator characters are used
+ # Name may contain digits, version may contain letters
# name_name-ver.ver
('the_silver_searcher', '0.32.0', 'http://geoff.greer.fm/ag/releases/the_silver_searcher-0.32.0.tar.gz'),
('sphinx_rtd_theme', '0.1.10a0',
'https://pypi.python.org/packages/source/s/sphinx_rtd_theme/sphinx_rtd_theme-0.1.10a0.tar.gz'),
-
- # Dot underscore dot dash dot
-
# name.name_ver.ver-ver.ver
('TH.data', '1.0-8', 'https://cran.r-project.org/src/contrib/TH.data_1.0-8.tar.gz'),
('XML', '3.98-1.4', 'https://cran.r-project.org/src/contrib/XML_3.98-1.4.tar.gz'),
-
- # Dash dot underscore dot
-
# name-name-ver.ver_ver.ver
('pypar', '2.1.5_108',
'https://storage.googleapis.com/google-code-archive-downloads/v2/code.google.com/pypar/pypar-2.1.5_108.tgz'),
# name-namever.ver_ver.ver
('STAR-CCM+', '11.06.010_02',
'file://{0}/STAR-CCM+11.06.010_02_linux-x86_64.tar.gz'.format(os.getcwd())),
+ # name-name_name-ver.ver
+ ('PerlIO-utf8_strict', '0.002',
+ 'http://search.cpan.org/CPAN/authors/id/L/LE/LEONT/PerlIO-utf8_strict-0.002.tar.gz'),
# Various extensions
# .tar.gz
@@ -399,18 +432,61 @@ def test_url_parse_offset(name, noffset, ver, voffset, path):
# .txz
('kim-api', '2.1.0', 'https://s3.openkim.org/kim-api/kim-api-2.1.0.txz'),
- # Weird URLS
+ # 8th Pass: Query strings
- # github.com/repo/name/releases/download/name-vver/name
- ('nextflow', '0.20.1', 'https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow'),
# suffix queries
('swiftsim', '0.3.0', 'http://gitlab.cosma.dur.ac.uk/swift/swiftsim/repository/archive.tar.gz?ref=v0.3.0'),
- ('swiftsim', '0.3.0', 'https://gitlab.cosma.dur.ac.uk/api/v4/projects/swift%2Fswiftsim/repository/archive.tar.gz?sha=v0.3.0'),
+ ('swiftsim', '0.3.0',
+ 'https://gitlab.cosma.dur.ac.uk/api/v4/projects/swift%2Fswiftsim/repository/archive.tar.gz?sha=v0.3.0'),
('sionlib', '1.7.1', 'http://apps.fz-juelich.de/jsc/sionlib/download.php?version=1.7.1'),
+ ('jube2', '2.2.2', 'https://apps.fz-juelich.de/jsc/jube/jube2/download.php?version=2.2.2'),
+ ('archive', '1.0.0', 'https://code.ornl.gov/eck/papyrus/repository/archive.tar.bz2?ref=v1.0.0'),
+ ('VecGeom', '0.3.rc',
+ 'https://gitlab.cern.ch/api/v4/projects/VecGeom%2FVecGeom/repository/archive.tar.gz?sha=v0.3.rc'),
+ ('parsplice', '1.1',
+ 'https://gitlab.com/api/v4/projects/exaalt%2Fparsplice/repository/archive.tar.gz?sha=v1.1'),
+ ('busco', '2.0.1', 'https://gitlab.com/api/v4/projects/ezlab%2Fbusco/repository/archive.tar.gz?sha=2.0.1'),
+ ('libaec', '1.0.2',
+ 'https://gitlab.dkrz.de/api/v4/projects/k202009%2Flibaec/repository/archive.tar.gz?sha=v1.0.2'),
+ ('icet', '2.1.1',
+ 'https://gitlab.kitware.com/api/v4/projects/icet%2Ficet/repository/archive.tar.bz2?sha=IceT-2.1.1'),
+ ('vtk-m', '1.3.0',
+ 'https://gitlab.kitware.com/api/v4/projects/vtk%2Fvtk-m/repository/archive.tar.gz?sha=v1.3.0'),
+ ('GATK', '3.8-1-0-gf15c1c3ef',
+ 'https://software.broadinstitute.org/gatk/download/auth?package=GATK-archive&version=3.8-1-0-gf15c1c3ef'),
# stem queries
('slepc', '3.6.2', 'http://slepc.upv.es/download/download.php?filename=slepc-3.6.2.tar.gz'),
('otf', '1.12.5salmon',
'http://wwwpub.zih.tu-dresden.de/%7Emlieber/dcount/dcount.php?package=otf&get=OTF-1.12.5salmon.tar.gz'),
+ ('eospac', '6.4.0beta.1',
+ 'http://laws-green.lanl.gov/projects/data/eos/get_file.php?package=eospac&filename=eospac_v6.4.0beta.1_r20171213193219.tgz'),
+ ('vampirtrace', '5.14.4',
+ 'http://wwwpub.zih.tu-dresden.de/~mlieber/dcount/dcount.php?package=vampirtrace&get=VampirTrace-5.14.4.tar.gz'),
+ # (we don't actually look for these, they are picked up
+ # during the preliminary stem parsing)
+ ('octopus', '6.0', 'http://octopus-code.org/down.php?file=6.0/octopus-6.0.tar.gz'),
+ ('cloog', '0.18.1', 'http://www.bastoul.net/cloog/pages/download/count.php3?url=./cloog-0.18.1.tar.gz'),
+ ('libxc', '2.2.2', 'http://www.tddft.org/programs/octopus/down.php?file=libxc/libxc-2.2.2.tar.gz'),
+ ('cistem', '1.0.0-beta',
+ 'https://cistem.org/system/tdf/upload3/cistem-1.0.0-beta-source-code.tar.gz?file=1&type=cistem_details&id=37&force=0'),
+ ('Magics', '4.1.0',
+ 'https://confluence.ecmwf.int/download/attachments/3473464/Magics-4.1.0-Source.tar.gz?api=v2'),
+ ('grib_api', '1.17.0',
+ 'https://software.ecmwf.int/wiki/download/attachments/3473437/grib_api-1.17.0-Source.tar.gz?api=v2'),
+ ('eccodes', '2.2.0',
+ 'https://software.ecmwf.int/wiki/download/attachments/45757960/eccodes-2.2.0-Source.tar.gz?api=v2'),
+ ('SWFFT', '1.0',
+ 'https://xgitlab.cels.anl.gov/api/v4/projects/hacc%2FSWFFT/repository/archive.tar.gz?sha=v1.0'),
+
+ # 9th Pass: Version in path
+
+ # github.com/repo/name/releases/download/name-vver/name
+ ('nextflow', '0.20.1', 'https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow'),
+ # ver/name
+ ('ncbi', '2.2.26', 'ftp://ftp.ncbi.nlm.nih.gov/blast/executables/legacy.NOTSUPPORTED/2.2.26/ncbi.tar.gz'),
+
+ # Other tests for corner cases
+
# single character name
('R', '3.3.2', 'https://cloud.r-project.org/src/base/R-3/R-3.3.2.tar.gz'),
# name starts with digit
diff --git a/lib/spack/spack/test/util/__init__.py b/lib/spack/spack/test/util/__init__.py
new file mode 100644
index 0000000000..94f8ac4d9e
--- /dev/null
+++ b/lib/spack/spack/test/util/__init__.py
@@ -0,0 +1,4 @@
+# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
diff --git a/lib/spack/spack/test/util/executable.py b/lib/spack/spack/test/util/executable.py
index 054cdbfccd..1a9b636a7e 100644
--- a/lib/spack/spack/test/util/executable.py
+++ b/lib/spack/spack/test/util/executable.py
@@ -8,6 +8,7 @@ import sys
import llnl.util.filesystem as fs
import spack.util.executable as ex
+from spack.hooks.sbang import filter_shebangs_in_directory
def test_read_unicode(tmpdir):
@@ -28,6 +29,7 @@ print(u'\\xc3')
# make it executable
fs.set_executable(script_name)
+ filter_shebangs_in_directory('.', [script_name])
# read the unicode back in and see whether things work
script = ex.Executable('./%s' % script_name)
diff --git a/lib/spack/spack/test/util/util_url.py b/lib/spack/spack/test/util/util_url.py
index 24b40ac63c..855491bfe6 100644
--- a/lib/spack/spack/test/util/util_url.py
+++ b/lib/spack/spack/test/util/util_url.py
@@ -6,6 +6,7 @@
"""Test Spack's URL handling utility functions."""
import os
import os.path
+import spack.paths
import spack.util.url as url_util
@@ -41,7 +42,7 @@ def test_url_parse():
assert(parsed.netloc == 'path')
assert(parsed.path == '/to/resource')
- spack_root = os.path.abspath(os.environ['SPACK_ROOT'])
+ spack_root = spack.paths.spack_root
parsed = url_util.parse('$spack')
assert(parsed.scheme == 'file')
assert(parsed.netloc == '')
@@ -56,7 +57,7 @@ def test_url_parse():
def test_url_local_file_path():
- spack_root = os.path.abspath(os.environ['SPACK_ROOT'])
+ spack_root = spack.paths.spack_root
lfp = url_util.local_file_path('/a/b/c.txt')
assert(lfp == '/a/b/c.txt')
@@ -171,7 +172,7 @@ def test_url_join_local_paths():
'https://mirror.spack.io/build_cache/my-package')
# file:// URL path components are *NOT* canonicalized
- spack_root = os.path.abspath(os.environ['SPACK_ROOT'])
+ spack_root = spack.paths.spack_root
join_result = url_util.join('/a/b/c', '$spack')
assert(join_result == 'file:///a/b/c/$spack') # not canonicalized
diff --git a/lib/spack/spack/test/versions.py b/lib/spack/spack/test/versions.py
index b39da0c698..60e4497a8e 100644
--- a/lib/spack/spack/test/versions.py
+++ b/lib/spack/spack/test/versions.py
@@ -266,6 +266,8 @@ def test_contains():
assert_in('1.3.5-7', '1.2:1.4')
assert_not_in('1.1', '1.2:1.4')
assert_not_in('1.5', '1.2:1.4')
+ assert_not_in('1.5', '1.5.1:1.6')
+ assert_not_in('1.5', '1.5.1:')
assert_in('1.4.2', '1.2:1.4')
assert_not_in('1.4.2', '1.2:1.4.0')
diff --git a/lib/spack/spack/test/web.py b/lib/spack/spack/test/web.py
index c80e29b523..75f5930230 100644
--- a/lib/spack/spack/test/web.py
+++ b/lib/spack/spack/test/web.py
@@ -5,9 +5,12 @@
"""Tests for web.py."""
import os
+import pytest
+
+from ordereddict_backport import OrderedDict
import spack.paths
-from spack.util.web import spider, find_versions_of_archive
+import spack.util.web as web_util
from spack.version import ver
@@ -23,7 +26,7 @@ page_4 = 'file://' + os.path.join(web_data_path, '4.html')
def test_spider_0():
- pages, links = spider(root, depth=0)
+ pages, links = web_util.spider(root, depth=0)
assert root in pages
assert page_1 not in pages
@@ -41,7 +44,7 @@ def test_spider_0():
def test_spider_1():
- pages, links = spider(root, depth=1)
+ pages, links = web_util.spider(root, depth=1)
assert root in pages
assert page_1 in pages
@@ -60,7 +63,7 @@ def test_spider_1():
def test_spider_2():
- pages, links = spider(root, depth=2)
+ pages, links = web_util.spider(root, depth=2)
assert root in pages
assert page_1 in pages
@@ -81,7 +84,7 @@ def test_spider_2():
def test_spider_3():
- pages, links = spider(root, depth=3)
+ pages, links = web_util.spider(root, depth=3)
assert root in pages
assert page_1 in pages
@@ -104,31 +107,36 @@ def test_spider_3():
def test_find_versions_of_archive_0():
- versions = find_versions_of_archive(root_tarball, root, list_depth=0)
+ versions = web_util.find_versions_of_archive(
+ root_tarball, root, list_depth=0)
assert ver('0.0.0') in versions
def test_find_versions_of_archive_1():
- versions = find_versions_of_archive(root_tarball, root, list_depth=1)
+ versions = web_util.find_versions_of_archive(
+ root_tarball, root, list_depth=1)
assert ver('0.0.0') in versions
assert ver('1.0.0') in versions
def test_find_versions_of_archive_2():
- versions = find_versions_of_archive(root_tarball, root, list_depth=2)
+ versions = web_util.find_versions_of_archive(
+ root_tarball, root, list_depth=2)
assert ver('0.0.0') in versions
assert ver('1.0.0') in versions
assert ver('2.0.0') in versions
def test_find_exotic_versions_of_archive_2():
- versions = find_versions_of_archive(root_tarball, root, list_depth=2)
+ versions = web_util.find_versions_of_archive(
+ root_tarball, root, list_depth=2)
# up for grabs to make this better.
assert ver('2.0.0b2') in versions
def test_find_versions_of_archive_3():
- versions = find_versions_of_archive(root_tarball, root, list_depth=3)
+ versions = web_util.find_versions_of_archive(
+ root_tarball, root, list_depth=3)
assert ver('0.0.0') in versions
assert ver('1.0.0') in versions
assert ver('2.0.0') in versions
@@ -137,7 +145,49 @@ def test_find_versions_of_archive_3():
def test_find_exotic_versions_of_archive_3():
- versions = find_versions_of_archive(root_tarball, root, list_depth=3)
+ versions = web_util.find_versions_of_archive(
+ root_tarball, root, list_depth=3)
assert ver('2.0.0b2') in versions
assert ver('3.0a1') in versions
assert ver('4.5-rc5') in versions
+
+
+def test_get_header():
+ headers = {
+ 'Content-type': 'text/plain'
+ }
+
+ # looking up headers should just work like a plain dict
+ # lookup when there is an entry with the right key
+ assert(web_util.get_header(headers, 'Content-type') == 'text/plain')
+
+ # looking up headers should still work if there is a fuzzy match
+ assert(web_util.get_header(headers, 'contentType') == 'text/plain')
+
+ # ...unless there is an exact match for the "fuzzy" spelling.
+ headers['contentType'] = 'text/html'
+ assert(web_util.get_header(headers, 'contentType') == 'text/html')
+
+    # If lookup has to fall back to fuzzy matching and there is more than one
+ # fuzzy match, the result depends on the internal ordering of the given
+ # mapping
+ headers = OrderedDict()
+ headers['Content-type'] = 'text/plain'
+ headers['contentType'] = 'text/html'
+
+ assert(web_util.get_header(headers, 'CONTENT_TYPE') == 'text/plain')
+ del headers['Content-type']
+ assert(web_util.get_header(headers, 'CONTENT_TYPE') == 'text/html')
+
+ # Same as above, but different ordering
+ headers = OrderedDict()
+ headers['contentType'] = 'text/html'
+ headers['Content-type'] = 'text/plain'
+
+ assert(web_util.get_header(headers, 'CONTENT_TYPE') == 'text/html')
+ del headers['contentType']
+ assert(web_util.get_header(headers, 'CONTENT_TYPE') == 'text/plain')
+
+ # If there isn't even a fuzzy match, raise KeyError
+ with pytest.raises(KeyError):
+ web_util.get_header(headers, 'ContentLength')
diff --git a/lib/spack/spack/url.py b/lib/spack/spack/url.py
index 11b289a0b1..a298d44ea2 100644
--- a/lib/spack/spack/url.py
+++ b/lib/spack/spack/url.py
@@ -153,13 +153,14 @@ def strip_version_suffixes(path):
r'[Ii]nstall',
r'all',
r'code',
- r'src(_0)?',
r'[Ss]ources?',
r'file',
r'full',
r'single',
- r'public',
r'with[a-zA-Z_-]+',
+ r'rock',
+ r'src(_0)?',
+ r'public',
r'bin',
r'binary',
r'run',
@@ -189,15 +190,24 @@ def strip_version_suffixes(path):
r'ia32',
r'intel',
r'amd64',
+ r'linux64',
r'x64',
+ r'64bit',
r'x86[_-]64',
+ r'i586_64',
r'x86',
r'i[36]86',
r'ppc64(le)?',
r'armv?(7l|6l|64)',
+ # Other
+ r'cpp',
+ r'gtk',
+ r'incubating',
+
# OS
r'[Ll]inux(_64)?',
+ r'LINUX',
r'[Uu]ni?x',
r'[Ss]un[Oo][Ss]',
r'[Mm]ac[Oo][Ss][Xx]?',
@@ -208,14 +218,18 @@ def strip_version_suffixes(path):
r'[Ww]in(64|32)?',
r'[Cc]ygwin(64|32)?',
r'[Mm]ingw',
+ r'centos',
# Arch
# Needs to come before and after OS, appears in both orders
r'ia32',
r'intel',
r'amd64',
+ r'linux64',
r'x64',
+ r'64bit',
r'x86[_-]64',
+ r'i586_64',
r'x86',
r'i[36]86',
r'ppc64(le)?',
@@ -270,31 +284,41 @@ def strip_name_suffixes(path, version):
# name-ver
# name_ver
# name.ver
- r'[._-]v?' + str(version) + '.*',
+ r'[._-][rvV]?' + str(version) + '.*',
# namever
- str(version) + '.*',
+ r'V?' + str(version) + '.*',
# Download type
r'install',
- r'src',
+ r'[Ss]rc',
r'(open)?[Ss]ources?',
+ r'[._-]open',
r'[._-]archive',
r'[._-]std',
+ r'[._-]bin',
+ r'Software',
# Download version
r'release',
r'snapshot',
r'distrib',
+ r'everywhere',
+ r'latest',
# Arch
- r'Linux64',
+ r'Linux(64)?',
+ r'x86_64',
# VCS
r'0\+bzr',
# License
r'gpl',
+
+ # Needs to come before and after gpl, appears in both orders
+ r'[._-]x11',
+ r'gpl',
]
for regex in suffix_regexes:
@@ -407,7 +431,7 @@ def parse_version_offset(path):
# 3. names can contain A-Z, a-z, 0-9, '+', separators
# 4. versions can contain A-Z, a-z, 0-9, separators
# 5. versions always start with a digit
- # 6. versions are often prefixed by a 'v' character
+ # 6. versions are often prefixed by a 'v' or 'r' character
# 7. separators are most reliable to determine name/version boundaries
# List of the following format:
@@ -450,7 +474,7 @@ def parse_version_offset(path):
(r'^[a-zA-Z+-]*(\d[\da-zA-Z-]*)$', stem),
# name_name_ver_ver
- # e.g. tinyxml_2_6_2, boost_1_55_0, tbb2017_20161128, v1_6_3
+ # e.g. tinyxml_2_6_2, boost_1_55_0, tbb2017_20161128
(r'^[a-zA-Z+_]*(\d[\da-zA-Z_]*)$', stem),
# name.name.ver.ver
@@ -476,6 +500,10 @@ def parse_version_offset(path):
# e.g. fer_source.v696
(r'^[a-zA-Z\d+_]+\.v?(\d[\da-zA-Z.]*)$', stem),
+ # name_ver-ver
+ # e.g. Bridger_r2014-12-01
+ (r'^[a-zA-Z\d+]+_r?(\d[\da-zA-Z-]*)$', stem),
+
# name-name-ver.ver-ver.ver
# e.g. sowing-1.1.23-p1, bib2xhtml-v3.0-15-gf506, 4.6.3-alpha04
(r'^(?:[a-zA-Z\d+-]+-)?v?(\d[\da-zA-Z.-]*)$', stem),
@@ -507,19 +535,17 @@ def parse_version_offset(path):
# e.g. STAR-CCM+11.06.010_02
(r'^[a-zA-Z+-]+(\d[\da-zA-Z._]*)$', stem),
+ # name-name_name-ver.ver
+ # e.g. PerlIO-utf8_strict-0.002
+ (r'^[a-zA-Z\d+_-]+-v?(\d[\da-zA-Z.]*)$', stem),
+
# 7th Pass: Specific VCS
# bazaar
# e.g. libvterm-0+bzr681
(r'bzr(\d[\da-zA-Z._-]*)$', stem),
- # 8th Pass: Version in path
-
- # github.com/repo/name/releases/download/vver/name
- # e.g. https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow
- (r'github\.com/[^/]+/[^/]+/releases/download/[a-zA-Z+._-]*v?(\d[\da-zA-Z._-]*)/', path), # noqa
-
- # 9th Pass: Query strings
+ # 8th Pass: Query strings
# e.g. https://gitlab.cosma.dur.ac.uk/api/v4/projects/swift%2Fswiftsim/repository/archive.tar.gz?sha=v0.3.0
(r'\?sha=[a-zA-Z+._-]*v?(\d[\da-zA-Z._-]*)$', suffix),
@@ -528,13 +554,24 @@ def parse_version_offset(path):
(r'\?ref=[a-zA-Z+._-]*v?(\d[\da-zA-Z._-]*)$', suffix),
# e.g. http://apps.fz-juelich.de/jsc/sionlib/download.php?version=1.7.1
- (r'\?version=v?(\d[\da-zA-Z._-]*)$', suffix),
+ # e.g. https://software.broadinstitute.org/gatk/download/auth?package=GATK-archive&version=3.8-1-0-gf15c1c3ef
+ (r'[?&]version=v?(\d[\da-zA-Z._-]*)$', suffix),
# e.g. http://slepc.upv.es/download/download.php?filename=slepc-3.6.2.tar.gz
- (r'\?filename=[a-zA-Z\d+-]+-v?(\d[\da-zA-Z.]*)$', stem),
+ # e.g. http://laws-green.lanl.gov/projects/data/eos/get_file.php?package=eospac&filename=eospac_v6.4.0beta.1_r20171213193219.tgz
+ (r'[?&]filename=[a-zA-Z\d+-]+[_-]v?(\d[\da-zA-Z.]*)', stem),
# e.g. http://wwwpub.zih.tu-dresden.de/%7Emlieber/dcount/dcount.php?package=otf&get=OTF-1.12.5salmon.tar.gz
- (r'\?package=[a-zA-Z\d+-]+&get=[a-zA-Z\d+-]+-v?(\d[\da-zA-Z.]*)$', stem), # noqa
+ (r'&get=[a-zA-Z\d+-]+-v?(\d[\da-zA-Z.]*)$', stem), # noqa
+
+ # 9th Pass: Version in path
+
+ # github.com/repo/name/releases/download/vver/name
+ # e.g. https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow
+ (r'github\.com/[^/]+/[^/]+/releases/download/[a-zA-Z+._-]*v?(\d[\da-zA-Z._-]*)/', path), # noqa
+
+ # e.g. ftp://ftp.ncbi.nlm.nih.gov/blast/executables/legacy.NOTSUPPORTED/2.2.26/ncbi.tar.gz
+ (r'(\d[\da-zA-Z._-]*)/[^/]+$', path),
]
for i, version_regex in enumerate(version_regexes):
@@ -662,6 +699,9 @@ def parse_name_offset(path, v=None):
# e.g. http://wwwpub.zih.tu-dresden.de/%7Emlieber/dcount/dcount.php?package=otf&get=OTF-1.12.5salmon.tar.gz
(r'\?package=([A-Za-z\d+-]+)', stem),
+ # ?package=name-version
+ (r'\?package=([A-Za-z\d]+)', suffix),
+
# download.php
# e.g. http://apps.fz-juelich.de/jsc/sionlib/download.php?version=1.7.1
(r'([^/]+)/download.php$', path),
diff --git a/lib/spack/spack/util/package_hash.py b/lib/spack/spack/util/package_hash.py
index 2a3ee80fd5..18b126486c 100644
--- a/lib/spack/spack/util/package_hash.py
+++ b/lib/spack/spack/util/package_hash.py
@@ -69,8 +69,17 @@ class TagMultiMethods(ast.NodeVisitor):
if node.decorator_list:
dec = node.decorator_list[0]
if isinstance(dec, ast.Call) and dec.func.id == 'when':
- cond = dec.args[0].s
- nodes.append((node, self.spec.satisfies(cond, strict=True)))
+ try:
+ cond = dec.args[0].s
+ nodes.append(
+ (node, self.spec.satisfies(cond, strict=True)))
+ except AttributeError:
+ # In this case the condition for the 'when' decorator is
+ # not a string literal (for example it may be a Python
+ # variable name). Therefore the function is added
+ # unconditionally since we don't know whether the
+ # constraint applies or not.
+ nodes.append((node, None))
else:
nodes.append((node, None))
diff --git a/lib/spack/spack/util/url.py b/lib/spack/spack/util/url.py
index 7ac12e7b81..29beb88ff9 100644
--- a/lib/spack/spack/util/url.py
+++ b/lib/spack/spack/util/url.py
@@ -9,6 +9,7 @@ Utility functions for parsing, formatting, and manipulating URLs.
import itertools
import os.path
+import re
from six import string_types
import six.moves.urllib.parse as urllib_parse
@@ -69,8 +70,7 @@ def parse(url, scheme='file'):
if scheme == 'file':
path = spack.util.path.canonicalize_path(netloc + path)
- while path.startswith('//'):
- path = path[1:]
+ path = re.sub(r'^/+', '/', path)
netloc = ''
return urllib_parse.ParseResult(scheme=scheme,
diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py
index 1fe58d6415..5c76deda2b 100644
--- a/lib/spack/spack/util/web.py
+++ b/lib/spack/spack/util/web.py
@@ -15,9 +15,6 @@ import ssl
import sys
import traceback
-from itertools import product
-
-import six
from six.moves.urllib.request import urlopen, Request
from six.moves.urllib.error import URLError
import multiprocessing.pool
@@ -50,30 +47,6 @@ from spack.util.compression import ALLOWED_ARCHIVE_TYPES
# Timeout in seconds for web requests
_timeout = 10
-# See docstring for standardize_header_names()
-_separators = ('', ' ', '_', '-')
-HTTP_HEADER_NAME_ALIASES = {
- "Accept-ranges": set(
- ''.join((A, 'ccept', sep, R, 'anges'))
- for A, sep, R in product('Aa', _separators, 'Rr')),
-
- "Content-length": set(
- ''.join((C, 'ontent', sep, L, 'ength'))
- for C, sep, L in product('Cc', _separators, 'Ll')),
-
- "Content-type": set(
- ''.join((C, 'ontent', sep, T, 'ype'))
- for C, sep, T in product('Cc', _separators, 'Tt')),
-
- "Date": set(('Date', 'date')),
-
- "Last-modified": set(
- ''.join((L, 'ast', sep, M, 'odified'))
- for L, sep, M in product('Ll', _separators, 'Mm')),
-
- "Server": set(('Server', 'server'))
-}
-
class LinkParser(HTMLParser):
"""This parser just takes an HTML page and strips out the hrefs on the
@@ -173,7 +146,7 @@ def read_from_url(url, accept_content_type=None):
req.get_method = lambda: "HEAD"
resp = _urlopen(req, timeout=_timeout, context=context)
- content_type = resp.headers.get('Content-type')
+ content_type = get_header(resp.headers, 'Content-type')
# Do the real GET request when we know it's just HTML.
req.get_method = lambda: "GET"
@@ -185,7 +158,7 @@ def read_from_url(url, accept_content_type=None):
ERROR=str(err)))
if accept_content_type and not is_web_url:
- content_type = response.headers.get('Content-type')
+ content_type = get_header(response.headers, 'Content-type')
reject_content_type = (
accept_content_type and (
@@ -208,9 +181,8 @@ def warn_no_ssl_cert_checking():
"your Python to enable certificate verification.")
-def push_to_url(local_file_path, remote_path, **kwargs):
- keep_original = kwargs.get('keep_original', True)
-
+def push_to_url(
+ local_file_path, remote_path, keep_original=True, extra_args=None):
remote_url = url_util.parse(remote_path)
verify_ssl = spack.config.get('config:verify_ssl')
@@ -235,7 +207,8 @@ def push_to_url(local_file_path, remote_path, **kwargs):
os.remove(local_file_path)
elif remote_url.scheme == 's3':
- extra_args = kwargs.get('extra_args', {})
+ if extra_args is None:
+ extra_args = {}
remote_path = remote_url.path
while remote_path.startswith('/'):
@@ -296,10 +269,25 @@ def remove_url(url):
# Don't even try for other URL schemes.
-def _list_s3_objects(client, url, num_entries, start_after=None):
+def _iter_s3_contents(contents, prefix):
+ for entry in contents:
+ key = entry['Key']
+
+ if not key.startswith('/'):
+ key = '/' + key
+
+ key = os.path.relpath(key, prefix)
+
+ if key == '.':
+ continue
+
+ yield key
+
+
+def _list_s3_objects(client, bucket, prefix, num_entries, start_after=None):
list_args = dict(
- Bucket=url.netloc,
- Prefix=url.path,
+ Bucket=bucket,
+ Prefix=prefix[1:],
MaxKeys=num_entries)
if start_after is not None:
@@ -311,21 +299,19 @@ def _list_s3_objects(client, url, num_entries, start_after=None):
if result['IsTruncated']:
last_key = result['Contents'][-1]['Key']
- iter = (key for key in
- (
- os.path.relpath(entry['Key'], url.path)
- for entry in result['Contents']
- )
- if key != '.')
+ iter = _iter_s3_contents(result['Contents'], prefix)
return iter, last_key
def _iter_s3_prefix(client, url, num_entries=1024):
key = None
+ bucket = url.netloc
+ prefix = re.sub(r'^/*', '/', url.path)
+
while True:
contents, key = _list_s3_objects(
- client, url, num_entries, start_after=key)
+ client, bucket, prefix, num_entries, start_after=key)
for x in contents:
yield x
@@ -577,106 +563,34 @@ def find_versions_of_archive(archive_urls, list_url=None, list_depth=0):
return versions
-def standardize_header_names(headers):
- """Replace certain header names with standardized spellings.
-
- Standardizes the spellings of the following header names:
- - Accept-ranges
- - Content-length
- - Content-type
- - Date
- - Last-modified
- - Server
-
- Every name considered is translated to one of the above names if the only
- difference between the two is how the first letters of each word are
- capitalized; whether words are separated; or, if separated, whether they
- are so by a dash (-), underscore (_), or space ( ). Header names that
- cannot be mapped as described above are returned unaltered.
-
- For example: The standard spelling of "Content-length" would be substituted
- for any of the following names:
- - Content-length
- - content_length
- - contentlength
- - content_Length
- - contentLength
- - content Length
-
- ... and any other header name, such as "Content-encoding", would not be
- altered, regardless of spelling.
-
- If headers is a string, then it (or an appropriate substitute) is returned.
-
- If headers is a non-empty tuple, headers[0] is a string, and there exists a
- standardized spelling for header[0] that differs from it, then a new tuple
- is returned. This tuple has the same elements as headers, except the first
- element is the standardized spelling for headers[0].
-
- If headers is a sequence, then a new list is considered, where each element
- is its corresponding element in headers, but mapped as above if a string or
- tuple. This new list is returned if at least one of its elements differ
- from their corrsponding element in headers.
-
- If headers is a mapping, then a new dict is considered, where the key in
- each item is the key of its corresponding item in headers, mapped as above
- if a string or tuple. The value is taken from the corresponding item. If
- the keys of multiple items in headers map to the same key after being
- standardized, then the value for the resulting item is undefined. The new
- dict is returned if at least one of its items has a key that differs from
- that of their corresponding item in headers, or if the keys of multiple
- items in headers map to the same key after being standardized.
-
- In all other cases headers is returned unaltered.
- """
- if isinstance(headers, six.string_types):
- for standardized_spelling, other_spellings in (
- HTTP_HEADER_NAME_ALIASES.items()):
- if headers in other_spellings:
- if headers == standardized_spelling:
- return headers
- return standardized_spelling
- return headers
-
- if isinstance(headers, tuple):
- if not headers:
- return headers
- old = headers[0]
- if isinstance(old, six.string_types):
- new = standardize_header_names(old)
- if old is not new:
- return (new,) + headers[1:]
- return headers
-
- try:
- changed = False
- new_dict = {}
- for key, value in headers.items():
- if isinstance(key, (tuple, six.string_types)):
- old_key, key = key, standardize_header_names(key)
- changed = changed or key is not old_key
-
- new_dict[key] = value
+def get_header(headers, header_name):
+ """Looks up a dict of headers for the given header value.
- return new_dict if changed else headers
- except (AttributeError, TypeError, ValueError):
- pass
+ Looks up a dict of headers, [headers], for a header value given by
+ [header_name]. Returns headers[header_name] if header_name is in headers.
+ Otherwise, the first fuzzy match is returned, if any.
- try:
- changed = False
- new_list = []
- for item in headers:
- if isinstance(item, (tuple, six.string_types)):
- old_item, item = item, standardize_header_names(item)
- changed = changed or item is not old_item
+ This fuzzy matching is performed by discarding word separators and
+ capitalization, so that for example, "Content-length", "content_length",
+ "conTENtLength", etc., all match. In the case of multiple fuzzy-matches,
+ the returned value is the "first" such match given the underlying mapping's
+ ordering, or unspecified if no such ordering is defined.
- new_list.append(item)
+ If header_name is not in headers, and no such fuzzy match exists, then a
+ KeyError is raised.
+ """
- return new_list if changed else headers
- except TypeError:
- pass
+ def unfuzz(header):
+ return re.sub(r'[ _-]', '', header).lower()
- return headers
+ try:
+ return headers[header_name]
+ except KeyError:
+ unfuzzed_header_name = unfuzz(header_name)
+ for header, value in headers.items():
+ if unfuzz(header) == unfuzzed_header_name:
+ return value
+ raise
class SpackWebError(spack.error.SpackError):
diff --git a/lib/spack/spack/variant.py b/lib/spack/spack/variant.py
index 7eea243b06..bdee9d3552 100644
--- a/lib/spack/spack/variant.py
+++ b/lib/spack/spack/variant.py
@@ -600,7 +600,9 @@ def substitute_abstract_variants(spec):
for name, v in spec.variants.items():
if name in spack.directives.reserved_names:
continue
- pkg_variant = spec.package_class.variants[name]
+ pkg_variant = spec.package_class.variants.get(name, None)
+ if not pkg_variant:
+ raise UnknownVariantError(spec, [name])
new_variant = pkg_variant.make_variant(v._original_value)
pkg_variant.validate_or_raise(new_variant, spec.package_class)
spec.variants.substitute(new_variant)
@@ -778,12 +780,13 @@ class DuplicateVariantError(error.SpecError):
class UnknownVariantError(error.SpecError):
"""Raised when an unknown variant occurs in a spec."""
-
- def __init__(self, pkg, variants):
+ def __init__(self, spec, variants):
self.unknown_variants = variants
- super(UnknownVariantError, self).__init__(
- 'Package {0} has no variant {1}!'.format(pkg, comma_or(variants))
- )
+ variant_str = 'variant' if len(variants) == 1 else 'variants'
+ msg = ('trying to set {0} "{1}" in package "{2}", but the package'
+ ' has no such {0} [happened during concretization of {3}]')
+ msg = msg.format(variant_str, comma_or(variants), spec.name, spec.root)
+ super(UnknownVariantError, self).__init__(msg)
class InconsistentValidationError(error.SpecError):