summaryrefslogtreecommitdiff
path: root/lib
diff options
context:
space:
mode:
Diffstat (limited to 'lib')
-rw-r--r--lib/spack/spack/dependency.py48
-rw-r--r--lib/spack/spack/directives.py123
-rw-r--r--lib/spack/spack/package.py78
-rw-r--r--lib/spack/spack/patch.py93
-rw-r--r--lib/spack/spack/spec.py124
-rw-r--r--lib/spack/spack/test/cmd/dependents.py6
-rw-r--r--lib/spack/spack/test/conftest.py5
-rw-r--r--lib/spack/spack/test/data/patch/bar.txt1
-rw-r--r--lib/spack/spack/test/data/patch/foo.patch7
-rw-r--r--lib/spack/spack/test/data/patch/foo.tgzbin116 -> 229 bytes
-rw-r--r--lib/spack/spack/test/patch.py198
-rw-r--r--lib/spack/spack/test/spec_dag.py4
-rw-r--r--lib/spack/spack/variant.py23
13 files changed, 555 insertions, 155 deletions
diff --git a/lib/spack/spack/dependency.py b/lib/spack/spack/dependency.py
index 996dfa20c4..40494476bf 100644
--- a/lib/spack/spack/dependency.py
+++ b/lib/spack/spack/dependency.py
@@ -26,6 +26,9 @@
"""
from six import string_types
+import spack
+
+
#: The types of dependency relationships that Spack understands.
all_deptypes = ('build', 'link', 'run', 'test')
@@ -78,13 +81,52 @@ class Dependency(object):
e.g. whether it is required for building the package, whether it
needs to be linked to, or whether it is needed at runtime so that
Spack can call commands from it.
+
+ A package can also depend on another package with *patches*. This is
+ for cases where the maintainers of one package also maintain special
+ patches for their dependencies. If one package depends on another
+ with patches, a special version of that dependency with patches
+ applied will be built for use by the dependent package. The patches
+ are included in the new version's spec hash to differentiate it from
+ unpatched versions of the same package, so that unpatched versions of
+ the dependency package can coexist with the patched version.
+
"""
- def __init__(self, spec, type=default_deptype):
+ def __init__(self, pkg, spec, type=default_deptype):
"""Create a new Dependency.
Args:
+ pkg (type): Package that has this dependency
spec (Spec): Spec indicating dependency requirements
type (sequence): strings describing dependency relationship
"""
- self.spec = spec
- self.type = set(type)
+ assert isinstance(spec, spack.spec.Spec)
+
+ self.pkg = pkg
+ self.spec = spec.copy()
+
+ # This dict maps condition specs to lists of Patch objects, just
+ # as the patches dict on packages does.
+ self.patches = {}
+
+ if type is None:
+ self.type = set(default_deptype)
+ else:
+ self.type = set(type)
+
+ @property
+ def name(self):
+ """Get the name of the dependency package."""
+ return self.spec.name
+
+ def merge(self, other):
+ """Merge constraints, deptypes, and patches of other into self."""
+ self.spec.constrain(other.spec)
+ self.type |= other.type
+
+ # concatenate patch lists, or just copy them in
+ for cond, p in other.patches.items():
+ if cond in self.patches:
+ self.patches[cond].extend(other.patches[cond])
+ else:
+ self.patches[cond] = other.patches[cond]
diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py
index 57063c7f63..cf12b16dd6 100644
--- a/lib/spack/spack/directives.py
+++ b/lib/spack/spack/directives.py
@@ -94,30 +94,26 @@ class DirectiveMetaMixin(type):
try:
directive_from_base = base._directives_to_be_executed
attr_dict['_directives_to_be_executed'].extend(
- directive_from_base
- )
+ directive_from_base)
except AttributeError:
# The base class didn't have the required attribute.
# Continue searching
pass
+
# De-duplicates directives from base classes
attr_dict['_directives_to_be_executed'] = [
x for x in llnl.util.lang.dedupe(
- attr_dict['_directives_to_be_executed']
- )
- ]
+ attr_dict['_directives_to_be_executed'])]
# Move things to be executed from module scope (where they
# are collected first) to class scope
if DirectiveMetaMixin._directives_to_be_executed:
attr_dict['_directives_to_be_executed'].extend(
- DirectiveMetaMixin._directives_to_be_executed
- )
+ DirectiveMetaMixin._directives_to_be_executed)
DirectiveMetaMixin._directives_to_be_executed = []
return super(DirectiveMetaMixin, mcs).__new__(
- mcs, name, bases, attr_dict
- )
+ mcs, name, bases, attr_dict)
def __init__(cls, name, bases, attr_dict):
# The class is being created: if it is a package we must ensure
@@ -128,14 +124,20 @@ class DirectiveMetaMixin(type):
# from llnl.util.lang.get_calling_module_name
pkg_name = module.__name__.split('.')[-1]
setattr(cls, 'name', pkg_name)
+
# Ensure the presence of the dictionaries associated
# with the directives
for d in DirectiveMetaMixin._directive_names:
setattr(cls, d, {})
- # Lazy execution of directives
+
+ # Lazily execute directives
for directive in cls._directives_to_be_executed:
directive(cls)
+ # Ignore any directives executed *within* top-level
+ # directives by clearing out the queue they're appended to
+ DirectiveMetaMixin._directives_to_be_executed = []
+
super(DirectiveMetaMixin, cls).__init__(name, bases, attr_dict)
@staticmethod
@@ -194,15 +196,44 @@ class DirectiveMetaMixin(type):
@functools.wraps(decorated_function)
def _wrapper(*args, **kwargs):
+ # If any of the arguments are executors returned by a
+ # directive passed as an argument, don't execute them
+ # lazily. Instead, let the called directive handle them.
+ # This allows nested directive calls in packages. The
+ # caller can return the directive if it should be queued.
+ def remove_directives(arg):
+ directives = DirectiveMetaMixin._directives_to_be_executed
+ if isinstance(arg, (list, tuple)):
+ # Descend into args that are lists or tuples
+ for a in arg:
+ remove_directives(a)
+ else:
+ # Remove directives args from the exec queue
+ remove = next(
+ (d for d in directives if d is arg), None)
+ if remove is not None:
+ directives.remove(remove)
+
+ # Nasty, but it's the best way I can think of to avoid
+ # side effects if directive results are passed as args
+ remove_directives(args)
+ remove_directives(kwargs.values())
+
# A directive returns either something that is callable on a
# package or a sequence of them
- values = decorated_function(*args, **kwargs)
+ result = decorated_function(*args, **kwargs)
# ...so if it is not a sequence make it so
+ values = result
if not isinstance(values, collections.Sequence):
values = (values, )
DirectiveMetaMixin._directives_to_be_executed.extend(values)
+
+ # wrapped function returns same result as original so
+ # that we can nest directives
+ return result
+
return _wrapper
return _decorator
@@ -229,7 +260,7 @@ def version(ver, checksum=None, **kwargs):
return _execute_version
-def _depends_on(pkg, spec, when=None, type=default_deptype):
+def _depends_on(pkg, spec, when=None, type=default_deptype, patches=None):
# If when is False do nothing
if when is False:
return
@@ -245,13 +276,36 @@ def _depends_on(pkg, spec, when=None, type=default_deptype):
type = canonical_deptype(type)
conditions = pkg.dependencies.setdefault(dep_spec.name, {})
+
+ # call this patches here for clarity -- we want patch to be a list,
+ # but the caller doesn't have to make it one.
+ if patches and dep_spec.virtual:
+ raise DependencyPatchError("Cannot patch a virtual dependency.")
+
+ # ensure patches is a list
+ if patches is None:
+ patches = []
+ elif not isinstance(patches, (list, tuple)):
+ patches = [patches]
+
+ # auto-call patch() directive on any strings in patch list
+ patches = [patch(p) if isinstance(p, string_types)
+ else p for p in patches]
+ assert all(callable(p) for p in patches)
+
+ # this is where we actually add the dependency to this package
if when_spec not in conditions:
- conditions[when_spec] = Dependency(dep_spec, type)
+ dependency = Dependency(pkg, dep_spec, type=type)
+ conditions[when_spec] = dependency
else:
dependency = conditions[when_spec]
dependency.spec.constrain(dep_spec, deps=False)
dependency.type |= set(type)
+ # apply patches to the dependency
+ for execute_patch in patches:
+ execute_patch(dependency)
+
@directive('conflicts')
def conflicts(conflict_spec, when=None, msg=None):
@@ -285,13 +339,24 @@ def conflicts(conflict_spec, when=None, msg=None):
@directive(('dependencies'))
-def depends_on(spec, when=None, type=default_deptype):
+def depends_on(spec, when=None, type=default_deptype, patches=None):
"""Creates a dict of deps with specs defining when they apply.
+
+ Args:
+ spec (Spec or str): the package and constraints depended on
+ when (Spec or str): when the dependent satisfies this, it has
+ the dependency represented by ``spec``
+ type (str or tuple of str): str or tuple of legal Spack deptypes
+ patches (obj or list): single result of ``patch()`` directive, a
+ ``str`` to be passed to ``patch``, or a list of these
+
This directive is to be used inside a Package definition to declare
that the package requires other packages to be built first.
- @see The section "Dependency specs" in the Spack Packaging Guide."""
+ @see The section "Dependency specs" in the Spack Packaging Guide.
+
+ """
def _execute_depends_on(pkg):
- _depends_on(pkg, spec, when=when, type=type)
+ _depends_on(pkg, spec, when=when, type=type, patches=patches)
return _execute_depends_on
@@ -356,20 +421,23 @@ def patch(url_or_filename, level=1, when=None, **kwargs):
level (int): patch level (as in the patch shell command)
when (Spec): optional anonymous spec that specifies when to apply
the patch
- **kwargs: the following list of keywords is supported
- - md5 (str): md5 sum of the patch (used to verify the file
- if it comes from a url)
+ Keyword Args:
+ sha256 (str): sha256 sum of the patch, used to verify the patch
+ (only required for URL patches)
+ archive_sha256 (str): sha256 sum of the *archive*, if the patch
+ is compressed (only required for compressed URL patches)
"""
- def _execute_patch(pkg):
- constraint = pkg.name if when is None else when
- when_spec = parse_anonymous_spec(constraint, pkg.name)
+ def _execute_patch(pkg_or_dep):
+ constraint = pkg_or_dep.name if when is None else when
+ when_spec = parse_anonymous_spec(constraint, pkg_or_dep.name)
# if this spec is identical to some other, then append this
# patch to the existing list.
- cur_patches = pkg.patches.setdefault(when_spec, [])
- cur_patches.append(Patch.create(pkg, url_or_filename, level, **kwargs))
+ cur_patches = pkg_or_dep.patches.setdefault(when_spec, [])
+ cur_patches.append(
+ Patch.create(pkg_or_dep, url_or_filename, level, **kwargs))
return _execute_patch
@@ -381,8 +449,7 @@ def variant(
description='',
values=None,
multi=False,
- validator=None
-):
+ validator=None):
"""Define a variant for the package. Packager can specify a default
value as well as a text description.
@@ -484,3 +551,7 @@ class DirectiveError(spack.error.SpackError):
class CircularReferenceError(DirectiveError):
"""This is raised when something depends on itself."""
+
+
+class DependencyPatchError(DirectiveError):
+ """Raised for errors with patching dependencies."""
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index 1c722082e8..57653d7d8b 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -542,9 +542,12 @@ class PackageBase(with_metaclass(PackageMeta, object)):
#: Defaults to the empty string.
license_url = ''
- # Verbosity level, preserved across installs.
+ #: Verbosity level, preserved across installs.
_verbose = None
+ #: index of patches by sha256 sum, built lazily
+ _patches_by_hash = None
+
#: List of strings which contains GitHub usernames of package maintainers.
#: Do not include @ here in order not to unnecessarily ping the users.
maintainers = []
@@ -642,7 +645,7 @@ class PackageBase(with_metaclass(PackageMeta, object)):
@property
def package_dir(self):
"""Return the directory where the package.py file lives."""
- return os.path.dirname(self.module.__file__)
+ return os.path.abspath(os.path.dirname(self.module.__file__))
@property
def global_license_dir(self):
@@ -990,9 +993,42 @@ class PackageBase(with_metaclass(PackageMeta, object)):
self.stage.expand_archive()
self.stage.chdir_to_source()
+ @classmethod
+ def lookup_patch(cls, sha256):
+ """Look up a patch associated with this package by its sha256 sum.
+
+ Args:
+ sha256 (str): sha256 sum of the patch to look up
+
+ Returns:
+ (Patch): ``Patch`` object with the given hash, or ``None`` if
+ not found.
+
+ To do the lookup, we build an index lazily. This allows us to
+ avoid computing a sha256 for *every* patch and on every package
+ load. With lazy hashing, we only compute hashes on lookup, which
+ usually happens at build time.
+
+ """
+ if cls._patches_by_hash is None:
+ cls._patches_by_hash = {}
+
+ # Add patches from the class
+ for cond, patch_list in cls.patches.items():
+ for patch in patch_list:
+ cls._patches_by_hash[patch.sha256] = patch
+
+ # and patches on dependencies
+ for name, conditions in cls.dependencies.items():
+ for cond, dependency in conditions.items():
+ for pcond, patch_list in dependency.patches.items():
+ for patch in patch_list:
+ cls._patches_by_hash[patch.sha256] = patch
+
+ return cls._patches_by_hash.get(sha256, None)
+
def do_patch(self):
- """Calls do_stage(), then applied patches to the expanded tarball if they
- haven't been applied already."""
+ """Applies patches if they haven't been applied already."""
if not self.spec.concrete:
raise ValueError("Can only patch concrete packages.")
@@ -1002,8 +1038,11 @@ class PackageBase(with_metaclass(PackageMeta, object)):
# Package can add its own patch function.
has_patch_fun = hasattr(self, 'patch') and callable(self.patch)
+ # Get the patches from the spec (this is a shortcut for the MV-variant)
+ patches = self.spec.patches
+
# If there are no patches, note it.
- if not self.patches and not has_patch_fun:
+ if not patches and not has_patch_fun:
tty.msg("No patches needed for %s" % self.name)
return
@@ -1032,18 +1071,16 @@ class PackageBase(with_metaclass(PackageMeta, object)):
# Apply all the patches for specs that match this one
patched = False
- for spec, patch_list in self.patches.items():
- if self.spec.satisfies(spec):
- for patch in patch_list:
- try:
- patch.apply(self.stage)
- tty.msg('Applied patch %s' % patch.path_or_url)
- patched = True
- except:
- # Touch bad file if anything goes wrong.
- tty.msg('Patch %s failed.' % patch.path_or_url)
- touch(bad_file)
- raise
+ for patch in patches:
+ try:
+ patch.apply(self.stage)
+ tty.msg('Applied patch %s' % patch.path_or_url)
+ patched = True
+ except:
+ # Touch bad file if anything goes wrong.
+ tty.msg('Patch %s failed.' % patch.path_or_url)
+ touch(bad_file)
+ raise
if has_patch_fun:
try:
@@ -1054,9 +1091,10 @@ class PackageBase(with_metaclass(PackageMeta, object)):
# We are running a multimethod without a default case.
# If there's no default it means we don't need to patch.
if not patched:
- # if we didn't apply a patch, AND the patch function
- # didn't apply, say no patches are needed.
- # Otherwise, we already said we applied each patch.
+ # if we didn't apply a patch from a patch()
+ # directive, AND the patch function didn't apply, say
+ # no patches are needed. Otherwise, we already
+ # printed a message for each patch.
tty.msg("No patches needed for %s" % self.name)
except:
tty.msg("patch() function failed for %s" % self.name)
diff --git a/lib/spack/spack/patch.py b/lib/spack/spack/patch.py
index 3f0596b23a..2f52fbdc9f 100644
--- a/lib/spack/spack/patch.py
+++ b/lib/spack/spack/patch.py
@@ -25,13 +25,16 @@
import os
import os.path
import inspect
+import hashlib
import spack
import spack.error
import spack.fetch_strategy as fs
import spack.stage
+from spack.util.crypto import checksum, Checker
from llnl.util.filesystem import working_dir
from spack.util.executable import which
+from spack.util.compression import allowed_archive
def absolute_path_for_package(pkg):
@@ -39,8 +42,10 @@ def absolute_path_for_package(pkg):
the recipe for the package passed as argument.
Args:
- pkg: a valid package object
+ pkg: a valid package object, or a Dependency object.
"""
+ if isinstance(pkg, spack.dependency.Dependency):
+ pkg = pkg.pkg
m = inspect.getmodule(pkg)
return os.path.abspath(m.__file__)
@@ -51,7 +56,7 @@ class Patch(object):
"""
@staticmethod
- def create(pkg, path_or_url, level, **kwargs):
+ def create(pkg, path_or_url, level=1, **kwargs):
"""
Factory method that creates an instance of some class derived from
Patch
@@ -59,18 +64,18 @@ class Patch(object):
Args:
pkg: package that needs to be patched
path_or_url: path or url where the patch is found
- level: patch level
+ level: patch level (default 1)
Returns:
instance of some Patch class
"""
# Check if we are dealing with a URL
if '://' in path_or_url:
- return UrlPatch(pkg, path_or_url, level, **kwargs)
+ return UrlPatch(path_or_url, level, **kwargs)
# Assume patches are stored in the repository
return FilePatch(pkg, path_or_url, level)
- def __init__(self, pkg, path_or_url, level):
+ def __init__(self, path_or_url, level):
# Check on level (must be an integer > 0)
if not isinstance(level, int) or not level >= 0:
raise ValueError("Patch level needs to be a non-negative integer.")
@@ -100,20 +105,39 @@ class Patch(object):
class FilePatch(Patch):
"""Describes a patch that is retrieved from a file in the repository"""
def __init__(self, pkg, path_or_url, level):
- super(FilePatch, self).__init__(pkg, path_or_url, level)
+ super(FilePatch, self).__init__(path_or_url, level)
pkg_dir = os.path.dirname(absolute_path_for_package(pkg))
self.path = os.path.join(pkg_dir, path_or_url)
if not os.path.isfile(self.path):
- raise NoSuchPatchFileError(pkg.name, self.path)
+ raise NoSuchPatchError(
+ "No such patch for package %s: %s" % (pkg.name, self.path))
+ self._sha256 = None
+
+ @property
+ def sha256(self):
+ if self._sha256 is None:
+ self._sha256 = checksum(hashlib.sha256, self.path)
+ return self._sha256
class UrlPatch(Patch):
"""Describes a patch that is retrieved from a URL"""
- def __init__(self, pkg, path_or_url, level, **kwargs):
- super(UrlPatch, self).__init__(pkg, path_or_url, level)
+ def __init__(self, path_or_url, level, **kwargs):
+ super(UrlPatch, self).__init__(path_or_url, level)
self.url = path_or_url
- self.md5 = kwargs.get('md5')
+
+ self.archive_sha256 = None
+ if allowed_archive(self.url):
+ if 'archive_sha256' not in kwargs:
+ raise PatchDirectiveError(
+ "Compressed patches require 'archive_sha256' "
+ "and patch 'sha256' attributes: %s" % self.url)
+ self.archive_sha256 = kwargs.get('archive_sha256')
+
+ if 'sha256' not in kwargs:
+ raise PatchDirectiveError("URL patches require a sha256 checksum")
+ self.sha256 = kwargs.get('sha256')
def apply(self, stage):
"""Retrieve the patch in a temporary stage, computes
@@ -122,7 +146,12 @@ class UrlPatch(Patch):
Args:
stage: stage for the package that needs to be patched
"""
- fetcher = fs.URLFetchStrategy(self.url, digest=self.md5)
+ # use archive digest for compressed archives
+ fetch_digest = self.sha256
+ if self.archive_sha256:
+ fetch_digest = self.archive_sha256
+
+ fetcher = fs.URLFetchStrategy(self.url, digest=fetch_digest)
mirror = os.path.join(
os.path.dirname(stage.mirror_path),
os.path.basename(self.url))
@@ -132,20 +161,40 @@ class UrlPatch(Patch):
patch_stage.check()
patch_stage.cache_local()
- if spack.util.compression.allowed_archive(self.url):
+ root = patch_stage.path
+ if self.archive_sha256:
patch_stage.expand_archive()
-
- self.path = os.path.abspath(
- os.listdir(patch_stage.path).pop())
+ root = patch_stage.source_path
+
+ files = os.listdir(root)
+ if not files:
+ if self.archive_sha256:
+ raise NoSuchPatchError(
+ "Archive was empty: %s" % self.url)
+ else:
+ raise NoSuchPatchError(
+ "Patch failed to download: %s" % self.url)
+
+ self.path = os.path.join(root, files.pop())
+
+ if not os.path.isfile(self.path):
+ raise NoSuchPatchError(
+ "Archive %s contains no patch file!" % self.url)
+
+        # For a compressed archive, the fetch digest only verified the
+        # archive itself, and the patch file lives inside the expanded
+        # directory, so verify the patch's own sha256 here.
+ if self.archive_sha256:
+ if not Checker(self.sha256).check(self.path):
+ raise fs.ChecksumError(
+ "sha256 checksum failed for %s" % self.path,
+ "Expected %s but got %s" % (self.sha256, checker.sum))
super(UrlPatch, self).apply(stage)
-class NoSuchPatchFileError(spack.error.SpackError):
- """Raised when user specifies a patch file that doesn't exist."""
+class NoSuchPatchError(spack.error.SpackError):
+ """Raised when a patch file doesn't exist."""
+
- def __init__(self, package, path):
- super(NoSuchPatchFileError, self).__init__(
- "No such patch file for package %s: %s" % (package, path))
- self.package = package
- self.path = path
+class PatchDirectiveError(spack.error.SpackError):
+    """Raised when the wrong arguments are supplied to the patch directive."""
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index dc2042a7f4..d919aeecb7 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -1488,8 +1488,7 @@ class Spec(object):
spec.compiler_flags[name] = value
else:
spec.variants[name] = MultiValuedVariant.from_node_dict(
- name, value
- )
+ name, value)
elif 'variants' in node:
for name, value in node['variants'].items():
spec.variants[name] = MultiValuedVariant.from_node_dict(
@@ -1806,6 +1805,43 @@ class Spec(object):
if s.namespace is None:
s.namespace = spack.repo.repo_for_pkg(s.name).namespace
+ # Add any patches from the package to the spec.
+ patches = []
+ for cond, patch_list in s.package_class.patches.items():
+ if s.satisfies(cond):
+ for patch in patch_list:
+ patches.append(patch.sha256)
+ if patches:
+ # Special-case: keeps variant values unique but ordered.
+ s.variants['patches'] = MultiValuedVariant('patches', ())
+ mvar = s.variants['patches']
+ mvar._value = mvar._original_value = tuple(dedupe(patches))
+
+ # Apply patches required on dependencies by depends_on(..., patch=...)
+ for dspec in self.traverse_edges(deptype=all,
+ cover='edges', root=False):
+ pkg_deps = dspec.parent.package_class.dependencies
+ if dspec.spec.name not in pkg_deps:
+ continue
+
+ patches = []
+ for cond, dependency in pkg_deps[dspec.spec.name].items():
+ if dspec.parent.satisfies(cond):
+ for pcond, patch_list in dependency.patches.items():
+ if dspec.spec.satisfies(pcond):
+ for patch in patch_list:
+ patches.append(patch.sha256)
+ if patches:
+ # note that we use a special multi-valued variant and
+ # keep the patches ordered.
+ if 'patches' not in dspec.spec.variants:
+ mvar = MultiValuedVariant('patches', ())
+ dspec.spec.variants['patches'] = mvar
+ else:
+ mvar = dspec.spec.variants['patches']
+ mvar._value = mvar._original_value = tuple(
+ dedupe(list(mvar._value) + patches))
+
for s in self.traverse():
if s.external_module:
compiler = spack.compilers.compiler_for_spec(
@@ -1908,7 +1944,7 @@ class Spec(object):
name (str): name of dependency to evaluate conditions on.
Returns:
- (tuple): tuple of ``Spec`` and tuple of ``deptypes``.
+ (Dependency): new Dependency object combining all constraints.
If the package depends on <name> in the current spec
configuration, return the constrained dependency and
@@ -1922,21 +1958,19 @@ class Spec(object):
substitute_abstract_variants(self)
# evaluate when specs to figure out constraints on the dependency.
- dep, deptypes = None, None
+ dep = None
for when_spec, dependency in conditions.items():
if self.satisfies(when_spec, strict=True):
if dep is None:
- dep = Spec(name)
- deptypes = set()
+ dep = Dependency(self.name, Spec(name), type=())
try:
- dep.constrain(dependency.spec)
- deptypes |= dependency.type
+ dep.merge(dependency)
except UnsatisfiableSpecError as e:
e.message = ("Conflicting conditional dependencies on"
"package %s for spec %s" % (self.name, self))
raise e
- return dep, deptypes
+ return dep
def _find_provider(self, vdep, provider_index):
"""Find provider for a virtual spec in the provider index.
@@ -1971,15 +2005,26 @@ class Spec(object):
elif required:
raise UnsatisfiableProviderSpecError(required[0], vdep)
- def _merge_dependency(self, dep, deptypes, visited, spec_deps,
- provider_index):
- """Merge the dependency into this spec.
+ def _merge_dependency(
+ self, dependency, visited, spec_deps, provider_index):
+ """Merge dependency information from a Package into this Spec.
- Caller should assume that this routine can owns the dep parameter
- (i.e. it needs to be a copy of any internal structures like
- dependencies on Package class objects).
-
- This is the core of normalize(). There are some basic steps:
+ Args:
+ dependency (Dependency): dependency metadata from a package;
+ this is typically the result of merging *all* matching
+ dependency constraints from the package.
+ visited (set): set of dependency nodes already visited by
+ ``normalize()``.
+ spec_deps (dict): ``dict`` of all dependencies from the spec
+ being normalized.
+ provider_index (dict): ``provider_index`` of virtual dep
+ providers in the ``Spec`` as normalized so far.
+
+ NOTE: Caller should assume that this routine owns the
+ ``dependency`` parameter, i.e., it needs to be a copy of any
+ internal structures.
+
+ This is the core of ``normalize()``. There are some basic steps:
* If dep is virtual, evaluate whether it corresponds to an
existing concrete dependency, and merge if so.
@@ -1994,6 +2039,7 @@ class Spec(object):
"""
changed = False
+ dep = dependency.spec
# If it's a virtual dependency, try to find an existing
# provider in the spec, and merge that.
@@ -2045,11 +2091,11 @@ class Spec(object):
raise
# Add merged spec to my deps and recurse
- dependency = spec_deps[dep.name]
+ spec_dependency = spec_deps[dep.name]
if dep.name not in self._dependencies:
- self._add_dependency(dependency, deptypes)
+ self._add_dependency(spec_dependency, dependency.type)
- changed |= dependency._normalize_helper(
+ changed |= spec_dependency._normalize_helper(
visited, spec_deps, provider_index)
return changed
@@ -2074,12 +2120,12 @@ class Spec(object):
changed = False
for dep_name in pkg.dependencies:
# Do we depend on dep_name? If so pkg_dep is not None.
- dep, deptypes = self._evaluate_dependency_conditions(dep_name)
+ dep = self._evaluate_dependency_conditions(dep_name)
# If dep is a needed dependency, merge it.
if dep and (spack.package_testing.check(self.name) or
- set(deptypes) - set(['test'])):
+ set(dep.type) - set(['test'])):
changed |= self._merge_dependency(
- dep, deptypes, visited, spec_deps, provider_index)
+ dep, visited, spec_deps, provider_index)
any_change |= changed
return any_change
@@ -2463,6 +2509,35 @@ class Spec(object):
"""Return list of any virtual deps in this spec."""
return [spec for spec in self.traverse() if spec.virtual]
+ @property
+ def patches(self):
+ """Return patch objects for any patch sha256 sums on this Spec.
+
+ This is for use after concretization to iterate over any patches
+ associated with this spec.
+
+ TODO: this only checks in the package; it doesn't resurrect old
+ patches from install directories, but it probably should.
+ """
+ if 'patches' not in self.variants:
+ return []
+
+ patches = []
+ for sha256 in self.variants['patches'].value:
+ patch = self.package.lookup_patch(sha256)
+ if patch:
+ patches.append(patch)
+ continue
+
+ # if not found in this package, check immediate dependents
+ # for dependency patches
+ for dep in self._dependents:
+ patch = dep.parent.package.lookup_patch(sha256)
+ if patch:
+ patches.append(patch)
+
+ return patches
+
def _dup(self, other, deps=True, cleardeps=True, caches=None):
"""Copy the spec other into self. This is an overwriting
copy. It does not copy any dependents (parents), but by default
@@ -2549,7 +2624,8 @@ class Spec(object):
def _dup_deps(self, other, deptypes, caches):
new_specs = {self.name: self}
- for dspec in other.traverse_edges(cover='edges', root=False):
+ for dspec in other.traverse_edges(cover='edges',
+ root=False):
if (dspec.deptypes and
not any(d in deptypes for d in dspec.deptypes)):
continue
diff --git a/lib/spack/spack/test/cmd/dependents.py b/lib/spack/spack/test/cmd/dependents.py
index c43270a2af..00a8f8168d 100644
--- a/lib/spack/spack/test/cmd/dependents.py
+++ b/lib/spack/spack/test/cmd/dependents.py
@@ -35,7 +35,8 @@ dependents = SpackCommand('dependents')
def test_immediate_dependents(builtin_mock):
out = dependents('libelf')
actual = set(re.split(r'\s+', out.strip()))
- assert actual == set(['dyninst', 'libdwarf'])
+ assert actual == set(['dyninst', 'libdwarf',
+ 'patch-a-dependency', 'patch-several-dependencies'])
def test_transitive_dependents(builtin_mock):
@@ -43,7 +44,8 @@ def test_transitive_dependents(builtin_mock):
actual = set(re.split(r'\s+', out.strip()))
assert actual == set(
['callpath', 'dyninst', 'libdwarf', 'mpileaks', 'multivalue_variant',
- 'singlevalue-variant-dependent'])
+ 'singlevalue-variant-dependent',
+ 'patch-a-dependency', 'patch-several-dependencies'])
def test_immediate_installed_dependents(builtin_mock, database):
diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py
index 16ac5dfe86..80e93e8944 100644
--- a/lib/spack/spack/test/conftest.py
+++ b/lib/spack/spack/test/conftest.py
@@ -572,7 +572,7 @@ class MockPackage(object):
assert len(dependencies) == len(dependency_types)
for dep, dtype in zip(dependencies, dependency_types):
- d = Dependency(Spec(dep.name), type=dtype)
+ d = Dependency(self, Spec(dep.name), type=dtype)
if not conditions or dep.name not in conditions:
self.dependencies[dep.name] = {Spec(name): d}
else:
@@ -587,12 +587,15 @@ class MockPackage(object):
self.variants = {}
self.provided = {}
self.conflicts = {}
+ self.patches = {}
class MockPackageMultiRepo(object):
def __init__(self, packages):
self.spec_to_pkg = dict((x.name, x) for x in packages)
+ self.spec_to_pkg.update(
+ dict(('mockrepo.' + x.name, x) for x in packages))
def get(self, spec):
if not isinstance(spec, spack.spec.Spec):
diff --git a/lib/spack/spack/test/data/patch/bar.txt b/lib/spack/spack/test/data/patch/bar.txt
deleted file mode 100644
index ba578e48b1..0000000000
--- a/lib/spack/spack/test/data/patch/bar.txt
+++ /dev/null
@@ -1 +0,0 @@
-BAR
diff --git a/lib/spack/spack/test/data/patch/foo.patch b/lib/spack/spack/test/data/patch/foo.patch
new file mode 100644
index 0000000000..ff59bd4c54
--- /dev/null
+++ b/lib/spack/spack/test/data/patch/foo.patch
@@ -0,0 +1,7 @@
+--- a/foo.txt 2017-09-25 21:24:33.000000000 -0700
++++ b/foo.txt 2017-09-25 14:31:17.000000000 -0700
+@@ -1,2 +1,3 @@
++zeroth line
+ first line
+-second line
++third line
diff --git a/lib/spack/spack/test/data/patch/foo.tgz b/lib/spack/spack/test/data/patch/foo.tgz
index 73f598ac25..11ec586256 100644
--- a/lib/spack/spack/test/data/patch/foo.tgz
+++ b/lib/spack/spack/test/data/patch/foo.tgz
Binary files differ
diff --git a/lib/spack/spack/test/patch.py b/lib/spack/spack/test/patch.py
index 7976956748..0a91a0847c 100644
--- a/lib/spack/spack/test/patch.py
+++ b/lib/spack/spack/test/patch.py
@@ -22,63 +22,171 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-
-import os.path
-
-import pytest
+import os
import sys
+import filecmp
+import pytest
+
+from llnl.util.filesystem import working_dir, mkdirp
import spack
import spack.util.compression
-import spack.stage
-
-
-@pytest.fixture()
-def mock_apply(monkeypatch):
- """Monkeypatches ``Patch.apply`` to test only the additional behavior of
- derived classes.
- """
-
- m = sys.modules['spack.patch']
-
- def check_expand(self, *args, **kwargs):
- # Check tarball expansion
- if spack.util.compression.allowed_archive(self.url):
- file = os.path.join(self.path, 'foo.txt')
- assert os.path.exists(file)
-
- # Check tarball fetching
- dirname = os.path.dirname(self.path)
- basename = os.path.basename(self.url)
- tarball = os.path.join(dirname, basename)
- assert os.path.exists(tarball)
-
- monkeypatch.setattr(m.Patch, 'apply', check_expand)
+from spack.stage import Stage
+from spack.spec import Spec
@pytest.fixture()
def mock_stage(tmpdir, monkeypatch):
-
- monkeypatch.setattr(spack, 'stage_path', str(tmpdir))
-
- class MockStage(object):
- def __init__(self):
- self.mirror_path = str(tmpdir)
-
- return MockStage()
+ # don't disrupt the spack install directory with tests.
+ mock_path = str(tmpdir)
+ monkeypatch.setattr(spack, 'stage_path', mock_path)
+ return mock_path
data_path = os.path.join(spack.test_path, 'data', 'patch')
-@pytest.mark.usefixtures('mock_apply')
-@pytest.mark.parametrize('filename,md5', [
- (os.path.join(data_path, 'foo.tgz'), 'bff717ca9cbbb293bdf188e44c540758'),
- (os.path.join(data_path, 'bar.txt'), 'f98bf6f12e995a053b7647b10d937912')
+@pytest.mark.parametrize('filename, sha256, archive_sha256', [
+    # compressed patch -- needs sha256 and archive_sha256
+ (os.path.join(data_path, 'foo.tgz'),
+ '252c0af58be3d90e5dc5e0d16658434c9efa5d20a5df6c10bf72c2d77f780866',
+ '4e8092a161ec6c3a1b5253176fcf33ce7ba23ee2ff27c75dbced589dabacd06e'),
+ # uncompressed patch -- needs only sha256
+ (os.path.join(data_path, 'foo.patch'),
+ '252c0af58be3d90e5dc5e0d16658434c9efa5d20a5df6c10bf72c2d77f780866',
+ None)
])
-def test_url_patch_expansion(mock_stage, filename, md5):
-
- m = sys.modules['spack.patch']
+def test_url_patch(mock_stage, filename, sha256, archive_sha256):
+ # Make a patch object
url = 'file://' + filename
- patch = m.Patch.create(None, url, 0, md5=md5)
- patch.apply(mock_stage)
+ m = sys.modules['spack.patch']
+ patch = m.Patch.create(
+ None, url, sha256=sha256, archive_sha256=archive_sha256)
+
+ # make a stage
+ with Stage(url) as stage: # TODO: url isn't used; maybe refactor Stage
+ # TODO: there is probably a better way to mock this.
+ stage.mirror_path = mock_stage # don't disrupt the spack install
+
+ # fake a source path
+ with working_dir(stage.path):
+ mkdirp('spack-expanded-archive')
+
+ with working_dir(stage.source_path):
+ # write a file to be patched
+ with open('foo.txt', 'w') as f:
+ f.write("""\
+first line
+second line
+""")
+ # write the expected result of patching.
+ with open('foo-expected.txt', 'w') as f:
+ f.write("""\
+zeroth line
+first line
+third line
+""")
+ # apply the patch and compare files
+ patch.apply(stage)
+
+ with working_dir(stage.source_path):
+ assert filecmp.cmp('foo.txt', 'foo-expected.txt')
+
+
+def test_patch_in_spec(builtin_mock, config):
+ """Test whether patches in a package appear in the spec."""
+ spec = Spec('patch')
+ spec.concretize()
+ assert 'patches' in list(spec.variants.keys())
+
+ # foo, bar, baz
+ assert (('b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c',
+ '7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730',
+ 'bf07a7fbb825fc0aae7bf4a1177b2b31fcf8a3feeaf7092761e18c859ee52a9c') ==
+ spec.variants['patches'].value)
+
+
+def test_patched_dependency(builtin_mock, config):
+ """Test whether patched dependencies work."""
+ spec = Spec('patch-a-dependency')
+ spec.concretize()
+ assert 'patches' in list(spec['libelf'].variants.keys())
+
+ # foo
+ assert (('b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c',) ==
+ spec['libelf'].variants['patches'].value)
+
+
+def test_multiple_patched_dependencies(builtin_mock, config):
+ """Test whether multiple patched dependencies work."""
+ spec = Spec('patch-several-dependencies')
+ spec.concretize()
+
+ # basic patch on libelf
+ assert 'patches' in list(spec['libelf'].variants.keys())
+ # foo
+ assert (('b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c',) ==
+ spec['libelf'].variants['patches'].value)
+
+ # URL patches
+ assert 'patches' in list(spec['fake'].variants.keys())
+ # urlpatch.patch, urlpatch.patch.gz
+ assert (('abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234',
+ '1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd') ==
+ spec['fake'].variants['patches'].value)
+
+
+def test_conditional_patched_dependencies(builtin_mock, config):
+ """Test whether conditional patched dependencies work."""
+ spec = Spec('patch-several-dependencies @1.0')
+ spec.concretize()
+
+ # basic patch on libelf
+ assert 'patches' in list(spec['libelf'].variants.keys())
+ # foo
+ assert (('b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c',) ==
+ spec['libelf'].variants['patches'].value)
+
+ # conditional patch on libdwarf
+ assert 'patches' in list(spec['libdwarf'].variants.keys())
+ # bar
+ assert (('7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730',) ==
+ spec['libdwarf'].variants['patches'].value)
+ # baz is conditional on libdwarf version
+ assert ('bf07a7fbb825fc0aae7bf4a1177b2b31fcf8a3feeaf7092761e18c859ee52a9c'
+ not in spec['libdwarf'].variants['patches'].value)
+
+ # URL patches
+ assert 'patches' in list(spec['fake'].variants.keys())
+ # urlpatch.patch, urlpatch.patch.gz
+ assert (('abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234',
+ '1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd') ==
+ spec['fake'].variants['patches'].value)
+
+
+def test_conditional_patched_deps_with_conditions(builtin_mock, config):
+ """Test whether conditional patched dependencies with conditions work."""
+ spec = Spec('patch-several-dependencies @1.0 ^libdwarf@20111030')
+ spec.concretize()
+
+ # basic patch on libelf
+ assert 'patches' in list(spec['libelf'].variants.keys())
+ # foo
+ assert ('b5bb9d8014a0f9b1d61e21e796d78dccdf1352f23cd32812f4850b878ae4944c'
+ in spec['libelf'].variants['patches'].value)
+
+ # conditional patch on libdwarf
+ assert 'patches' in list(spec['libdwarf'].variants.keys())
+ # bar
+ assert ('7d865e959b2466918c9863afca942d0fb89d7c9ac0c99bafc3749504ded97730'
+ in spec['libdwarf'].variants['patches'].value)
+ # baz is conditional on libdwarf version (no guarantee on order w/conds)
+ assert ('bf07a7fbb825fc0aae7bf4a1177b2b31fcf8a3feeaf7092761e18c859ee52a9c'
+ in spec['libdwarf'].variants['patches'].value)
+
+ # URL patches
+ assert 'patches' in list(spec['fake'].variants.keys())
+ # urlpatch.patch, urlpatch.patch.gz
+ assert (('abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234',
+ '1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd') ==
+ spec['fake'].variants['patches'].value)
diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py
index 719b3a2569..a45cb7a961 100644
--- a/lib/spack/spack/test/spec_dag.py
+++ b/lib/spack/spack/test/spec_dag.py
@@ -53,7 +53,7 @@ def saved_deps():
@pytest.fixture()
def set_dependency(saved_deps):
"""Returns a function that alters the dependency information
- for a package.
+ for a package in the ``saved_deps`` fixture.
"""
def _mock(pkg_name, spec, deptypes=all_deptypes):
"""Alters dependence information for a package.
@@ -67,7 +67,7 @@ def set_dependency(saved_deps):
saved_deps[pkg_name] = (pkg, pkg.dependencies.copy())
cond = Spec(pkg.name)
- dependency = Dependency(spec, deptypes)
+ dependency = Dependency(pkg, spec, type=deptypes)
pkg.dependencies[spec.name] = {cond: dependency}
return _mock
diff --git a/lib/spack/spack/variant.py b/lib/spack/spack/variant.py
index 0d75b83ebc..3f9ede1047 100644
--- a/lib/spack/spack/variant.py
+++ b/lib/spack/spack/variant.py
@@ -47,8 +47,7 @@ class Variant(object):
description,
values=(True, False),
multi=False,
- validator=None
- ):
+ validator=None):
"""Initialize a package variant.
Args:
@@ -220,10 +219,15 @@ class AbstractVariant(object):
def from_node_dict(name, value):
"""Reconstruct a variant from a node dict."""
if isinstance(value, list):
- value = ','.join(value)
- return MultiValuedVariant(name, value)
+ # read multi-value variants in and be faithful to the YAML
+ mvar = MultiValuedVariant(name, ())
+ mvar._value = tuple(value)
+ mvar._original_value = mvar._value
+ return mvar
+
elif str(value).upper() == 'TRUE' or str(value).upper() == 'FALSE':
return BoolValuedVariant(name, value)
+
return SingleValuedVariant(name, value)
def yaml_entry(self):
@@ -252,15 +256,16 @@ class AbstractVariant(object):
# Store the original value
self._original_value = value
- # Store a tuple of CSV string representations
- # Tuple is necessary here instead of list because the
- # values need to be hashed
- t = re.split(r'\s*,\s*', str(value))
+ if not isinstance(value, (tuple, list)):
+ # Store a tuple of CSV string representations
+ # Tuple is necessary here instead of list because the
+ # values need to be hashed
+ value = re.split(r'\s*,\s*', str(value))
# With multi-value variants it is necessary
# to remove duplicates and give an order
# to a set
- self._value = tuple(sorted(set(t)))
+ self._value = tuple(sorted(set(value)))
def _cmp_key(self):
return self.name, self.value