Diffstat (limited to 'lib')
-rw-r--r--  lib/spack/docs/basic_usage.rst      33
-rw-r--r--  lib/spack/spack/audit.py           393
-rw-r--r--  lib/spack/spack/cmd/audit.py        80
-rw-r--r--  lib/spack/spack/test/audit.py       85
-rw-r--r--  lib/spack/spack/test/cmd/audit.py   34
5 files changed, 625 insertions, 0 deletions
diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst
index db69ba42e9..cf1fc28614 100644
--- a/lib/spack/docs/basic_usage.rst
+++ b/lib/spack/docs/basic_usage.rst
@@ -1730,6 +1730,39 @@ This issue typically manifests with the error below:
A nicer error message is TBD in future versions of Spack.
+---------------
+Troubleshooting
+---------------
+
+The ``spack audit`` command:
+
+.. command-output:: spack audit -h
+
+can be used to detect a number of configuration issues. This command detects
+configuration settings that, while not strictly wrong, are unlikely to be
+useful outside of special cases.
+
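+For instance, the ``configs`` subcommand checks the configuration files
+currently in use; on a healthy configuration the output looks roughly like
+the following (the exact set of checks may vary):
+
+.. code-block:: console
+
+   % spack audit configs
+   CFG-COMPILER: 0 issues found.
+   CFG-PACKAGES: 0 issues found.
+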
+It can also be used to detect dependency issues with packages - for example,
+cases where a package constrains a dependency with a variant that doesn't
+exist. In such cases Spack could report the problem ahead of time, but
+performing the check automatically would slow down most runs of Spack.
+
+A detailed list of the checks currently implemented for each subcommand can be
+printed with:
+
+.. command-output:: spack -v audit list
+
+Depending on the use case, users might run the appropriate subcommands to obtain
+diagnostics. Issues, if found, are reported to stdout:
+
+.. code-block:: console
+
+   % spack audit packages lammps
+   PKG-DIRECTIVES: 1 issue found
+   1. lammps: wrong variant in "conflicts" directive
+       the variant 'adios' does not exist
+       in /home/spack/spack/var/spack/repos/builtin/packages/lammps/package.py
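+
+Since the command exits with a non-zero status whenever issues are found, it
+can also be used as a simple gate in scripts, for instance:
+
+.. code-block:: console
+
+   % spack audit packages && echo "all package recipes are clean"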
+
------------
Getting Help
diff --git a/lib/spack/spack/audit.py b/lib/spack/spack/audit.py
new file mode 100644
index 0000000000..c9acdc39d2
--- /dev/null
+++ b/lib/spack/spack/audit.py
@@ -0,0 +1,393 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+"""Classes and functions to register audit checks for various parts of
+Spack and run them on-demand.
+
+To register a new class of sanity checks (e.g. sanity checks for
+compilers.yaml), the first action required is to create a new AuditClass
+object:
+
+.. code-block:: python
+
+    audit_cfgcmp = AuditClass(
+        group='configs',
+        tag='CFG-COMPILER',
+        description='Sanity checks on compilers.yaml',
+        kwargs=()
+    )
+
+This object is to be used as a decorator to register functions
+that will each perform a single check:
+
+.. code-block:: python
+
+    @audit_cfgcmp
+    def _search_duplicate_compilers(error_cls):
+        pass
+
+These functions need to take as arguments the keywords declared when
+creating the decorator object, plus an ``error_cls`` argument at the
+end that acts as a factory to create Error objects. Each function should
+return a (possibly empty) list of errors.
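+
+For instance, a complete check for the class registered above could look
+like the following sketch (the check itself is hypothetical):
+
+.. code-block:: python
+
+    @audit_cfgcmp
+    def _ensure_compilers_are_configured(error_cls):
+        # Hypothetical check: report an error if no compiler is configured
+        import spack.config
+        if spack.config.get('compilers'):
+            return []
+        return [error_cls(
+            summary='no compilers configured',
+            details=['add at least one entry to compilers.yaml']
+        )]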
+
+Calls to each of these functions are triggered by the ``run`` method of
+the decorator object, which forwards the keyword arguments passed
+as input.
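+
+For instance, the checks registered above could be run with (illustrative
+sketch):
+
+.. code-block:: python
+
+    # No extra keyword arguments, since the class was declared with kwargs=()
+    errors = audit_cfgcmp.run()
+    for error in errors:
+        print(error)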
+"""
+import collections
+import itertools
+try:
+    from collections.abc import Sequence  # novm
+except ImportError:
+    from collections import Sequence
+
+#: Map an audit tag to a list of callables implementing checks
+CALLBACKS = {}
+
+#: Map a group of checks to the list of related audit tags
+GROUPS = collections.defaultdict(list)
+
+
+class Error(object):
+    """Information on an error reported in a test."""
+    def __init__(self, summary, details):
+        self.summary = summary
+        self.details = tuple(details)
+
+    def __str__(self):
+        return self.summary + '\n' + '\n'.join([
+            '    ' + detail for detail in self.details
+        ])
+
+    def __eq__(self, other):
+        if self.summary != other.summary or self.details != other.details:
+            return False
+        return True
+
+    def __hash__(self):
+        value = (self.summary, self.details)
+        return hash(value)
+
+
+class AuditClass(Sequence):
+    def __init__(self, group, tag, description, kwargs):
+        """Return an object that acts as a decorator to register functions
+        associated with a specific class of sanity checks.
+
+        Args:
+            group (str): group in which this check is to be inserted
+            tag (str): tag uniquely identifying the class of sanity checks
+            description (str): description of the sanity checks performed
+                by this tag
+            kwargs (tuple of str): keyword arguments that each registered
+                function needs to accept
+        """
+        if tag in CALLBACKS:
+            msg = 'audit class "{0}" already registered'
+            raise ValueError(msg.format(tag))
+
+        self.group = group
+        self.tag = tag
+        self.description = description
+        self.kwargs = kwargs
+        self.callbacks = []
+
+        # Init the list of hooks
+        CALLBACKS[self.tag] = self
+
+        # Update the list of tags in the group
+        GROUPS[self.group].append(self.tag)
+
+    def __call__(self, func):
+        self.callbacks.append(func)
+        return func
+
+    def __getitem__(self, item):
+        return self.callbacks[item]
+
+    def __len__(self):
+        return len(self.callbacks)
+
+    def run(self, **kwargs):
+        msg = 'please pass "{0}" as keyword arguments'
+        msg = msg.format(', '.join(self.kwargs))
+        assert set(self.kwargs) == set(kwargs), msg
+
+        errors = []
+        kwargs['error_cls'] = Error
+        for fn in self.callbacks:
+            errors.extend(fn(**kwargs))
+
+        return errors
+
+
+def run_group(group, **kwargs):
+    """Run the checks that are part of the group passed as argument.
+
+    Args:
+        group (str): group of checks to be run
+        **kwargs: keyword arguments forwarded to the checks
+
+    Returns:
+        List of (tag, errors) tuples, one for each check in the group.
+    """
+    reports = []
+    for check in GROUPS[group]:
+        errors = run_check(check, **kwargs)
+        reports.append((check, errors))
+    return reports
+
+
+def run_check(tag, **kwargs):
+    """Run the checks associated with a single tag.
+
+    Args:
+        tag (str): tag of the check
+        **kwargs: keyword arguments forwarded to the checks
+
+    Returns:
+        Errors that occurred during the checks
+    """
+    return CALLBACKS[tag].run(**kwargs)
+
+
+# TODO: For the generic check to be useful for end users,
+# TODO: we need to implement hooks like described in
+# TODO: https://github.com/spack/spack/pull/23053/files#r630265011
+#: Generic checks relying on global state
+generic = AuditClass(
+    group='generic',
+    tag='GENERIC',
+    description='Generic checks relying on global variables',
+    kwargs=()
+)
+
+
+#: Sanity checks on compilers.yaml
+config_compiler = AuditClass(
+    group='configs',
+    tag='CFG-COMPILER',
+    description='Sanity checks on compilers.yaml',
+    kwargs=()
+)
+
+
+@config_compiler
+def _search_duplicate_compilers(error_cls):
+    """Report compilers with the same spec and two different definitions"""
+    import spack.config
+    errors = []
+
+    compilers = list(sorted(
+        spack.config.get('compilers'), key=lambda x: x['compiler']['spec']
+    ))
+    for spec, group in itertools.groupby(
+            compilers, key=lambda x: x['compiler']['spec']
+    ):
+        group = list(group)
+        if len(group) == 1:
+            continue
+
+        error_msg = 'Compiler defined multiple times: {0}'
+        try:
+            details = [str(x._start_mark).strip() for x in group]
+        except Exception:
+            details = []
+        errors.append(error_cls(
+            summary=error_msg.format(spec), details=details
+        ))
+
+    return errors
+
+
+#: Sanity checks on packages.yaml
+config_packages = AuditClass(
+    group='configs',
+    tag='CFG-PACKAGES',
+    description='Sanity checks on packages.yaml',
+    kwargs=()
+)
+
+
+@config_packages
+def _search_duplicate_specs_in_externals(error_cls):
+    """Search for duplicate specs declared as externals"""
+    import spack.config
+    import spack.spec
+
+    errors, externals = [], collections.defaultdict(list)
+    packages_yaml = spack.config.get('packages')
+
+    for name, pkg_config in packages_yaml.items():
+        # No externals can be declared under all
+        if name == 'all' or 'externals' not in pkg_config:
+            continue
+
+        current_externals = pkg_config['externals']
+        for entry in current_externals:
+            # Ask for the string representation of the spec to normalize
+            # aspects of the spec that may be represented in multiple ways
+            # e.g. +foo or foo=true
+            key = str(spack.spec.Spec(entry['spec']))
+            externals[key].append(entry)
+
+    for spec, entries in sorted(externals.items()):
+        # If there's a single external for a spec we are fine
+        if len(entries) < 2:
+            continue
+
+        # Otherwise we need to report an error
+        error_msg = 'Multiple externals share the same spec: {0}'.format(spec)
+        try:
+            lines = [str(x._start_mark).strip() for x in entries]
+            details = [
+                'Please remove all but one of the following entries:'
+            ] + lines + [
+                'as they might result in non-deterministic hashes'
+            ]
+        except TypeError:
+            details = []
+
+        errors.append(error_cls(summary=error_msg, details=details))
+
+    return errors
+
+
+#: Sanity checks on package directives
+package_directives = AuditClass(
+    group='packages',
+    tag='PKG-DIRECTIVES',
+    description='Sanity checks on specs used in directives',
+    kwargs=('pkgs',)
+)
+
+
+@package_directives
+def _unknown_variants_in_directives(pkgs, error_cls):
+    """Report unknown or wrong variants in directives for this package"""
+    import llnl.util.lang
+    import spack.repo
+    import spack.spec
+
+    errors = []
+    for pkg_name in pkgs:
+        pkg = spack.repo.get(pkg_name)
+
+        # Check "conflicts" directive
+        for conflict, triggers in pkg.conflicts.items():
+            for trigger, _ in triggers:
+                vrn = spack.spec.Spec(conflict)
+                try:
+                    vrn.constrain(trigger)
+                except Exception as e:
+                    msg = 'Generic error in conflict for package "{0}": '
+                    errors.append(error_cls(msg.format(pkg.name), [str(e)]))
+                    continue
+                errors.extend(_analyze_variants_in_directive(
+                    pkg, vrn, directive='conflicts', error_cls=error_cls
+                ))
+
+        # Check "depends_on" directive
+        for _, triggers in pkg.dependencies.items():
+            for trigger in list(triggers):
+                vrn = spack.spec.Spec(trigger)
+                errors.extend(_analyze_variants_in_directive(
+                    pkg, vrn, directive='depends_on', error_cls=error_cls
+                ))
+
+        # Check the constraints used in "provides" directives
+        for _, triggers in pkg.provided.items():
+            triggers = [spack.spec.Spec(x) for x in triggers]
+            for vrn in triggers:
+                errors.extend(_analyze_variants_in_directive(
+                    pkg, vrn, directive='provides', error_cls=error_cls
+                ))
+
+        # Check "resource" directive
+        for vrn in pkg.resources:
+            errors.extend(_analyze_variants_in_directive(
+                pkg, vrn, directive='resource', error_cls=error_cls
+            ))
+
+    return llnl.util.lang.dedupe(errors)
+
+
+@package_directives
+def _unknown_variants_in_dependencies(pkgs, error_cls):
+    """Report unknown dependencies and wrong variants for dependencies"""
+    import spack.repo
+    import spack.spec
+
+    errors = []
+    for pkg_name in pkgs:
+        pkg = spack.repo.get(pkg_name)
+        filename = spack.repo.path.filename_for_package_name(pkg_name)
+        for dependency_name, dependency_data in pkg.dependencies.items():
+            # No need to analyze virtual packages
+            if spack.repo.path.is_virtual(dependency_name):
+                continue
+
+            try:
+                dependency_pkg = spack.repo.get(dependency_name)
+            except spack.repo.UnknownPackageError:
+                # This dependency is completely missing, so report
+                # and continue the analysis
+                summary = (pkg_name + ": unknown package '{0}' in "
+                           "'depends_on' directive".format(dependency_name))
+                details = [
+                    " in " + filename
+                ]
+                errors.append(error_cls(summary=summary, details=details))
+                continue
+
+            for _, dependency_edge in dependency_data.items():
+                dependency_variants = dependency_edge.spec.variants
+                for name, value in dependency_variants.items():
+                    try:
+                        dependency_pkg.variants[name].validate_or_raise(
+                            value, pkg=dependency_pkg
+                        )
+                    except Exception as e:
+                        summary = (pkg_name + ": wrong variant used for a "
+                                   "dependency in a 'depends_on' directive")
+                        error_msg = str(e).strip()
+                        if isinstance(e, KeyError):
+                            error_msg = ('the variant {0} does not '
+                                         'exist'.format(error_msg))
+                        error_msg += " in package '" + dependency_name + "'"
+
+                        errors.append(error_cls(
+                            summary=summary,
+                            details=[error_msg, 'in ' + filename]
+                        ))
+
+    return errors
+
+
+def _analyze_variants_in_directive(pkg, constraint, directive, error_cls):
+    import spack.repo
+    import spack.variant
+    variant_exceptions = (
+        spack.variant.InconsistentValidationError,
+        spack.variant.MultipleValuesInExclusiveVariantError,
+        spack.variant.InvalidVariantValueError,
+        KeyError
+    )
+    errors = []
+    for name, v in constraint.variants.items():
+        try:
+            pkg.variants[name].validate_or_raise(v, pkg=pkg)
+        except variant_exceptions as e:
+            summary = pkg.name + ': wrong variant in "{0}" directive'
+            summary = summary.format(directive)
+            filename = spack.repo.path.filename_for_package_name(pkg.name)
+
+            error_msg = str(e).strip()
+            if isinstance(e, KeyError):
+                error_msg = 'the variant {0} does not exist'.format(error_msg)
+
+            err = error_cls(summary=summary, details=[
+                error_msg, 'in ' + filename
+            ])
+
+            errors.append(err)
+
+    return errors
diff --git a/lib/spack/spack/cmd/audit.py b/lib/spack/spack/cmd/audit.py
new file mode 100644
index 0000000000..09285258f7
--- /dev/null
+++ b/lib/spack/spack/cmd/audit.py
@@ -0,0 +1,80 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import llnl.util.tty.color as cl
+import spack.audit
+import spack.repo
+
+
+description = "audit configuration files, packages, etc."
+section = "system"
+level = "short"
+
+
+def setup_parser(subparser):
+    # Top level flags, valid for every audit class
+    sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='subcommand')
+
+    # Audit configuration files
+    sp.add_parser('configs', help='audit configuration files')
+
+    # Audit package recipes
+    pkg_parser = sp.add_parser('packages', help='audit package recipes')
+    pkg_parser.add_argument(
+        'name', metavar='PKG', nargs='*',
+        help='package to be analyzed (if none, all packages will be processed)',
+    )
+
+    # List all checks
+    sp.add_parser('list', help='list available checks and exit')
+
+
+def configs(parser, args):
+    reports = spack.audit.run_group(args.subcommand)
+    _process_reports(reports)
+
+
+def packages(parser, args):
+    pkgs = args.name or spack.repo.path.all_package_names()
+    reports = spack.audit.run_group(args.subcommand, pkgs=pkgs)
+    _process_reports(reports)
+
+
+def list(parser, args):
+    for subcommand, check_tags in spack.audit.GROUPS.items():
+        print(cl.colorize('@*b{' + subcommand + '}:'))
+        for tag in check_tags:
+            audit_obj = spack.audit.CALLBACKS[tag]
+            print('  ' + audit_obj.description)
+            if args.verbose:
+                for idx, fn in enumerate(audit_obj.callbacks):
+                    print('    {0}. '.format(idx + 1) + fn.__doc__)
+                print()
+        print()
+
+
+def audit(parser, args):
+    subcommands = {
+        'configs': configs,
+        'packages': packages,
+        'list': list
+    }
+    subcommands[args.subcommand](parser, args)
+
+
+def _process_reports(reports):
+    issues_found = False
+    for check, errors in reports:
+        if errors:
+            issues_found = True
+            msg = '{0}: {1} issue{2} found'.format(
+                check, len(errors), '' if len(errors) == 1 else 's'
+            )
+            header = '@*b{' + msg + '}'
+            print(cl.colorize(header))
+            for idx, error in enumerate(errors):
+                print(str(idx + 1) + '. ' + str(error))
+        else:
+            msg = '{0}: 0 issues found.'.format(check)
+            header = '@*b{' + msg + '}'
+            print(cl.colorize(header))
+
+    if issues_found:
+        raise SystemExit(1)
diff --git a/lib/spack/spack/test/audit.py b/lib/spack/spack/test/audit.py
new file mode 100644
index 0000000000..a2edff185e
--- /dev/null
+++ b/lib/spack/spack/test/audit.py
@@ -0,0 +1,85 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import pytest
+
+import spack.audit
+import spack.config
+
+
+@pytest.mark.parametrize('packages,failing_check', [
+    # A non-existing variant is used in a conflicts directive
+    (['wrong-variant-in-conflicts'], 'PKG-DIRECTIVES'),
+    # The package declares a non-existing dependency
+    (['missing-dependency'], 'PKG-DIRECTIVES'),
+    # The package uses a non-existing variant in a depends_on directive
+    (['wrong-variant-in-depends-on'], 'PKG-DIRECTIVES'),
+    # This package has no issues
+    (['mpileaks'], None)
+])
+def test_package_audits(packages, failing_check, mock_packages):
+    reports = spack.audit.run_group('packages', pkgs=packages)
+
+    for check, errors in reports:
+        # Check that we have errors only if there is an expected failure
+        # and that the tag matches our expectations
+        if bool(failing_check):
+            assert check == failing_check
+            assert errors
+        else:
+            assert not errors
+
+
+# Data used in the test below to audit the double definition of a compiler
+_double_compiler_definition = [
+    {'compiler': {
+        'spec': 'gcc@9.0.1',
+        'paths': {
+            'cc': '/usr/bin/gcc-9',
+            'cxx': '/usr/bin/g++-9',
+            'f77': '/usr/bin/gfortran-9',
+            'fc': '/usr/bin/gfortran-9'
+        },
+        'flags': {},
+        'operating_system': 'ubuntu18.04',
+        'target': 'x86_64',
+        'modules': [],
+        'environment': {},
+        'extra_rpaths': []
+    }},
+    {'compiler': {
+        'spec': 'gcc@9.0.1',
+        'paths': {
+            'cc': '/usr/bin/gcc-9',
+            'cxx': '/usr/bin/g++-9',
+            'f77': '/usr/bin/gfortran-9',
+            'fc': '/usr/bin/gfortran-9'
+        },
+        'flags': {"cflags": "-O3"},
+        'operating_system': 'ubuntu18.04',
+        'target': 'x86_64',
+        'modules': [],
+        'environment': {},
+        'extra_rpaths': []
+    }}
+]
+
+
+@pytest.mark.parametrize('config_section,data,failing_check', [
+    # Double compiler definitions in compilers.yaml
+    ('compilers', _double_compiler_definition, 'CFG-COMPILER'),
+    # Multiple definitions of the same external spec in packages.yaml
+    ('packages', {
+        "mpileaks": {"externals": [
+            {"spec": "mpileaks@1.0.0", "prefix": "/"},
+            {"spec": "mpileaks@1.0.0", "prefix": "/usr"},
+        ]}
+    }, 'CFG-PACKAGES')
+])
+def test_config_audits(config_section, data, failing_check):
+    with spack.config.override(config_section, data):
+        reports = spack.audit.run_group('configs')
+        assert any(
+            (check == failing_check) and errors for check, errors in reports
+        )
diff --git a/lib/spack/spack/test/cmd/audit.py b/lib/spack/spack/test/cmd/audit.py
new file mode 100644
index 0000000000..815a0cdf1b
--- /dev/null
+++ b/lib/spack/spack/test/cmd/audit.py
@@ -0,0 +1,34 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import pytest
+
+from spack.main import SpackCommand
+
+
+audit = SpackCommand('audit')
+
+
+@pytest.mark.parametrize('pkgs,expected_returncode', [
+    # A single package with issues should exit 1
+    (['wrong-variant-in-conflicts'], 1),
+    # A "sane" package should exit 0
+    (['mpileaks'], 0),
+    # A package with issues and a package without should exit 1
+    (['wrong-variant-in-conflicts', 'mpileaks'], 1),
+    (['mpileaks', 'wrong-variant-in-conflicts'], 1),
+])
+def test_audit_packages(
+        pkgs, expected_returncode, mutable_config, mock_packages
+):
+    """Sanity check ``spack audit packages`` to make sure it works."""
+    audit('packages', *pkgs, fail_on_error=False)
+    assert audit.returncode == expected_returncode
+
+
+def test_audit_configs(mutable_config, mock_packages):
+    """Sanity check ``spack audit configs`` to make sure it works."""
+    audit('configs', fail_on_error=False)
+    # The mock configuration has duplicate definitions of some compilers
+    assert audit.returncode == 1