-rw-r--r--  lib/spack/llnl/util/lang.py             9
-rw-r--r--  lib/spack/spack/cmd/solve.py           12
-rw-r--r--  lib/spack/spack/solver/asp.py         295
-rw-r--r--  lib/spack/spack/solver/concretize.lp   47
-rwxr-xr-x  share/spack/spack-completion.bash      11
5 files changed, 310 insertions, 64 deletions
diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py
index b9b034d4d4..88058d4bd1 100644
--- a/lib/spack/llnl/util/lang.py
+++ b/lib/spack/llnl/util/lang.py
@@ -671,3 +671,12 @@ def uniq(sequence):
uniq_list.append(element)
last = element
return uniq_list
+
+
+class Devnull(object):
+ """Null stream with less overhead than ``os.devnull``.
+
+ See https://stackoverflow.com/a/2929954.
+ """
+ def write(self, *_):
+ pass
diff --git a/lib/spack/spack/cmd/solve.py b/lib/spack/spack/cmd/solve.py
index 74ec42d5af..480ced6a29 100644
--- a/lib/spack/spack/cmd/solve.py
+++ b/lib/spack/spack/cmd/solve.py
@@ -59,6 +59,9 @@ def setup_parser(subparser):
'--timers', action='store_true', default=False,
help='print out timers for different solve phases')
subparser.add_argument(
+ '--stats', action='store_true', default=False,
+ help='print out statistics from clingo')
+ subparser.add_argument(
'specs', nargs=argparse.REMAINDER, help="specs of packages")
@@ -93,7 +96,9 @@ def solve(parser, args):
specs = spack.cmd.parse_specs(args.specs)
# dump generated ASP program
- result = asp.solve(specs, dump=dump, models=models, timers=args.timers)
+ result = asp.solve(
+ specs, dump=dump, models=models, timers=args.timers, stats=args.stats
+ )
if 'solutions' not in dump:
return
@@ -105,11 +110,10 @@ def solve(parser, args):
# dump the solutions as concretized specs
if 'solutions' in dump:
best = min(result.answers)
- assert best[1] == result.answers[-1][1]
- opt, i, answer = best
+ opt, _, answer = best
if not args.format:
- tty.msg("Best of %d answers." % (i + 1))
+ tty.msg("Best of %d answers." % result.nmodels)
tty.msg("Optimization: %s" % opt)
# iterate over roots from command line
diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py
index f1d2b40b60..eb01a4b77a 100644
--- a/lib/spack/spack/solver/asp.py
+++ b/lib/spack/spack/solver/asp.py
@@ -6,7 +6,9 @@
from __future__ import print_function
import collections
+import os
import pkgutil
+import pprint
import re
import sys
import tempfile
@@ -14,7 +16,13 @@ import time
import types
from six import string_types
+try:
+ import clingo
+except ImportError:
+ clingo = None
+
import llnl.util.cpu
+import llnl.util.lang
import llnl.util.tty as tty
import llnl.util.tty.color as color
@@ -23,6 +31,7 @@ import spack.architecture
import spack.cmd
import spack.config
import spack.dependency
+import spack.error
import spack.spec
import spack.package
import spack.package_prefs
@@ -105,6 +114,17 @@ class AspFunction(AspObject):
def __call__(self, *args):
return AspFunction(self.name, args)
+ def symbol(self, positive=True):
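+        # turn this AspFunction into a clingo Symbol so it can be handed
+        # directly to the solver backend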
+ def argify(arg):
+ if isinstance(arg, bool):
+ return str(arg)
+ elif isinstance(arg, int):
+ return arg
+ else:
+ return str(arg)
+ return clingo.Function(
+ self.name, [argify(arg) for arg in self.args], positive=positive)
+
def __getitem___(self, *args):
self.args[:] = args
return self
@@ -127,14 +147,6 @@ class AspAnd(AspObject):
return s
-class AspNot(AspObject):
- def __init__(self, arg):
- self.arg = arg
-
- def __str__(self):
- return "not %s" % self.arg
-
-
class AspOneOf(AspObject):
def __init__(self, *args):
args = listify(args)
@@ -195,14 +207,16 @@ def check_packages_exist(specs):
class Result(object):
"""Result of an ASP solve."""
- def __init__(self, asp):
+ def __init__(self, asp=None):
self.asp = asp
self.satisfiable = None
self.optimal = None
self.warnings = None
+ self.nmodels = 0
# specs ordered by optimization level
self.answers = []
+ self.cores = []
class ClingoDriver(object):
@@ -233,9 +247,6 @@ class ClingoDriver(object):
def _and(self, *args):
return AspAnd(*args)
- def _not(self, arg):
- return AspNot(arg)
-
def fact(self, head):
"""ASP fact (a rule without a body)."""
self.out.write("%s.\n" % head)
@@ -250,15 +261,9 @@ class ClingoDriver(object):
def before_setup(self):
"""Must be called before program is generated."""
# read the main ASP program from concrtize.lp
- concretize_lp = pkgutil.get_data('spack.solver', 'concretize.lp')
- self.out.write(concretize_lp.decode("utf-8"))
- return self
def after_setup(self):
"""Must be called after program is generated."""
- self.out.write('\n')
- display_lp = pkgutil.get_data('spack.solver', 'display.lp')
- self.out.write(display_lp.decode("utf-8"))
def parse_model_functions(self, function_strings):
function_re = re.compile(r'(\w+)\(([^)]*)\)')
@@ -301,7 +306,8 @@ class ClingoDriver(object):
result.answers.append((opt, best_model_number, specs))
- def solve(self, solver_setup, specs, dump=None, models=0, timers=False):
+ def solve(self, solver_setup, specs, dump=None, models=0,
+ timers=False, stats=False):
def colorize(string):
color.cprint(highlight(color.cescape(string)))
@@ -309,12 +315,16 @@ class ClingoDriver(object):
with tempfile.TemporaryFile("w+") as program:
self.out = program
- self.before_setup()
+ concretize_lp = pkgutil.get_data('spack.solver', 'concretize.lp')
+ program.write(concretize_lp.decode("utf-8"))
+
solver_setup.setup(self, specs)
- self.after_setup()
- timer.phase("generate")
- program.seek(0)
+ program.write('\n')
+ display_lp = pkgutil.get_data('spack.solver', 'display.lp')
+ program.write(display_lp.decode("utf-8"))
+
+            timer.phase("generate")
+            program.seek(0)
result = Result(program.read())
program.seek(0)
@@ -385,6 +395,172 @@ class ClingoDriver(object):
return result
+class PyclingoDriver(object):
+ def __init__(self, cores=True, asp=None):
+ """Driver for the Python clingo interface.
+
+ Arguments:
+ cores (bool): whether to generate unsatisfiable cores for better
+ error reporting.
+ asp (file-like): optional stream to write a text-based ASP program
+ for debugging or verification.
+ """
+ assert clingo, "PyclingoDriver requires clingo with Python support"
+ self.out = asp or llnl.util.lang.Devnull()
+ self.cores = cores
+
+ def title(self, name, char):
+ self.out.write('\n')
+ self.out.write("%" + (char * 76))
+ self.out.write('\n')
+ self.out.write("%% %s\n" % name)
+ self.out.write("%" + (char * 76))
+ self.out.write('\n')
+
+ def h1(self, name):
+ self.title(name, "=")
+
+ def h2(self, name):
+ self.title(name, "-")
+
+ def newline(self):
+ self.out.write('\n')
+
+ def one_of(self, *args):
+ return AspOneOf(*args)
+
+ def _and(self, *args):
+ return AspAnd(*args)
+
+ def fact(self, head):
+ """ASP fact (a rule without a body)."""
+ sym = head.symbol()
+ self.out.write("%s.\n" % sym)
+
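+        # when unsat cores are enabled, emit the fact as a choice rule and
+        # assume it true, so it can show up in an unsatisfiable core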
+ atom = self.backend.add_atom(sym)
+ self.backend.add_rule([atom], [], choice=self.cores)
+ if self.cores:
+ self.assumptions.append(atom)
+
+ def rule(self, head, body):
+ """ASP rule (an implication)."""
+ if isinstance(body, AspAnd):
+ args = [f.symbol() for f in body.args]
+ elif isinstance(body, clingo.Symbol):
+ args = [body]
+ else:
+            raise TypeError("Invalid type for rule body: %s" % type(body))
+
+ symbols = [head.symbol()] + args
+ atoms = {}
+ for s in symbols:
+ atoms[s] = self.backend.add_atom(s)
+
+ # Special assumption atom to allow rules to be in unsat cores
+ rule_str = "%s :- %s." % (
+ head.symbol(), ",".join(str(a) for a in args))
+
+ # rule atoms need to be choices before we can assume them
+ if self.cores:
+ rule_sym = clingo.Function("rule", [rule_str])
+ rule_atom = self.backend.add_atom(rule_sym)
+ self.backend.add_rule([rule_atom], [], choice=True)
+ self.assumptions.append(rule_atom)
+ rule_atoms = [rule_atom]
+ else:
+ rule_atoms = []
+
+ # print rule before adding
+ self.out.write("%s\n" % rule_str)
+ self.backend.add_rule(
+ [atoms[head.symbol()]],
+ [atoms[s] for s in args] + rule_atoms
+ )
+
+ def solve(self, solver_setup, specs, dump=None, nmodels=0,
+ timers=False, stats=False):
+ calls = [0]
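+
+        # Context supplies the @-functions (e.g. @version_satisfies) that the
+        # ASP program calls; clingo evaluates them in Python during grounding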
+ class Context(object):
+ def version_satisfies(self, a, b):
+ calls[0] += 1
+ return bool(ver(a.string).satisfies(ver(b.string)))
+
+ timer = Timer()
+
+ # Initialize the control object for the solver
+ self.control = clingo.Control()
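+        # 'tweety' is clingo's preset geared toward typical ASP problems;
+        # 'bb,dec' selects model-guided (branch-and-bound) optimization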
+ self.control.configuration.solve.models = nmodels
+ self.control.configuration.configuration = 'tweety'
+ self.control.configuration.solver.opt_strategy = "bb,dec"
+
+ # set up the problem -- this generates facts and rules
+ self.assumptions = []
+ with self.control.backend() as backend:
+ self.backend = backend
+ solver_setup.setup(self, specs)
+ timer.phase("setup")
+
+ # read in the main ASP program and display logic -- these are
+ # handwritten, not generated, so we load them as resources
+ parent_dir = os.path.dirname(__file__)
+ self.control.load(os.path.join(parent_dir, 'concretize.lp'))
+ self.control.load(os.path.join(parent_dir, "display.lp"))
+ timer.phase("load")
+
+ # Grounding is the first step in the solve -- it turns our facts
+ # and first-order logic rules into propositional logic.
+ self.control.ground([("base", [])], context=Context())
+ timer.phase("ground")
+
+ # With a grounded program, we can run the solve.
+ result = Result()
+        models = []  # stable models if things go well
+ cores = [] # unsatisfiable cores if they do not
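+
+        # each stable model is reported through this callback; keep its
+        # optimization costs and shown atoms so the best one can be rebuilt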
+ def on_model(model):
+ models.append((model.cost, model.symbols(shown=True)))
+
+ solve_result = self.control.solve(
+ assumptions=self.assumptions,
+ on_model=on_model,
+ on_core=cores.append
+ )
+ timer.phase("solve")
+
+ # once done, construct the solve result
+        result.satisfiable = solve_result.satisfiable
+        result.nmodels = len(models)
+ if result.satisfiable:
+ builder = SpecBuilder(specs)
+ min_cost, best_model = min(models)
+ tuples = [
+ (sym.name, [a.string for a in sym.arguments])
+ for sym in best_model
+ ]
+ answers = builder.build_specs(tuples)
+ result.answers.append((list(min_cost), 0, answers))
+
+ elif cores:
+ symbols = dict(
+ (a.literal, a.symbol)
+ for a in self.control.symbolic_atoms
+ )
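+            # cores are lists of solver literals; translate them back into
+            # the facts and rule strings they were assumptions for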
+            for core in cores:
+                core_symbols = []
+                for atom in core:
+                    sym = symbols[atom]
+                    if sym.name == "rule":
+                        sym = sym.arguments[0].string
+                    core_symbols.append(sym)
+                result.cores.append(core_symbols)
+
+ if timers:
+ timer.write()
+ print()
+ if stats:
+ print("Statistics:")
+ pprint.pprint(self.control.statistics)
+
+ return result
+
+
class SpackSolverSetup(object):
"""Class to set up and run a Spack concretization solve."""
@@ -393,6 +569,11 @@ class SpackSolverSetup(object):
self.possible_versions = {}
self.possible_virtuals = None
self.possible_compilers = []
+ self.version_constraints = set()
+ self.post_facts = []
+
+ # id for dummy variables
+ self.card = 0
def pkg_version_rules(self, pkg):
"""Output declared versions of a package.
@@ -419,7 +600,7 @@ class SpackSolverSetup(object):
-priority.get(v, 0),
# The preferred=True flag (packages or packages.yaml or both?)
- pkg.versions.get(v).get('preferred', False),
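+                # a version may come from preferences rather than package.py,
+                # so fall back to an empty entry instead of erroring out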
+ pkg.versions.get(v, {}).get('preferred', False),
# ------- Regular case: use latest non-develop version by default.
# Avoid @develop version, which would otherwise be the "largest"
@@ -448,22 +629,29 @@ class SpackSolverSetup(object):
if spec.concrete:
return [fn.version(spec.name, spec.version)]
- # version must be *one* of the ones the spec allows.
- allowed_versions = [
- v for v in sorted(self.possible_versions[spec.name])
- if v.satisfies(spec.versions)
- ]
-
- # don't bother restricting anything if all versions are allowed
- if len(allowed_versions) == len(self.possible_versions[spec.name]):
+ if spec.versions == ver(":"):
return []
- predicates = [fn.version(spec.name, v) for v in allowed_versions]
+ # record all version constraints for later
+ self.version_constraints.add((spec.name, spec.versions))
+ return [fn.version_satisfies(spec.name, spec.versions)]
+
+ # # version must be *one* of the ones the spec allows.
+ # allowed_versions = [
+ # v for v in sorted(self.possible_versions[spec.name])
+ # if v.satisfies(spec.versions)
+ # ]
- # conflict with any versions that do not satisfy the spec
- if predicates:
- return [self.gen.one_of(*predicates)]
- return []
+ # # don't bother restricting anything if all versions are allowed
+ # if len(allowed_versions) == len(self.possible_versions[spec.name]):
+ # return []
+
+ # predicates = [fn.version(spec.name, v) for v in allowed_versions]
+
+ # # conflict with any versions that do not satisfy the spec
+ # if predicates:
+ # return [self.gen.one_of(*predicates)]
+ # return []
def available_compilers(self):
"""Facts about available compilers."""
@@ -526,13 +714,11 @@ class SpackSolverSetup(object):
self.gen.fact(fn.variant(pkg.name, name))
single_value = not variant.multi
- single = fn.variant_single_value(pkg.name, name)
if single_value:
- self.gen.fact(single)
+ self.gen.fact(fn.variant_single_value(pkg.name, name))
self.gen.fact(
fn.variant_default_value(pkg.name, name, variant.default))
else:
- self.gen.fact(self.gen._not(single))
defaults = variant.default.split(',')
for val in sorted(defaults):
self.gen.fact(
@@ -585,7 +771,10 @@ class SpackSolverSetup(object):
)
# add constraints on the dependency from dep spec.
- for clause in self.spec_clauses(dep.spec):
+ clauses = self.spec_clauses(dep.spec)
+ if spack.repo.path.is_virtual(dep.spec.name):
+ clauses = []
+ for clause in clauses:
self.gen.rule(
clause,
self.gen._and(
@@ -708,6 +897,9 @@ class SpackSolverSetup(object):
spec.name, spec.compiler.name, spec.compiler.version))
elif spec.compiler.versions:
+ f.node_compiler_version_satisfies(
+                spec.name, spec.compiler.name, spec.compiler.version)
+
compiler_list = spack.compilers.all_compiler_specs()
possible_compiler_versions = [
f.node_compiler_version(
@@ -716,6 +908,8 @@ class SpackSolverSetup(object):
for compiler in compiler_list
if compiler.satisfies(spec.compiler)
]
+
clauses.append(self.gen.one_of(*possible_compiler_versions))
for version in possible_compiler_versions:
clauses.append(
@@ -818,7 +1012,7 @@ class SpackSolverSetup(object):
# many targets can make things slow.
# TODO: investigate this.
best_targets = set([uarch.family.name])
- for compiler in compilers:
+ for compiler in sorted(compilers):
supported = self._supported_targets(compiler, compatible_targets)
if not supported:
continue
@@ -893,7 +1087,7 @@ class SpackSolverSetup(object):
return cspecs
def setup(self, driver, specs):
- """Generate an ASP program with relevant constarints for specs.
+ """Generate an ASP program with relevant constraints for specs.
This calls methods on the solve driver to set up the problem with
facts and rules from all possible dependencies of the input
@@ -955,6 +1149,10 @@ class SpackSolverSetup(object):
for clause in self.spec_clauses(dep):
self.gen.fact(clause)
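+
+        # constraints recorded by spec_versions() become facts here; the ASP
+        # program checks chosen versions against them via @version_satisfies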
+ self.gen.h1("Version Constraints")
+ for name, versions in sorted(self.version_constraints):
+ self.gen.fact(fn.version_constraint(name, versions))
+ self.gen.newline()
class SpecBuilder(object):
"""Class with actions to rebuild a spec from ASP results."""
@@ -1086,6 +1284,8 @@ class SpecBuilder(object):
self._specs = {}
for name, args in function_tuples:
action = getattr(self, name, None)
+
+            # print out unknown actions so we can debug them
if not action:
print("%s(%s)" % (name, ", ".join(str(a) for a in args)))
continue
@@ -1144,7 +1344,7 @@ def highlight(string):
#
# These are handwritten parts for the Spack ASP model.
#
-def solve(specs, dump=None, models=0, timers=False):
+def solve(specs, dump=None, models=0, timers=False, stats=False):
"""Solve for a stable model of specs.
Arguments:
@@ -1152,6 +1352,9 @@ def solve(specs, dump=None, models=0, timers=False):
dump (tuple): what to dump
models (int): number of models to search (default: 0)
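+        timers (bool): print timing for each solve phase
+        stats (bool): print clingo statistics after the solve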
"""
- driver = ClingoDriver()
+ driver = PyclingoDriver()
+ if "asp" in dump:
+ driver.out = sys.stdout
+
setup = SpackSolverSetup()
- return driver.solve(setup, specs, dump, models, timers)
+ return driver.solve(setup, specs, dump, models, timers, stats)
diff --git a/lib/spack/spack/solver/concretize.lp b/lib/spack/spack/solver/concretize.lp
index daf0c23ac7..604720c6a4 100644
--- a/lib/spack/spack/solver/concretize.lp
+++ b/lib/spack/spack/solver/concretize.lp
@@ -10,18 +10,26 @@
version_declared(Package, Version) :- version_declared(Package, Version, _).
% If something is a package, it has only one version and that must be a
-% possible version.
-1 { version(Package, Version) : version_possible(Package, Version) } 1
+% declared version.
+1 { version(Package, Version) : version_declared(Package, Version) } 1
:- node(Package).
-% If a version is declared but conflicted, it's not possible.
-version_possible(Package, Version)
- :- version_declared(Package, Version), not version_conflict(Package, Version).
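+% @version_satisfies is an external function: clingo calls back into Python
+% during grounding, so Spack's own version-range semantics decide satisfaction.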
+% no conflicting versions can be assigned
+:- version(Package, Version),
+ version_satisfies(Package, Constraint),
+ 0 = @version_satisfies(Version, Constraint).
-version_weight(Package, Version, Weight)
+version_satisfies(Package, Constraint)
+ :- node(Package),
+ version(Package, Version),
+ version_constraint(Package, Constraint),
+ 1 = @version_satisfies(Version, Constraint).
+
+version_weight(Package, Weight)
:- version(Package, Version), version_declared(Package, Version, Weight).
#defined version_conflict/2.
+#defined version_constraint/2.
%-----------------------------------------------------------------------------
% Dependency semantics
@@ -47,9 +55,11 @@ depends_on(Package, Dependency, Type)
1 { node(Package) : provides_virtual(Package, Virtual) } 1
:- virtual_node(Virtual).
-% for any virtual, there can be at most one provider in the DAG
+% a node that provides a virtual is a provider
provider(Package, Virtual)
- :- node(Package), provides_virtual(Package, Virtual).
+ :- node(Package), provides_virtual(Package, Virtual).
+
+% for any virtual, there can be at most one provider in the DAG
0 { provider(Package, Virtual) : node(Package) } 1 :- virtual(Virtual).
% give dependents the virtuals they want
@@ -73,10 +83,9 @@ provider_weight(Dependency, 100)
not default_provider_preference(Virtual, Dependency, _).
% all nodes must be reachable from some root
-needed(Package) :- root(Package), node(Package).
-needed(Dependency) :- root(Package), depends_on(Package, Dependency).
-needed(Dependency)
- :- needed(Package), depends_on(Package, Dependency), node(Package).
+node(Package) :- root(Package).
+needed(Package) :- root(Package).
+needed(Dependency) :- needed(Package), depends_on(Package, Dependency).
:- node(Package), not needed(Package).
% real dependencies imply new nodes.
@@ -231,6 +240,16 @@ node_target_match(Package, 1)
1 { compiler_weight(Package, Weight) : compiler_weight(Package, Weight) } 1
:- node(Package).
+% no conflicting compiler versions
+:- node_compiler_version(Package, Compiler, Version),
+ node_compiler_version_satisfies(Package, Compiler, Constraint),
+ 0 = @version_satisfies(Version, Constraint).
+
+node_compiler_version_satisfies(Package, Compiler, Constraint)
+ :- node_compiler_version(Package, Compiler, Version),
+ node_compiler_version_constraint(Package, Compiler, Constraint),
+ 1 = @version_satisfies(Version, Constraint).
+
% dependencies imply we should try to match hard compiler constraints
% todo: look at what to do about intersecting constraints here. we'd
% ideally go with the "lowest" pref in the DAG
@@ -256,6 +275,8 @@ compiler_version_match(Package, 1)
#defined node_compiler_hard/2.
#defined node_compiler_version_hard/3.
+#defined node_compiler_version_constraint/3.
+#defined node_compiler_version_satisfies/3.
% compilers weighted by preference acccording to packages.yaml
compiler_weight(Package, Weight)
@@ -359,7 +380,7 @@ root(Dependency, 1) :- not root(Dependency), node(Dependency).
% prefer more recent versions.
#minimize{
- Weight@8,Package,Version : version_weight(Package, Version, Weight)
+ Weight@8,Package : version_weight(Package, Weight)
}.
% compiler preferences
diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash
index 2a11fac65c..7d54414397 100755
--- a/share/spack/spack-completion.bash
+++ b/share/spack/spack-completion.bash
@@ -320,7 +320,7 @@ _spack() {
then
SPACK_COMPREPLY="-h --help -H --all-help --color -C --config-scope -d --debug --timestamp --pdb -e --env -D --env-dir -E --no-env --use-env-repo -k --insecure -l --enable-locks -L --disable-locks -m --mock -p --profile --sorted-profile --lines -v --verbose --stacktrace -V --version --print-shell-vars"
else
- SPACK_COMPREPLY="activate add arch blame build-env buildcache cd checksum ci clean clone commands compiler compilers concretize config containerize create deactivate debug dependencies dependents deprecate dev-build develop docs edit env extensions external fetch find flake8 gc gpg graph help info install license list load location log-parse maintainers mirror module patch pkg providers pydoc python reindex remove rm repo resource restage setup spec stage test tutorial undevelop uninstall unload url verify versions view"
+ SPACK_COMPREPLY="activate add arch blame build-env buildcache cd checksum ci clean clone commands compiler compilers concretize config containerize create deactivate debug dependencies dependents deprecate dev-build develop docs edit env extensions external fetch find flake8 gc gpg graph help info install license list load location log-parse maintainers mirror module patch pkg providers pydoc python reindex remove rm repo resource restage setup solve spec stage test tutorial undevelop uninstall unload url verify versions view"
fi
}
@@ -1464,6 +1464,15 @@ _spack_setup() {
fi
}
+_spack_solve() {
+ if $list_options
+ then
+ SPACK_COMPREPLY="-h --help --show --models -l --long -L --very-long -I --install-status -y --yaml -j --json -c --cover -N --namespaces -t --types --timers --stats"
+ else
+ _all_packages
+ fi
+}
+
_spack_spec() {
if $list_options
then