summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--lib/spack/spack/graph.py90
-rw-r--r--lib/spack/spack/hooks/sbang.py12
-rw-r--r--lib/spack/spack/spec.py4
-rw-r--r--lib/spack/spack/test/sbang.py17
-rw-r--r--lib/spack/spack/test/spec_dag.py128
-rw-r--r--lib/spack/spack/util/web.py28
-rw-r--r--var/spack/repos/builtin/packages/dealii/package.py17
-rw-r--r--var/spack/repos/builtin/packages/libmonitor/package.py8
-rw-r--r--var/spack/repos/builtin/packages/ocaml/package.py43
-rw-r--r--var/spack/repos/builtin/packages/trilinos/package.py1
-rw-r--r--var/spack/repos/builtin/packages/unison/package.py51
-rw-r--r--var/spack/repos/builtin/packages/xerces-c/package.py9
12 files changed, 259 insertions, 149 deletions
diff --git a/lib/spack/spack/graph.py b/lib/spack/spack/graph.py
index 063e4647b6..80d1199ef5 100644
--- a/lib/spack/spack/graph.py
+++ b/lib/spack/spack/graph.py
@@ -61,7 +61,6 @@ Note that ``graph_ascii`` assumes a single spec while ``graph_dot``
can take a number of specs as input.
"""
-__all__ = ['topological_sort', 'graph_ascii', 'AsciiGraph', 'graph_dot']
from heapq import *
@@ -71,6 +70,8 @@ from llnl.util.tty.color import *
import spack
from spack.spec import Spec
+__all__ = ['topological_sort', 'graph_ascii', 'AsciiGraph', 'graph_dot']
+
def topological_sort(spec, **kwargs):
"""Topological sort for specs.
@@ -94,6 +95,7 @@ def topological_sort(spec, **kwargs):
nodes = spec.index()
topo_order = []
+ par = dict((name, parents(nodes[name])) for name in nodes.keys())
remaining = [name for name in nodes.keys() if not parents(nodes[name])]
heapify(remaining)
@@ -102,12 +104,12 @@ def topological_sort(spec, **kwargs):
topo_order.append(name)
node = nodes[name]
- for dep in children(node).values():
- del parents(dep)[node.name]
- if not parents(dep):
+ for dep in children(node):
+ par[dep.name].remove(node)
+ if not par[dep.name]:
heappush(remaining, dep.name)
- if any(parents(s) for s in spec.traverse()):
+ if any(par.get(s.name, []) for s in spec.traverse()):
raise ValueError("Spec has cycles!")
else:
return topo_order
@@ -132,6 +134,7 @@ def find(seq, predicate):
states = ('node', 'collapse', 'merge-right', 'expand-right', 'back-edge')
NODE, COLLAPSE, MERGE_RIGHT, EXPAND_RIGHT, BACK_EDGE = states
+
class AsciiGraph(object):
def __init__(self):
# These can be set after initialization or after a call to
@@ -153,18 +156,15 @@ class AsciiGraph(object):
self._prev_state = None # State of previous line
self._prev_index = None # Index of expansion point of prev line
-
def _indent(self):
self._out.write(self.indent * ' ')
-
def _write_edge(self, string, index, sub=0):
"""Write a colored edge to the output stream."""
name = self._frontier[index][sub]
edge = "@%s{%s}" % (self._name_to_color[name], string)
self._out.write(edge)
-
def _connect_deps(self, i, deps, label=None):
"""Connect dependencies to existing edges in the frontier.
@@ -199,7 +199,8 @@ class AsciiGraph(object):
collapse = True
if self._prev_state == EXPAND_RIGHT:
# Special case where previous line expanded and i is off by 1.
- self._back_edge_line([], j, i+1, True, label + "-1.5 " + str((i+1,j)))
+ self._back_edge_line([], j, i + 1, True,
+ label + "-1.5 " + str((i + 1, j)))
collapse = False
else:
@@ -207,19 +208,20 @@ class AsciiGraph(object):
if self._prev_state == NODE and self._prev_index < i:
i += 1
- if i-j > 1:
+ if i - j > 1:
# We need two lines to connect if distance > 1
- self._back_edge_line([], j, i, True, label + "-1 " + str((i,j)))
+ self._back_edge_line([], j, i, True,
+ label + "-1 " + str((i, j)))
collapse = False
- self._back_edge_line([j], -1, -1, collapse, label + "-2 " + str((i,j)))
+ self._back_edge_line([j], -1, -1, collapse,
+ label + "-2 " + str((i, j)))
return True
elif deps:
self._frontier.insert(i, deps)
return False
-
def _set_state(self, state, index, label=None):
if state not in states:
raise ValueError("Invalid graph state!")
@@ -233,7 +235,6 @@ class AsciiGraph(object):
self._out.write("%-20s" % (str(label) if label else ''))
self._out.write("%s" % self._frontier)
-
def _back_edge_line(self, prev_ends, end, start, collapse, label=None):
"""Write part of a backwards edge in the graph.
@@ -287,27 +288,26 @@ class AsciiGraph(object):
self._indent()
for p in prev_ends:
- advance(p, lambda: [("| ", self._pos)] )
- advance(p+1, lambda: [("|/", self._pos)] )
+ advance(p, lambda: [("| ", self._pos)]) # NOQA: ignore=E272
+ advance(p + 1, lambda: [("|/", self._pos)]) # NOQA: ignore=E272
if end >= 0:
- advance(end + 1, lambda: [("| ", self._pos)] )
- advance(start - 1, lambda: [("|", self._pos), ("_", end)] )
+ advance(end + 1, lambda: [("| ", self._pos)]) # NOQA: ignore=E272
+ advance(start - 1, lambda: [("|", self._pos), ("_", end)]) # NOQA: ignore=E272
else:
- advance(start - 1, lambda: [("| ", self._pos)] )
+ advance(start - 1, lambda: [("| ", self._pos)]) # NOQA: ignore=E272
if start >= 0:
- advance(start, lambda: [("|", self._pos), ("/", end)] )
+ advance(start, lambda: [("|", self._pos), ("/", end)]) # NOQA: ignore=E272
if collapse:
- advance(flen, lambda: [(" /", self._pos)] )
+ advance(flen, lambda: [(" /", self._pos)]) # NOQA: ignore=E272
else:
- advance(flen, lambda: [("| ", self._pos)] )
+ advance(flen, lambda: [("| ", self._pos)]) # NOQA: ignore=E272
self._set_state(BACK_EDGE, end, label)
self._out.write("\n")
-
def _node_line(self, index, name):
"""Writes a line with a node at index."""
self._indent()
@@ -316,14 +316,13 @@ class AsciiGraph(object):
self._out.write("%s " % self.node_character)
- for c in range(index+1, len(self._frontier)):
+ for c in range(index + 1, len(self._frontier)):
self._write_edge("| ", c)
self._out.write(" %s" % name)
self._set_state(NODE, index)
self._out.write("\n")
-
def _collapse_line(self, index):
"""Write a collapsing line after a node was added at index."""
self._indent()
@@ -335,36 +334,33 @@ class AsciiGraph(object):
self._set_state(COLLAPSE, index)
self._out.write("\n")
-
def _merge_right_line(self, index):
"""Edge at index is same as edge to right. Merge directly with '\'"""
self._indent()
for c in range(index):
self._write_edge("| ", c)
self._write_edge("|", index)
- self._write_edge("\\", index+1)
- for c in range(index+1, len(self._frontier)):
- self._write_edge("| ", c )
+ self._write_edge("\\", index + 1)
+ for c in range(index + 1, len(self._frontier)):
+ self._write_edge("| ", c)
self._set_state(MERGE_RIGHT, index)
self._out.write("\n")
-
def _expand_right_line(self, index):
self._indent()
for c in range(index):
self._write_edge("| ", c)
self._write_edge("|", index)
- self._write_edge("\\", index+1)
+ self._write_edge("\\", index + 1)
- for c in range(index+2, len(self._frontier)):
+ for c in range(index + 2, len(self._frontier)):
self._write_edge(" \\", c)
self._set_state(EXPAND_RIGHT, index)
self._out.write("\n")
-
def write(self, spec, **kwargs):
"""Write out an ascii graph of the provided spec.
@@ -398,7 +394,7 @@ class AsciiGraph(object):
# Colors associated with each node in the DAG.
# Edges are colored by the node they point to.
self._name_to_color = dict((name, self.colors[i % len(self.colors)])
- for i, name in enumerate(topo_order))
+ for i, name in enumerate(topo_order))
# Frontier tracks open edges of the graph as it's written out.
self._frontier = [[spec.name]]
@@ -407,7 +403,8 @@ class AsciiGraph(object):
i = find(self._frontier, lambda f: len(f) > 1)
if i >= 0:
- # Expand frontier until there are enough columns for all children.
+ # Expand frontier until there are enough columns for all
+ # children.
# Figure out how many back connections there are and
# sort them so we do them in order
@@ -424,8 +421,9 @@ class AsciiGraph(object):
prev_ends = []
for j, (b, d) in enumerate(back):
self._frontier[i].remove(d)
- if i-b > 1:
- self._back_edge_line(prev_ends, b, i, False, 'left-1')
+ if i - b > 1:
+ self._back_edge_line(prev_ends, b, i, False,
+ 'left-1')
del prev_ends[:]
prev_ends.append(b)
@@ -439,12 +437,13 @@ class AsciiGraph(object):
elif len(self._frontier[i]) > 1:
# Expand forward after doing all back connections
- if (i+1 < len(self._frontier) and len(self._frontier[i+1]) == 1
- and self._frontier[i+1][0] in self._frontier[i]):
+ if (i + 1 < len(self._frontier) and
+ len(self._frontier[i + 1]) == 1 and
+ self._frontier[i + 1][0] in self._frontier[i]):
# We need to connect to the element to the right.
# Keep lines straight by connecting directly and
# avoiding unnecessary expand/contract.
- name = self._frontier[i+1][0]
+ name = self._frontier[i + 1][0]
self._frontier[i].remove(name)
self._merge_right_line(i)
@@ -458,9 +457,8 @@ class AsciiGraph(object):
self._frontier.pop(i)
self._connect_deps(i, deps, "post-expand")
-
# Handle any remaining back edges to the right
- j = i+1
+ j = i + 1
while j < len(self._frontier):
deps = self._frontier.pop(j)
if not self._connect_deps(j, deps, "back-from-right"):
@@ -477,9 +475,10 @@ class AsciiGraph(object):
# Replace node with its dependencies
self._frontier.pop(i)
- if node.dependencies:
- deps = sorted((d for d in node.dependencies), reverse=True)
- self._connect_deps(i, deps, "new-deps") # anywhere.
+ if node.dependencies():
+ deps = sorted((d.name for d in node.dependencies()),
+ reverse=True)
+ self._connect_deps(i, deps, "new-deps") # anywhere.
elif self._frontier:
self._collapse_line(i)
@@ -501,7 +500,6 @@ def graph_ascii(spec, **kwargs):
graph.write(spec, color=color, out=out)
-
def graph_dot(*specs, **kwargs):
"""Generate a graph in dot format of all provided specs.
diff --git a/lib/spack/spack/hooks/sbang.py b/lib/spack/spack/hooks/sbang.py
index 3a957c6e0e..02c1ce3816 100644
--- a/lib/spack/spack/hooks/sbang.py
+++ b/lib/spack/spack/hooks/sbang.py
@@ -23,6 +23,7 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
+import stat
import re
import llnl.util.tty as tty
@@ -62,10 +63,21 @@ def filter_shebang(path):
if re.search(r'^#!(/[^/]*)*lua\b', original):
original = re.sub(r'^#', '--', original)
+ # Change non-writable files to be writable if needed.
+ saved_mode = None
+ if not os.access(path, os.W_OK):
+ st = os.stat(path)
+ saved_mode = st.st_mode
+ os.chmod(path, saved_mode | stat.S_IWRITE)
+
with open(path, 'w') as new_file:
new_file.write(new_sbang_line)
new_file.write(original)
+ # Restore original permissions.
+ if saved_mode is not None:
+ os.chmod(path, saved_mode)
+
tty.warn("Patched overlong shebang in %s" % path)
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index c6277fc8d2..8e44075f42 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -460,7 +460,7 @@ class DependencyMap(HashableMap):
def __str__(self):
return ''.join(
- ["^" + str(self[name].spec) for name in sorted(self.keys())])
+ ["^" + self[name].format() for name in sorted(self.keys())])
@key_ordering
@@ -861,7 +861,7 @@ class Spec(object):
for name in sorted(successors):
child = successors[name]
children = child.spec.traverse_with_deptype(
- visited, d=d + 1, deptype=deptype_query,
+ visited, d=d + 1, deptype=deptype,
deptype_query=deptype_query,
_self_deptype=child.deptypes, **kwargs)
for elt in children:
diff --git a/lib/spack/spack/test/sbang.py b/lib/spack/spack/test/sbang.py
index ed54ff90b0..4ce854a1d8 100644
--- a/lib/spack/spack/test/sbang.py
+++ b/lib/spack/spack/test/sbang.py
@@ -26,6 +26,7 @@
Test that Spack's shebang filtering works correctly.
"""
import os
+import stat
import unittest
import tempfile
import shutil
@@ -41,6 +42,7 @@ lua_line_patched = "--!/this/" + ('x' * 200) + "/is/lua\n"
sbang_line = '#!/bin/bash %s/bin/sbang\n' % spack.spack_root
last_line = "last!\n"
+
class SbangTest(unittest.TestCase):
def setUp(self):
self.tempdir = tempfile.mkdtemp()
@@ -74,10 +76,8 @@ class SbangTest(unittest.TestCase):
f.write(long_line)
f.write(last_line)
-
def tearDown(self):
- shutil.rmtree(self.tempdir, ignore_errors=True)
-
+ shutil.rmtree(self.tempdir, ignore_errors=True)
def test_shebang_handling(self):
filter_shebangs_in_directory(self.tempdir)
@@ -104,3 +104,14 @@ class SbangTest(unittest.TestCase):
self.assertEqual(f.readline(), sbang_line)
self.assertEqual(f.readline(), long_line)
self.assertEqual(f.readline(), last_line)
+
+ def test_shebang_handles_non_writable_files(self):
+ # make a file non-writable
+ st = os.stat(self.long_shebang)
+ not_writable_mode = st.st_mode & ~stat.S_IWRITE
+ os.chmod(self.long_shebang, not_writable_mode)
+
+ self.test_shebang_handling()
+
+ st = os.stat(self.long_shebang)
+ self.assertEqual(oct(not_writable_mode), oct(st.st_mode))
diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py
index 972e79aa20..8522431fbb 100644
--- a/lib/spack/spack/test/spec_dag.py
+++ b/lib/spack/spack/test/spec_dag.py
@@ -32,8 +32,6 @@ import spack
import spack.architecture
import spack.package
-from llnl.util.lang import list_modules
-
from spack.spec import Spec
from spack.test.mock_packages_test import *
@@ -51,21 +49,19 @@ class SpecDagTest(MockPackagesTest):
self.assertRaises(spack.spec.UnsatisfiableVersionSpecError,
spec.normalize)
-
def test_preorder_node_traversal(self):
dag = Spec('mpileaks ^zmpi')
dag.normalize()
names = ['mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf',
'zmpi', 'fake']
- pairs = zip([0,1,2,3,4,2,3], names)
+ pairs = zip([0, 1, 2, 3, 4, 2, 3], names)
traversal = dag.traverse()
self.assertEqual([x.name for x in traversal], names)
traversal = dag.traverse(depth=True)
- self.assertEqual([(x, y.name) for x,y in traversal], pairs)
-
+ self.assertEqual([(x, y.name) for x, y in traversal], pairs)
def test_preorder_edge_traversal(self):
dag = Spec('mpileaks ^zmpi')
@@ -73,14 +69,13 @@ class SpecDagTest(MockPackagesTest):
names = ['mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf',
'libelf', 'zmpi', 'fake', 'zmpi']
- pairs = zip([0,1,2,3,4,3,2,3,1], names)
+ pairs = zip([0, 1, 2, 3, 4, 3, 2, 3, 1], names)
traversal = dag.traverse(cover='edges')
self.assertEqual([x.name for x in traversal], names)
traversal = dag.traverse(cover='edges', depth=True)
- self.assertEqual([(x, y.name) for x,y in traversal], pairs)
-
+ self.assertEqual([(x, y.name) for x, y in traversal], pairs)
def test_preorder_path_traversal(self):
dag = Spec('mpileaks ^zmpi')
@@ -88,14 +83,13 @@ class SpecDagTest(MockPackagesTest):
names = ['mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf',
'libelf', 'zmpi', 'fake', 'zmpi', 'fake']
- pairs = zip([0,1,2,3,4,3,2,3,1,2], names)
+ pairs = zip([0, 1, 2, 3, 4, 3, 2, 3, 1, 2], names)
traversal = dag.traverse(cover='paths')
self.assertEqual([x.name for x in traversal], names)
traversal = dag.traverse(cover='paths', depth=True)
- self.assertEqual([(x, y.name) for x,y in traversal], pairs)
-
+ self.assertEqual([(x, y.name) for x, y in traversal], pairs)
def test_postorder_node_traversal(self):
dag = Spec('mpileaks ^zmpi')
@@ -103,14 +97,13 @@ class SpecDagTest(MockPackagesTest):
names = ['libelf', 'libdwarf', 'dyninst', 'fake', 'zmpi',
'callpath', 'mpileaks']
- pairs = zip([4,3,2,3,2,1,0], names)
+ pairs = zip([4, 3, 2, 3, 2, 1, 0], names)
traversal = dag.traverse(order='post')
self.assertEqual([x.name for x in traversal], names)
traversal = dag.traverse(depth=True, order='post')
- self.assertEqual([(x, y.name) for x,y in traversal], pairs)
-
+ self.assertEqual([(x, y.name) for x, y in traversal], pairs)
def test_postorder_edge_traversal(self):
dag = Spec('mpileaks ^zmpi')
@@ -118,14 +111,13 @@ class SpecDagTest(MockPackagesTest):
names = ['libelf', 'libdwarf', 'libelf', 'dyninst', 'fake', 'zmpi',
'callpath', 'zmpi', 'mpileaks']
- pairs = zip([4,3,3,2,3,2,1,1,0], names)
+ pairs = zip([4, 3, 3, 2, 3, 2, 1, 1, 0], names)
traversal = dag.traverse(cover='edges', order='post')
self.assertEqual([x.name for x in traversal], names)
traversal = dag.traverse(cover='edges', depth=True, order='post')
- self.assertEqual([(x, y.name) for x,y in traversal], pairs)
-
+ self.assertEqual([(x, y.name) for x, y in traversal], pairs)
def test_postorder_path_traversal(self):
dag = Spec('mpileaks ^zmpi')
@@ -133,14 +125,13 @@ class SpecDagTest(MockPackagesTest):
names = ['libelf', 'libdwarf', 'libelf', 'dyninst', 'fake', 'zmpi',
'callpath', 'fake', 'zmpi', 'mpileaks']
- pairs = zip([4,3,3,2,3,2,1,2,1,0], names)
+ pairs = zip([4, 3, 3, 2, 3, 2, 1, 2, 1, 0], names)
traversal = dag.traverse(cover='paths', order='post')
self.assertEqual([x.name for x in traversal], names)
traversal = dag.traverse(cover='paths', depth=True, order='post')
- self.assertEqual([(x, y.name) for x,y in traversal], pairs)
-
+ self.assertEqual([(x, y.name) for x, y in traversal], pairs)
def test_conflicting_spec_constraints(self):
mpileaks = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf ^libdwarf')
@@ -153,8 +144,7 @@ class SpecDagTest(MockPackagesTest):
spec._dependencies['mpich'].spec = Spec('mpich@2.0')
self.assertRaises(spack.spec.InconsistentSpecError,
- lambda: mpileaks.flat_dependencies(copy=False))
-
+ lambda: mpileaks.flat_dependencies(copy=False))
def test_normalize_twice(self):
"""Make sure normalize can be run twice on the same spec,
@@ -166,7 +156,6 @@ class SpecDagTest(MockPackagesTest):
spec.normalize()
self.assertEqual(n1, spec)
-
def test_normalize_a_lot(self):
spec = Spec('mpileaks')
spec.normalize()
@@ -174,7 +163,6 @@ class SpecDagTest(MockPackagesTest):
spec.normalize()
spec.normalize()
-
def test_normalize_with_virtual_spec(self):
dag = Spec('mpileaks',
Spec('callpath',
@@ -189,80 +177,80 @@ class SpecDagTest(MockPackagesTest):
# make sure nothing with the same name occurs twice
counts = {}
for spec in dag.traverse(key=id):
- if not spec.name in counts:
+ if spec.name not in counts:
counts[spec.name] = 0
counts[spec.name] += 1
for name in counts:
self.assertEqual(counts[name], 1, "Count for %s was not 1!" % name)
-
def check_links(self, spec_to_check):
for spec in spec_to_check.traverse():
for dependent in spec.dependents():
self.assertTrue(
spec.name in dependent.dependencies_dict(),
"%s not in dependencies of %s" %
- (spec.name, dependent.name))
+ (spec.name, dependent.name))
for dependency in spec.dependencies():
self.assertTrue(
spec.name in dependency.dependents_dict(),
"%s not in dependents of %s" %
- (spec.name, dependency.name))
-
+ (spec.name, dependency.name))
def test_dependents_and_dependencies_are_correct(self):
spec = Spec('mpileaks',
- Spec('callpath',
- Spec('dyninst',
- Spec('libdwarf',
- Spec('libelf')),
- Spec('libelf')),
- Spec('mpi')),
- Spec('mpi'))
+ Spec('callpath',
+ Spec('dyninst',
+ Spec('libdwarf',
+ Spec('libelf')),
+ Spec('libelf')),
+ Spec('mpi')),
+ Spec('mpi'))
self.check_links(spec)
spec.normalize()
self.check_links(spec)
-
def test_unsatisfiable_version(self):
self.set_pkg_dep('mpileaks', 'mpich@1.0')
spec = Spec('mpileaks ^mpich@2.0 ^callpath ^dyninst ^libelf ^libdwarf')
- self.assertRaises(spack.spec.UnsatisfiableVersionSpecError, spec.normalize)
-
+ self.assertRaises(spack.spec.UnsatisfiableVersionSpecError,
+ spec.normalize)
def test_unsatisfiable_compiler(self):
self.set_pkg_dep('mpileaks', 'mpich%gcc')
- spec = Spec('mpileaks ^mpich%intel ^callpath ^dyninst ^libelf ^libdwarf')
- self.assertRaises(spack.spec.UnsatisfiableCompilerSpecError, spec.normalize)
-
+ spec = Spec('mpileaks ^mpich%intel ^callpath ^dyninst ^libelf'
+ ' ^libdwarf')
+ self.assertRaises(spack.spec.UnsatisfiableCompilerSpecError,
+ spec.normalize)
def test_unsatisfiable_compiler_version(self):
self.set_pkg_dep('mpileaks', 'mpich%gcc@4.6')
- spec = Spec('mpileaks ^mpich%gcc@4.5 ^callpath ^dyninst ^libelf ^libdwarf')
- self.assertRaises(spack.spec.UnsatisfiableCompilerSpecError, spec.normalize)
-
+ spec = Spec('mpileaks ^mpich%gcc@4.5 ^callpath ^dyninst ^libelf'
+ ' ^libdwarf')
+ self.assertRaises(spack.spec.UnsatisfiableCompilerSpecError,
+ spec.normalize)
def test_unsatisfiable_architecture(self):
- platform = spack.architecture.platform()
-
self.set_pkg_dep('mpileaks', 'mpich platform=test target=be')
- spec = Spec('mpileaks ^mpich platform=test target=fe ^callpath ^dyninst ^libelf ^libdwarf')
- self.assertRaises(spack.spec.UnsatisfiableArchitectureSpecError, spec.normalize)
-
+ spec = Spec('mpileaks ^mpich platform=test target=fe ^callpath'
+ ' ^dyninst ^libelf ^libdwarf')
+ self.assertRaises(spack.spec.UnsatisfiableArchitectureSpecError,
+ spec.normalize)
def test_invalid_dep(self):
spec = Spec('libelf ^mpich')
- self.assertRaises(spack.spec.InvalidDependencyException, spec.normalize)
+ self.assertRaises(spack.spec.InvalidDependencyException,
+ spec.normalize)
spec = Spec('libelf ^libdwarf')
- self.assertRaises(spack.spec.InvalidDependencyException, spec.normalize)
+ self.assertRaises(spack.spec.InvalidDependencyException,
+ spec.normalize)
spec = Spec('mpich ^dyninst ^libelf')
- self.assertRaises(spack.spec.InvalidDependencyException, spec.normalize)
-
+ self.assertRaises(spack.spec.InvalidDependencyException,
+ spec.normalize)
def test_equal(self):
# Different spec structures to test for equality
@@ -301,10 +289,10 @@ class SpecDagTest(MockPackagesTest):
self.assertFalse(flip_flat.eq_dag(flip_dag))
self.assertFalse(dag.eq_dag(flip_dag))
-
def test_normalize_mpileaks(self):
# Spec parsed in from a string
- spec = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf@1.8.11 ^libdwarf')
+ spec = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf@1.8.11'
+ ' ^libdwarf')
# What that spec should look like after parsing
expected_flat = Spec(
@@ -367,7 +355,6 @@ class SpecDagTest(MockPackagesTest):
self.assertEqual(spec, non_unique_nodes)
self.assertFalse(spec.eq_dag(non_unique_nodes))
-
def test_normalize_with_virtual_package(self):
spec = Spec('mpileaks ^mpi ^libelf@1.8.11 ^libdwarf')
spec.normalize()
@@ -383,7 +370,6 @@ class SpecDagTest(MockPackagesTest):
self.assertEqual(str(spec), str(expected_normalized))
-
def test_contains(self):
spec = Spec('mpileaks ^mpi ^libelf@1.8.11 ^libdwarf')
self.assertTrue(Spec('mpi') in spec)
@@ -394,7 +380,6 @@ class SpecDagTest(MockPackagesTest):
self.assertFalse(Spec('libgoblin') in spec)
self.assertTrue(Spec('mpileaks') in spec)
-
def test_copy_simple(self):
orig = Spec('mpileaks')
copy = orig.copy()
@@ -411,7 +396,6 @@ class SpecDagTest(MockPackagesTest):
copy_ids = set(id(s) for s in copy.traverse())
self.assertFalse(orig_ids.intersection(copy_ids))
-
def test_copy_normalized(self):
orig = Spec('mpileaks')
orig.normalize()
@@ -429,7 +413,6 @@ class SpecDagTest(MockPackagesTest):
copy_ids = set(id(s) for s in copy.traverse())
self.assertFalse(orig_ids.intersection(copy_ids))
-
def test_copy_concretized(self):
orig = Spec('mpileaks')
orig.concretize()
@@ -476,20 +459,20 @@ class SpecDagTest(MockPackagesTest):
dag = Spec('dtuse')
dag.normalize()
- names = ['dtuse', 'dttop', 'dtlink1', 'dtlink3', 'dtlink4',
- 'dtrun1', 'dtlink5', 'dtrun3']
+ names = ['dtuse', 'dttop', 'dtbuild1', 'dtbuild2', 'dtlink2',
+ 'dtlink1', 'dtlink3', 'dtlink4']
- traversal = dag.traverse()
+ traversal = dag.traverse(deptype=('build', 'link'))
self.assertEqual([x.name for x in traversal], names)
def test_deptype_traversal_with_builddeps(self):
dag = Spec('dttop')
dag.normalize()
- names = ['dttop', 'dtbuild1', 'dtlink2', 'dtrun2', 'dtlink1',
- 'dtlink3', 'dtlink4', 'dtrun1', 'dtlink5', 'dtrun3']
+ names = ['dttop', 'dtbuild1', 'dtbuild2', 'dtlink2',
+ 'dtlink1', 'dtlink3', 'dtlink4']
- traversal = dag.traverse()
+ traversal = dag.traverse(deptype=('build', 'link'))
self.assertEqual([x.name for x in traversal], names)
def test_deptype_traversal_full(self):
@@ -500,15 +483,14 @@ class SpecDagTest(MockPackagesTest):
'dtlink1', 'dtlink3', 'dtlink4', 'dtrun1', 'dtlink5',
'dtrun3', 'dtbuild3']
- traversal = dag.traverse(deptype_query=spack.alldeps)
+ traversal = dag.traverse(deptype=spack.alldeps)
self.assertEqual([x.name for x in traversal], names)
- def test_deptype_traversal_pythonpath(self):
+ def test_deptype_traversal_run(self):
dag = Spec('dttop')
dag.normalize()
- names = ['dttop', 'dtbuild1', 'dtrun2', 'dtlink1', 'dtrun1',
- 'dtrun3']
+ names = ['dttop', 'dtrun1', 'dtrun3']
- traversal = dag.traverse(deptype=spack.nolink, deptype_query='run')
+ traversal = dag.traverse(deptype='run')
self.assertEqual([x.name for x in traversal], names)
diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py
index 47abc507e0..cac783a368 100644
--- a/lib/spack/spack/util/web.py
+++ b/lib/spack/spack/util/web.py
@@ -25,8 +25,7 @@
import re
import os
import sys
-import subprocess
-import urllib2, cookielib
+import urllib2
import urlparse
from multiprocessing import Pool
from HTMLParser import HTMLParser, HTMLParseError
@@ -84,7 +83,7 @@ def _spider(args):
req.get_method = lambda: "HEAD"
resp = urllib2.urlopen(req, timeout=TIMEOUT)
- if not "Content-type" in resp.headers:
+ if "Content-type" not in resp.headers:
tty.debug("ignoring page " + url)
return pages, links
@@ -125,11 +124,11 @@ def _spider(args):
if abs_link in visited:
continue
- # If we're not at max depth, follow links.
- if depth < max_depth:
- subcalls.append((abs_link, visited, root, None,
- depth+1, max_depth, raise_on_error))
- visited.add(abs_link)
+ # If we're not at max depth, follow links.
+ if depth < max_depth:
+ subcalls.append((abs_link, visited, root, None,
+ depth + 1, max_depth, raise_on_error))
+ visited.add(abs_link)
if subcalls:
try:
@@ -142,22 +141,22 @@ def _spider(args):
pool.terminate()
pool.join()
- except urllib2.URLError, e:
+ except urllib2.URLError as e:
tty.debug(e)
if raise_on_error:
raise spack.error.NoNetworkConnectionError(str(e), url)
- except HTMLParseError, e:
+ except HTMLParseError as e:
# This error indicates that Python's HTML parser sucks.
msg = "Got an error parsing HTML."
# Pre-2.7.3 Pythons in particular have rather prickly HTML parsing.
- if sys.version_info[:3] < (2,7,3):
+ if sys.version_info[:3] < (2, 7, 3):
msg += " Use Python 2.7.3 or newer for better HTML parsing."
tty.warn(msg, url, "HTMLParseError: " + str(e))
- except Exception, e:
+ except Exception as e:
# Other types of errors are completely ignored, except in debug mode.
tty.debug("Error in _spider: %s" % e)
@@ -173,7 +172,8 @@ def spider(root_url, **kwargs):
performance over a sequential fetch.
"""
max_depth = kwargs.setdefault('depth', 1)
- pages, links = _spider((root_url, set(), root_url, None, 1, max_depth, False))
+ pages, links = _spider((root_url, set(), root_url, None,
+ 1, max_depth, False))
return pages, links
@@ -235,7 +235,7 @@ def find_versions_of_archive(*archive_urls, **kwargs):
try:
ver = spack.url.parse_version(url)
versions[ver] = url
- except spack.url.UndetectableVersionError as e:
+ except spack.url.UndetectableVersionError:
continue
return versions
diff --git a/var/spack/repos/builtin/packages/dealii/package.py b/var/spack/repos/builtin/packages/dealii/package.py
index 54604d351f..18c0849f68 100644
--- a/var/spack/repos/builtin/packages/dealii/package.py
+++ b/var/spack/repos/builtin/packages/dealii/package.py
@@ -51,14 +51,21 @@ class Dealii(Package):
variant('petsc', default=True, description='Compile with Petsc (only with MPI)')
variant('slepc', default=True, description='Compile with Slepc (only with Petsc and MPI)')
variant('trilinos', default=True, description='Compile with Trilinos (only with MPI)')
+ variant('python', default=True, description='Compile with Python bindings')
# required dependencies, light version
depends_on("blas")
# Boost 1.58 is blacklisted, see
# https://github.com/dealii/dealii/issues/1591
# Require at least 1.59
- depends_on("boost@1.59.0:+thread+system+serialization+iostreams", when='~mpi') # NOQA: ignore=E501
- depends_on("boost@1.59.0:+mpi+thread+system+serialization+iostreams", when='+mpi') # NOQA: ignore=E501
+ # +python won't affect @:8.4.1
+ depends_on("boost@1.59.0:+thread+system+serialization+iostreams", when='@:8.4.1~mpi')
+ depends_on("boost@1.59.0:+thread+system+serialization+iostreams+mpi", when='@:8.4.1+mpi')
+ # since @8.5.0: (and @develop) python bindings are introduced:
+ depends_on("boost@1.59.0:+thread+system+serialization+iostreams", when='@8.5.0:~mpi~python')
+ depends_on("boost@1.59.0:+thread+system+serialization+iostreams+mpi", when='@8.5.0:+mpi~python')
+ depends_on("boost@1.59.0:+thread+system+serialization+iostreams+python", when='@8.5.0:~mpi+python')
+ depends_on("boost@1.59.0:+thread+system+serialization+iostreams+mpi+python", when='@8.5.0:+mpi+python')
depends_on("bzip2")
depends_on("cmake", type='build')
depends_on("lapack")
@@ -120,6 +127,12 @@ class Dealii(Package):
'-DZLIB_DIR=%s' % spec['zlib'].prefix
])
+ if spec.satisfies('@8.5.0:'):
+ options.extend([
+ '-DDEAL_II_COMPONENT_PYTHON_BINDINGS=%s' %
+ ('ON' if '+python' in spec else 'OFF')
+ ])
+
# Set directory structure:
if spec.satisfies('@:8.2.1'):
options.extend(['-DDEAL_II_COMPONENT_COMPAT_FILES=OFF'])
diff --git a/var/spack/repos/builtin/packages/libmonitor/package.py b/var/spack/repos/builtin/packages/libmonitor/package.py
index 883d8af405..611e602e2f 100644
--- a/var/spack/repos/builtin/packages/libmonitor/package.py
+++ b/var/spack/repos/builtin/packages/libmonitor/package.py
@@ -24,19 +24,17 @@
##############################################################################
from spack import *
+
class Libmonitor(Package):
"""Libmonitor is a library for process and thread control."""
- homepage = "http://hpctoolkit.org"
-
- version('20130218', svn='http://libmonitor.googlecode.com/svn/trunk/', revision=146)
+ homepage = "https://github.com/HPCToolkit/libmonitor"
+ version('20130218', git='https://github.com/HPCToolkit/libmonitor.git', commit='4f2311e')
variant('krellpatch', default=False, description="build with openspeedshop based patch.")
-
patch('libmonitorkrell-0000.patch', when='@20130218+krellpatch')
patch('libmonitorkrell-0001.patch', when='@20130218+krellpatch')
patch('libmonitorkrell-0002.patch', when='@20130218+krellpatch')
-
def install(self, spec, prefix):
configure("--prefix=" + prefix)
make()
diff --git a/var/spack/repos/builtin/packages/ocaml/package.py b/var/spack/repos/builtin/packages/ocaml/package.py
new file mode 100644
index 0000000000..9488d3b7a6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/ocaml/package.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Ocaml(Package):
+ """OCaml is an industrial strength programming language supporting
+ functional, imperative and object-oriented styles"""
+
+ homepage = "http://ocaml.org/"
+ url = "http://caml.inria.fr/pub/distrib/ocaml-4.03/ocaml-4.03.0.tar.gz"
+
+ version('4.03.0', '43812739ea1b4641cf480f57f977c149')
+
+ depends_on('ncurses')
+
+ def install(self, spec, prefix):
+ configure('-prefix', '{0}'.format(prefix))
+
+ make('world.opt')
+ make('install')
diff --git a/var/spack/repos/builtin/packages/trilinos/package.py b/var/spack/repos/builtin/packages/trilinos/package.py
index 4d1d27e74a..1d83e055c9 100644
--- a/var/spack/repos/builtin/packages/trilinos/package.py
+++ b/var/spack/repos/builtin/packages/trilinos/package.py
@@ -45,6 +45,7 @@ class Trilinos(Package):
homepage = "https://trilinos.org/"
url = "http://trilinos.csbsju.edu/download/files/trilinos-12.2.1-Source.tar.gz"
+ version('12.6.4', 'db25056617c688f6f25092376a03200f')
version('12.6.3', '960f5f4d3f7c3da818e5a5fb4684559eff7e0c25f959ef576561b8a52f0e4d1e')
version('12.6.2', '0c076090508170ddee5efeed317745027f9418319720dc40a072e478775279f9')
version('12.6.1', 'adcf2d3aab74cdda98f88fee19cd1442604199b0515ee3da4d80cbe8f37d00e4')
diff --git a/var/spack/repos/builtin/packages/unison/package.py b/var/spack/repos/builtin/packages/unison/package.py
new file mode 100644
index 0000000000..181e1e6410
--- /dev/null
+++ b/var/spack/repos/builtin/packages/unison/package.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Unison(Package):
+ """Unison is a file-synchronization tool for OSX, Unix, and
+ Windows. It allows two replicas of a collection of files and
+ directories to be stored on different hosts (or different disks
+ on the same host), modified separately, and then brought up to
+ date by propagating the changes in each replica to the
+ other."""
+
+ homepage = "https://www.cis.upenn.edu/~bcpierce/unison/"
+    url      = "https://www.seas.upenn.edu/~bcpierce/unison/download/releases/stable/unison-2.48.4.tar.gz"
+
+ version('2.48.4', '5334b78c7e68169df7de95f4c6c4b60f')
+
+ depends_on('ocaml', type='build')
+
+ parallel = False
+
+ def install(self, spec, prefix):
+ make('./mkProjectInfo')
+ make('UISTYLE=text')
+
+ mkdirp(prefix.bin)
+ install('unison', prefix.bin)
+ set_executable(join_path(prefix.bin, 'unison'))
diff --git a/var/spack/repos/builtin/packages/xerces-c/package.py b/var/spack/repos/builtin/packages/xerces-c/package.py
index 2efccc3c08..d0c2d3d497 100644
--- a/var/spack/repos/builtin/packages/xerces-c/package.py
+++ b/var/spack/repos/builtin/packages/xerces-c/package.py
@@ -24,16 +24,18 @@
##############################################################################
from spack import *
+
class XercesC(Package):
""" Xerces-C++ is a validating XML parser written in a portable subset of C++.
Xerces-C++ makes it easy to give your application the ability to read and
write XML data. A shared library is provided for parsing, generating,
- manipulating, and validating XML documents using the DOM, SAX, and SAX2 APIs.
+ manipulating, and validating XML documents using the DOM, SAX, and SAX2
+ APIs.
"""
homepage = "https://xerces.apache.org/xerces-c"
- url = "https://www.apache.org/dist/xerces/c/3/sources/xerces-c-3.1.3.tar.bz2"
- version('3.1.3', '5e333b55cb43e6b025ddf0e5d0f0fb0d')
+ url = "https://www.apache.org/dist/xerces/c/3/sources/xerces-c-3.1.4.tar.bz2"
+ version('3.1.4', 'd04ae9d8b2dee2157c6db95fa908abfd')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix,
@@ -41,4 +43,3 @@ class XercesC(Package):
make("clean")
make()
make("install")
-