summaryrefslogtreecommitdiff
path: root/lib
diff options
context:
space:
mode:
authorTodd Gamblin <tgamblin@llnl.gov>2018-05-16 10:57:40 -0700
committerscheibelp <scheibel1@llnl.gov>2018-05-17 14:10:30 -0700
commitf202198777b4353c57c26fa87739b306070fd960 (patch)
tree325c7fa10d591359650eb9f87e970d03df55f669 /lib
parent3493f7e793e5bf5fdc0c51978ea476185025c192 (diff)
downloadspack-f202198777b4353c57c26fa87739b306070fd960.tar.gz
spack-f202198777b4353c57c26fa87739b306070fd960.tar.bz2
spack-f202198777b4353c57c26fa87739b306070fd960.tar.xz
spack-f202198777b4353c57c26fa87739b306070fd960.zip
Convert lazy singleton functions to Singleton object
- simplify the singleton pattern across the codebase - reduce lines of code needed for crufty initialization - reduce functions that need to mess with a global - Singletons whose semantics changed: - spack.store.store() -> spack.store - spack.repo.path() -> spack.repo.path - spack.config.config() -> spack.config.config - spack.caches.fetch_cache() -> spack.caches.fetch_cache - spack.caches.misc_cache() -> spack.caches.misc_cache
Diffstat (limited to 'lib')
-rw-r--r--lib/spack/llnl/util/lang.py45
-rw-r--r--lib/spack/spack/binary_distribution.py12
-rw-r--r--lib/spack/spack/build_environment.py2
-rw-r--r--lib/spack/spack/caches.py45
-rw-r--r--lib/spack/spack/cmd/__init__.py4
-rw-r--r--lib/spack/spack/cmd/activate.py4
-rw-r--r--lib/spack/spack/cmd/bootstrap.py2
-rw-r--r--lib/spack/spack/cmd/buildcache.py4
-rw-r--r--lib/spack/spack/cmd/clean.py4
-rw-r--r--lib/spack/spack/cmd/common/arguments.py4
-rw-r--r--lib/spack/spack/cmd/compiler.py2
-rw-r--r--lib/spack/spack/cmd/config.py4
-rw-r--r--lib/spack/spack/cmd/create.py4
-rw-r--r--lib/spack/spack/cmd/deactivate.py6
-rw-r--r--lib/spack/spack/cmd/debug.py6
-rw-r--r--lib/spack/spack/cmd/dependencies.py4
-rw-r--r--lib/spack/spack/cmd/dependents.py8
-rw-r--r--lib/spack/spack/cmd/diy.py2
-rw-r--r--lib/spack/spack/cmd/edit.py4
-rw-r--r--lib/spack/spack/cmd/extensions.py10
-rw-r--r--lib/spack/spack/cmd/find.py2
-rw-r--r--lib/spack/spack/cmd/graph.py2
-rw-r--r--lib/spack/spack/cmd/install.py2
-rw-r--r--lib/spack/spack/cmd/list.py2
-rw-r--r--lib/spack/spack/cmd/location.py4
-rw-r--r--lib/spack/spack/cmd/module.py2
-rw-r--r--lib/spack/spack/cmd/pkg.py2
-rw-r--r--lib/spack/spack/cmd/providers.py4
-rw-r--r--lib/spack/spack/cmd/reindex.py2
-rw-r--r--lib/spack/spack/cmd/setup.py4
-rw-r--r--lib/spack/spack/cmd/uninstall.py6
-rw-r--r--lib/spack/spack/cmd/url.py4
-rw-r--r--lib/spack/spack/cmd/view.py4
-rw-r--r--lib/spack/spack/compilers/__init__.py4
-rw-r--r--lib/spack/spack/concretize.py28
-rw-r--r--lib/spack/spack/config.py57
-rw-r--r--lib/spack/spack/database.py12
-rw-r--r--lib/spack/spack/filesystem_view.py12
-rw-r--r--lib/spack/spack/hooks/yaml_version_check.py2
-rw-r--r--lib/spack/spack/package.py82
-rw-r--r--lib/spack/spack/package_prefs.py2
-rw-r--r--lib/spack/spack/relocate.py12
-rw-r--r--lib/spack/spack/repo.py72
-rw-r--r--lib/spack/spack/reporters/junit.py2
-rw-r--r--lib/spack/spack/spec.py20
-rw-r--r--lib/spack/spack/stage.py4
-rw-r--r--lib/spack/spack/store.py31
-rw-r--r--lib/spack/spack/test/cmd/clean.py8
-rw-r--r--lib/spack/spack/test/cmd/dependencies.py4
-rw-r--r--lib/spack/spack/test/cmd/dependents.py6
-rw-r--r--lib/spack/spack/test/cmd/install.py20
-rw-r--r--lib/spack/spack/test/cmd/uninstall.py2
-rw-r--r--lib/spack/spack/test/concretize.py4
-rw-r--r--lib/spack/spack/test/concretize_preferences.py2
-rw-r--r--lib/spack/spack/test/config.py18
-rw-r--r--lib/spack/spack/test/conftest.py174
-rw-r--r--lib/spack/spack/test/database.py199
-rw-r--r--lib/spack/spack/test/directory_layout.py4
-rw-r--r--lib/spack/spack/test/package_sanity.py2
-rw-r--r--lib/spack/spack/test/spec_syntax.py34
-rw-r--r--lib/spack/spack/util/package_hash.py2
61 files changed, 498 insertions, 537 deletions
diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py
index 087a6b6be9..de5fdc52f1 100644
--- a/lib/spack/llnl/util/lang.py
+++ b/lib/spack/llnl/util/lang.py
@@ -531,3 +531,48 @@ class ObjectWrapper(object):
self.__class__ = type(wrapped_name, (wrapped_cls,), {})
self.__dict__ = wrapped_object.__dict__
+
+
+class Singleton(object):
+ """Simple wrapper for lazily initialized singleton objects."""
+
+ def __init__(self, factory):
+ """Create a new singleton to be inited with the factory function.
+
+ Args:
+ factory (function): function taking no arguments that
+ creates the singleton instance.
+ """
+ self.factory = factory
+ self._instance = None
+
+ @property
+ def instance(self):
+ if self._instance is None:
+ self._instance = self.factory()
+ return self._instance
+
+ def __getattr__(self, name):
+ return getattr(self.instance, name)
+
+ def __str__(self):
+ return str(self.instance)
+
+ def __repr__(self):
+ return repr(self.instance)
+
+
+class LazyReference(object):
+ """Lazily evaluated reference to part of a singleton."""
+
+ def __init__(self, ref_function):
+ self.ref_function = ref_function
+
+ def __getattr__(self, name):
+ return getattr(self.ref_function(), name)
+
+ def __str__(self):
+ return str(self.ref_function())
+
+ def __repr__(self):
+ return repr(self.ref_function())
diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py
index a5bce56c79..5c4e913692 100644
--- a/lib/spack/spack/binary_distribution.py
+++ b/lib/spack/spack/binary_distribution.py
@@ -149,7 +149,7 @@ def write_buildinfo_file(prefix, workdir, rel=False):
# This cuts down on the number of files added to the list
# of files potentially needing relocation
if relocate.strings_contains_installroot(
- path_name, spack.store.store().layout.root):
+ path_name, spack.store.layout.root):
filetype = relocate.get_filetype(path_name)
if relocate.needs_binary_relocation(filetype, os_id):
rel_path_name = os.path.relpath(path_name, prefix)
@@ -161,9 +161,9 @@ def write_buildinfo_file(prefix, workdir, rel=False):
# Create buildinfo data and write it to disk
buildinfo = {}
buildinfo['relative_rpaths'] = rel
- buildinfo['buildpath'] = spack.store.store().layout.root
+ buildinfo['buildpath'] = spack.store.layout.root
buildinfo['relative_prefix'] = os.path.relpath(
- prefix, spack.store.store().layout.root)
+ prefix, spack.store.layout.root)
buildinfo['relocate_textfiles'] = text_to_relocate
buildinfo['relocate_binaries'] = binary_to_relocate
filename = buildinfo_file_name(workdir)
@@ -334,7 +334,7 @@ def build_tarball(spec, outdir, force=False, rel=False, unsigned=False,
# This will be used to determine if the directory layout has changed.
buildinfo = {}
buildinfo['relative_prefix'] = os.path.relpath(
- spec.prefix, spack.store.store().layout.root)
+ spec.prefix, spack.store.layout.root)
spec_dict['buildinfo'] = buildinfo
with open(specfile_path, 'w') as outfile:
outfile.write(yaml.dump(spec_dict))
@@ -414,7 +414,7 @@ def relocate_package(workdir, allow_root):
Relocate the given package
"""
buildinfo = read_buildinfo_file(workdir)
- new_path = spack.store.store().layout.root
+ new_path = spack.store.layout.root
old_path = buildinfo['buildpath']
rel = buildinfo.get('relative_rpaths', False)
if rel:
@@ -493,7 +493,7 @@ def extract_tarball(spec, filename, allow_root=False, unsigned=False,
"It cannot be installed.")
new_relative_prefix = str(os.path.relpath(spec.prefix,
- spack.store.store().layout.root))
+ spack.store.layout.root))
# if the original relative prefix is in the spec file use it
buildinfo = spec_dict.get('buildinfo', {})
old_relative_prefix = buildinfo.get('relative_prefix', new_relative_prefix)
diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py
index 4619c4c206..cdf3e86577 100644
--- a/lib/spack/spack/build_environment.py
+++ b/lib/spack/spack/build_environment.py
@@ -256,7 +256,7 @@ def set_build_environment_variables(pkg, env, dirty):
env.set(SPACK_PREFIX, pkg.prefix)
# Install root prefix
- env.set(SPACK_INSTALL, spack.store.store().root)
+ env.set(SPACK_INSTALL, spack.store.root)
# Stuff in here sanitizes the build environment to eliminate
# anything the user has set that may interfere.
diff --git a/lib/spack/spack/caches.py b/lib/spack/spack/caches.py
index 196cbb8bad..5f3d7406de 100644
--- a/lib/spack/spack/caches.py
+++ b/lib/spack/spack/caches.py
@@ -24,48 +24,47 @@
##############################################################################
"""Caches used by Spack to store data"""
import os
+
+import llnl.util.lang
+
import spack.paths
import spack.config
import spack.fetch_strategy
+import spack.util.file_cache
from spack.util.path import canonicalize_path
-from spack.util.file_cache import FileCache
-
-_misc_cache = None
-_fetch_cache = None
-
-def misc_cache():
+def _misc_cache():
"""The ``misc_cache`` is Spack's cache for small data.
Currently the ``misc_cache`` stores indexes for virtual dependency
providers and for which packages provide which tags.
"""
- global _misc_cache
+ path = spack.config.get('config:misc_cache')
+ if not path:
+ path = os.path.join(spack.paths.user_config_path, 'cache')
+ path = canonicalize_path(path)
+
+ return spack.util.file_cache.FileCache(path)
- if _misc_cache is None:
- path = spack.config.get('config:misc_cache')
- if not path:
- path = os.path.join(spack.paths.user_config_path, 'cache')
- path = canonicalize_path(path)
- _misc_cache = FileCache(path)
- return _misc_cache
+#: Spack's cache for small data
+misc_cache = llnl.util.lang.Singleton(_misc_cache)
-def fetch_cache():
+def _fetch_cache():
"""Filesystem cache of downloaded archives.
This prevents Spack from repeatedly fetching the same files when
building the same package different ways or multiple times.
"""
- global _fetch_cache
+ path = spack.config.get('config:source_cache')
+ if not path:
+ path = os.path.join(spack.paths.var_path, "cache")
+ path = canonicalize_path(path)
+
+ return spack.fetch_strategy.FsCache(path)
- if _fetch_cache is None:
- path = spack.config.get('config:source_cache')
- if not path:
- path = os.path.join(spack.paths.var_path, "cache")
- path = canonicalize_path(path)
- _fetch_cache = spack.fetch_strategy.FsCache(path)
- return _fetch_cache
+#: Spack's local cache for downloaded source archives
+fetch_cache = llnl.util.lang.Singleton(_fetch_cache)
diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py
index 6c71cf7de9..1a58560f66 100644
--- a/lib/spack/spack/cmd/__init__.py
+++ b/lib/spack/spack/cmd/__init__.py
@@ -49,7 +49,7 @@ def default_modify_scope():
Commands that modify configuration by default modify the *highest*
priority scope.
"""
- return spack.config.config().highest_precedence_scope().name
+ return spack.config.config.highest_precedence_scope().name
def default_list_scope():
@@ -197,7 +197,7 @@ def elide_list(line_list, max_num=10):
def disambiguate_spec(spec):
- matching_specs = spack.store.store().db.query(spec)
+ matching_specs = spack.store.db.query(spec)
if not matching_specs:
tty.die("Spec '%s' matches no installed packages." % spec)
diff --git a/lib/spack/spack/cmd/activate.py b/lib/spack/spack/cmd/activate.py
index 6c0e5f767d..34cf2ee720 100644
--- a/lib/spack/spack/cmd/activate.py
+++ b/lib/spack/spack/cmd/activate.py
@@ -54,10 +54,10 @@ def activate(parser, args):
if not spec.package.is_extension:
tty.die("%s is not an extension." % spec.name)
- layout = spack.store.store().extensions
+ layout = spack.store.extensions
if args.view is not None:
layout = YamlViewExtensionsLayout(
- args.view, spack.store.store().layout)
+ args.view, spack.store.layout)
if spec.package.is_activated(extensions_layout=layout):
tty.msg("Package %s is already activated." % specs[0].short_spec)
diff --git a/lib/spack/spack/cmd/bootstrap.py b/lib/spack/spack/cmd/bootstrap.py
index 650e0a8e8d..80abbd8045 100644
--- a/lib/spack/spack/cmd/bootstrap.py
+++ b/lib/spack/spack/cmd/bootstrap.py
@@ -68,7 +68,7 @@ def bootstrap(parser, args, **kwargs):
requirement_dict = {'environment-modules': 'environment-modules~X'}
for requirement in requirement_dict:
- installed_specs = spack.store.store().db.query(requirement)
+ installed_specs = spack.store.db.query(requirement)
if(len(installed_specs) > 0):
tty.msg("Requirement %s is satisfied with installed "
"package %s" % (requirement, installed_specs[0]))
diff --git a/lib/spack/spack/cmd/buildcache.py b/lib/spack/spack/cmd/buildcache.py
index 6d6c49c5bd..a8ee3ae581 100644
--- a/lib/spack/spack/cmd/buildcache.py
+++ b/lib/spack/spack/cmd/buildcache.py
@@ -116,7 +116,7 @@ def find_matching_specs(pkgs, allow_multiple_matches=False, force=False):
has_errors = False
specs = spack.cmd.parse_specs(pkgs)
for spec in specs:
- matching = spack.store.store().db.query(spec)
+ matching = spack.store.db.query(spec)
# For each spec provided, make sure it refers to only one package.
# Fail and ask user to be unambiguous if it doesn't
if not allow_multiple_matches and len(matching) > 1:
@@ -254,7 +254,7 @@ def install_tarball(spec, args):
tty.msg('Installing buildcache for spec %s' % spec.format())
bindist.extract_tarball(spec, tarball, args.allow_root,
args.unsigned, args.force)
- spack.store.store().reindex()
+ spack.store.store.reindex()
else:
tty.die('Download of binary cache file for spec %s failed.' %
spec.format())
diff --git a/lib/spack/spack/cmd/clean.py b/lib/spack/spack/cmd/clean.py
index df0f62e167..fd122714a9 100644
--- a/lib/spack/spack/cmd/clean.py
+++ b/lib/spack/spack/cmd/clean.py
@@ -83,8 +83,8 @@ def clean(parser, args):
if args.downloads:
tty.msg('Removing cached downloads')
- spack.caches.fetch_cache().destroy()
+ spack.caches.fetch_cache.destroy()
if args.misc_cache:
tty.msg('Removing cached information on repositories')
- spack.caches.misc_cache().destroy()
+ spack.caches.misc_cache.destroy()
diff --git a/lib/spack/spack/cmd/common/arguments.py b/lib/spack/spack/cmd/common/arguments.py
index 2b1a47c8df..ea5bc1c25d 100644
--- a/lib/spack/spack/cmd/common/arguments.py
+++ b/lib/spack/spack/cmd/common/arguments.py
@@ -73,12 +73,12 @@ class ConstraintAction(argparse.Action):
# return everything for an empty query.
if not qspecs:
- return spack.store.store().db.query(**kwargs)
+ return spack.store.db.query(**kwargs)
# Return only matching stuff otherwise.
specs = set()
for spec in qspecs:
- for s in spack.store.store().db.query(spec, **kwargs):
+ for s in spack.store.db.query(spec, **kwargs):
specs.add(s)
return sorted(specs)
diff --git a/lib/spack/spack/cmd/compiler.py b/lib/spack/spack/cmd/compiler.py
index e16cb90a6c..186e6adf7f 100644
--- a/lib/spack/spack/cmd/compiler.py
+++ b/lib/spack/spack/cmd/compiler.py
@@ -115,7 +115,7 @@ def compiler_find(args):
n = len(new_compilers)
s = 's' if n > 1 else ''
- config = spack.config.config()
+ config = spack.config.config
filename = config.get_config_filename(args.scope, 'compilers')
tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
colify(reversed(sorted(c.spec for c in new_compilers)), indent=4)
diff --git a/lib/spack/spack/cmd/config.py b/lib/spack/spack/cmd/config.py
index 984126f215..39728e4dc6 100644
--- a/lib/spack/spack/cmd/config.py
+++ b/lib/spack/spack/cmd/config.py
@@ -56,7 +56,7 @@ def setup_parser(subparser):
def config_get(args):
- spack.config.config().print_section(args.section)
+ spack.config.config.print_section(args.section)
def config_edit(args):
@@ -68,7 +68,7 @@ def config_edit(args):
if not args.section:
args.section = None
- config = spack.config.config()
+ config = spack.config.config
config_file = config.get_config_filename(args.scope, args.section)
spack.editor(config_file)
diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py
index 6eab4e4834..899868ba12 100644
--- a/lib/spack/spack/cmd/create.py
+++ b/lib/spack/spack/cmd/create.py
@@ -654,11 +654,11 @@ def get_repository(args, name):
"namespace {1}".format(spec.namespace, repo.namespace))
else:
if spec.namespace:
- repo = spack.repo.path().get_repo(spec.namespace, None)
+ repo = spack.repo.path.get_repo(spec.namespace, None)
if not repo:
tty.die("Unknown namespace: '{0}'".format(spec.namespace))
else:
- repo = spack.repo.path().first_repo()
+ repo = spack.repo.path.first_repo()
# Set the namespace on the spec if it's not there already
if not spec.namespace:
diff --git a/lib/spack/spack/cmd/deactivate.py b/lib/spack/spack/cmd/deactivate.py
index adac9e5e0f..841a295cdb 100644
--- a/lib/spack/spack/cmd/deactivate.py
+++ b/lib/spack/spack/cmd/deactivate.py
@@ -60,15 +60,15 @@ def deactivate(parser, args):
spec = spack.cmd.disambiguate_spec(specs[0])
pkg = spec.package
- layout = spack.store.store().extensions
+ layout = spack.store.extensions
if args.view is not None:
layout = YamlViewExtensionsLayout(
- args.view, spack.store.store().layout)
+ args.view, spack.store.layout)
if args.all:
if pkg.extendable:
tty.msg("Deactivating all extensions of %s" % pkg.spec.short_spec)
- ext_pkgs = spack.store.store().db.activated_extensions_for(
+ ext_pkgs = spack.store.db.activated_extensions_for(
spec, extensions_layout=layout)
for ext_pkg in ext_pkgs:
diff --git a/lib/spack/spack/cmd/debug.py b/lib/spack/spack/cmd/debug.py
index 4aa6f25fa0..2a85be8a00 100644
--- a/lib/spack/spack/cmd/debug.py
+++ b/lib/spack/spack/cmd/debug.py
@@ -76,16 +76,16 @@ def create_db_tarball(args):
tarball_name = "spack-db.%s.tar.gz" % _debug_tarball_suffix()
tarball_path = os.path.abspath(tarball_name)
- base = os.path.basename(spack.store.store().root)
+ base = os.path.basename(spack.store.root)
transform_args = []
if 'GNU' in tar('--version', output=str):
transform_args = ['--transform', 's/^%s/%s/' % (base, tarball_name)]
else:
transform_args = ['-s', '/^%s/%s/' % (base, tarball_name)]
- wd = os.path.dirname(spack.store.store().root)
+ wd = os.path.dirname(spack.store.root)
with working_dir(wd):
- files = [spack.store.store().db._index_path]
+ files = [spack.store.db._index_path]
files += glob('%s/*/*/*/.spack/spec.yaml' % base)
files = [os.path.relpath(f) for f in files]
diff --git a/lib/spack/spack/cmd/dependencies.py b/lib/spack/spack/cmd/dependencies.py
index f39ddb4dab..d36b1c5f71 100644
--- a/lib/spack/spack/cmd/dependencies.py
+++ b/lib/spack/spack/cmd/dependencies.py
@@ -60,7 +60,7 @@ def dependencies(parser, args):
spec = spack.cmd.disambiguate_spec(specs[0])
tty.msg("Dependencies of %s" % spec.format('$_$@$%@$/', color=True))
- deps = spack.store.store().db.installed_relatives(
+ deps = spack.store.db.installed_relatives(
spec, 'children', args.transitive)
if deps:
spack.cmd.display_specs(deps, long=True)
@@ -75,7 +75,7 @@ def dependencies(parser, args):
else:
packages = [
spack.repo.get(s.name)
- for s in spack.repo.path().providers_for(spec)]
+ for s in spack.repo.path.providers_for(spec)]
dependencies = set()
for pkg in packages:
diff --git a/lib/spack/spack/cmd/dependents.py b/lib/spack/spack/cmd/dependents.py
index 129ee56435..89d2f42dfa 100644
--- a/lib/spack/spack/cmd/dependents.py
+++ b/lib/spack/spack/cmd/dependents.py
@@ -57,14 +57,14 @@ def inverted_dependencies():
actual dependents.
"""
dag = {}
- for pkg in spack.repo.path().all_packages():
+ for pkg in spack.repo.path.all_packages():
dag.setdefault(pkg.name, set())
for dep in pkg.dependencies:
deps = [dep]
# expand virtuals if necessary
- if spack.repo.path().is_virtual(dep):
- deps += [s.name for s in spack.repo.path().providers_for(dep)]
+ if spack.repo.path.is_virtual(dep):
+ deps += [s.name for s in spack.repo.path.providers_for(dep)]
for d in deps:
dag.setdefault(d, set()).add(pkg.name)
@@ -103,7 +103,7 @@ def dependents(parser, args):
spec = spack.cmd.disambiguate_spec(specs[0])
tty.msg("Dependents of %s" % spec.cformat('$_$@$%@$/'))
- deps = spack.store.store().db.installed_relatives(
+ deps = spack.store.db.installed_relatives(
spec, 'parents', args.transitive)
if deps:
spack.cmd.display_specs(deps, long=True)
diff --git a/lib/spack/spack/cmd/diy.py b/lib/spack/spack/cmd/diy.py
index eec6f3159c..b6d3bfdfd3 100644
--- a/lib/spack/spack/cmd/diy.py
+++ b/lib/spack/spack/cmd/diy.py
@@ -78,7 +78,7 @@ def diy(self, args):
tty.die("spack diy only takes one spec.")
spec = specs[0]
- if not spack.repo.path().exists(spec.name):
+ if not spack.repo.path.exists(spec.name):
tty.die("No package for '{0}' was found.".format(spec.name),
" Use `spack create` to create a new package")
diff --git a/lib/spack/spack/cmd/edit.py b/lib/spack/spack/cmd/edit.py
index cd9a5ccee7..4031e5549a 100644
--- a/lib/spack/spack/cmd/edit.py
+++ b/lib/spack/spack/cmd/edit.py
@@ -50,9 +50,9 @@ def edit_package(name, repo_path, namespace):
if repo_path:
repo = spack.repo.Repo(repo_path)
elif namespace:
- repo = spack.repo.path().get_repo(namespace)
+ repo = spack.repo.path.get_repo(namespace)
else:
- repo = spack.repo.path()
+ repo = spack.repo.path
path = repo.filename_for_package_name(name)
spec = Spec(name)
diff --git a/lib/spack/spack/cmd/extensions.py b/lib/spack/spack/cmd/extensions.py
index 56463ae7c2..a428e85035 100644
--- a/lib/spack/spack/cmd/extensions.py
+++ b/lib/spack/spack/cmd/extensions.py
@@ -105,7 +105,7 @@ def extensions(parser, args):
if show_packages:
#
# List package names of extensions
- extensions = spack.repo.path().extensions_for(spec)
+ extensions = spack.repo.path.extensions_for(spec)
if not extensions:
tty.msg("%s has no extensions." % spec.cshort_spec)
else:
@@ -113,17 +113,17 @@ def extensions(parser, args):
tty.msg("%d extensions:" % len(extensions))
colify(ext.name for ext in extensions)
- layout = spack.store.store().extensions
+ layout = spack.store.extensions
if args.view is not None:
layout = YamlViewExtensionsLayout(
- args.view, spack.store.store().layout)
+ args.view, spack.store.layout)
if show_installed:
#
# List specs of installed extensions.
#
- store = spack.store.store()
- installed = [s.spec for s in store.db.installed_extensions_for(spec)]
+ installed = [
+ s.spec for s in spack.store.db.installed_extensions_for(spec)]
if show_all:
print
diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py
index 3b6de0ca84..63e1fe0475 100644
--- a/lib/spack/spack/cmd/find.py
+++ b/lib/spack/spack/cmd/find.py
@@ -147,7 +147,7 @@ def find(parser, args):
# If tags have been specified on the command line, filter by tags
if args.tags:
- packages_with_tags = spack.repo.path().packages_with_tags(*args.tags)
+ packages_with_tags = spack.repo.path.packages_with_tags(*args.tags)
query_specs = [x for x in query_specs if x.name in packages_with_tags]
# Display the result
diff --git a/lib/spack/spack/cmd/graph.py b/lib/spack/spack/cmd/graph.py
index 81e6e6119c..305835ff34 100644
--- a/lib/spack/spack/cmd/graph.py
+++ b/lib/spack/spack/cmd/graph.py
@@ -77,7 +77,7 @@ def graph(parser, args):
if args.specs:
tty.die("Can't specify specs with --installed")
args.dot = True
- specs = spack.store.store().db.query()
+ specs = spack.store.db.query()
else:
specs = spack.cmd.parse_specs(
diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py
index 9c074a5794..93aeb1009a 100644
--- a/lib/spack/spack/cmd/install.py
+++ b/lib/spack/spack/cmd/install.py
@@ -244,7 +244,7 @@ def install(parser, args, **kwargs):
"only one spec is allowed when overwriting an installation"
spec = specs[0]
- t = spack.store.store().db.query(spec)
+ t = spack.store.db.query(spec)
assert len(t) == 1, "to overwrite a spec you must install it first"
# Give the user a last chance to think about overwriting an already
diff --git a/lib/spack/spack/cmd/list.py b/lib/spack/spack/cmd/list.py
index 42370d1a10..c32f3556c2 100644
--- a/lib/spack/spack/cmd/list.py
+++ b/lib/spack/spack/cmd/list.py
@@ -303,7 +303,7 @@ def list(parser, args):
# Filter by tags
if args.tags:
packages_with_tags = set(
- spack.repo.path().packages_with_tags(*args.tags))
+ spack.repo.path.packages_with_tags(*args.tags))
sorted_packages = set(sorted_packages) & packages_with_tags
sorted_packages = sorted(sorted_packages)
diff --git a/lib/spack/spack/cmd/location.py b/lib/spack/spack/cmd/location.py
index 847a56bd5e..308047c40f 100644
--- a/lib/spack/spack/cmd/location.py
+++ b/lib/spack/spack/cmd/location.py
@@ -80,7 +80,7 @@ def location(parser, args):
print(spack.paths.prefix)
elif args.packages:
- print(spack.repo.path().first_repo().root)
+ print(spack.repo.path.first_repo().root)
elif args.stages:
print(spack.paths.stage_path)
@@ -102,7 +102,7 @@ def location(parser, args):
if args.package_dir:
# This one just needs the spec name.
- print(spack.repo.path().dirname_for_package_name(spec.name))
+ print(spack.repo.path.dirname_for_package_name(spec.name))
else:
# These versions need concretized specs.
diff --git a/lib/spack/spack/cmd/module.py b/lib/spack/spack/cmd/module.py
index 6ec069e50f..11bbaf7759 100644
--- a/lib/spack/spack/cmd/module.py
+++ b/lib/spack/spack/cmd/module.py
@@ -283,7 +283,7 @@ def refresh(module_types, specs, args):
# skip unknown packages.
writers = [
cls(spec) for spec in specs
- if spack.repo.path().exists(spec.name)]
+ if spack.repo.path.exists(spec.name)]
# Filter blacklisted packages early
writers = [x for x in writers if not x.conf.blacklisted]
diff --git a/lib/spack/spack/cmd/pkg.py b/lib/spack/spack/cmd/pkg.py
index 2562a3a58a..0d64e9900d 100644
--- a/lib/spack/spack/cmd/pkg.py
+++ b/lib/spack/spack/cmd/pkg.py
@@ -91,7 +91,7 @@ def list_packages(rev):
def pkg_add(args):
for pkg_name in args.packages:
- filename = spack.repo.path().filename_for_package_name(pkg_name)
+ filename = spack.repo.path.filename_for_package_name(pkg_name)
if not os.path.isfile(filename):
tty.die("No such package: %s. Path does not exist:" %
pkg_name, filename)
diff --git a/lib/spack/spack/cmd/providers.py b/lib/spack/spack/cmd/providers.py
index d9622ea0d3..c18cac10c5 100644
--- a/lib/spack/spack/cmd/providers.py
+++ b/lib/spack/spack/cmd/providers.py
@@ -46,7 +46,7 @@ def setup_parser(subparser):
def providers(parser, args):
- valid_virtuals = sorted(spack.repo.path().provider_index.providers.keys())
+ valid_virtuals = sorted(spack.repo.path.provider_index.providers.keys())
buffer = six.StringIO()
isatty = sys.stdout.isatty()
@@ -77,5 +77,5 @@ def providers(parser, args):
for spec in specs:
if sys.stdout.isatty():
print("{0}:".format(spec))
- spack.cmd.display_specs(sorted(spack.repo.path().providers_for(spec)))
+ spack.cmd.display_specs(sorted(spack.repo.path.providers_for(spec)))
print('')
diff --git a/lib/spack/spack/cmd/reindex.py b/lib/spack/spack/cmd/reindex.py
index 647c4b73a9..580fa977b4 100644
--- a/lib/spack/spack/cmd/reindex.py
+++ b/lib/spack/spack/cmd/reindex.py
@@ -31,4 +31,4 @@ level = "long"
def reindex(parser, args):
- spack.store.store().reindex()
+ spack.store.store.reindex()
diff --git a/lib/spack/spack/cmd/setup.py b/lib/spack/spack/cmd/setup.py
index 60b814b6c2..ad603704a3 100644
--- a/lib/spack/spack/cmd/setup.py
+++ b/lib/spack/spack/cmd/setup.py
@@ -135,9 +135,9 @@ def setup(self, args):
tty.die("spack setup only takes one spec.")
# Take a write lock before checking for existence.
- with spack.store.store().db.write_transaction():
+ with spack.store.db.write_transaction():
spec = specs[0]
- if not spack.repo.path().exists(spec.name):
+ if not spack.repo.path.exists(spec.name):
tty.die("No package for '{0}' was found.".format(spec.name),
" Use `spack create` to create a new package")
if not spec.versions.concrete:
diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py
index f3c7b77b1d..31cbd80d2e 100644
--- a/lib/spack/spack/cmd/uninstall.py
+++ b/lib/spack/spack/cmd/uninstall.py
@@ -93,7 +93,7 @@ def find_matching_specs(specs, allow_multiple_matches=False, force=False):
specs_from_cli = []
has_errors = False
for spec in specs:
- matching = spack.store.store().db.query(spec)
+ matching = spack.store.db.query(spec)
# For each spec provided, make sure it refers to only one package.
# Fail and ask user to be unambiguous if it doesn't
if not allow_multiple_matches and len(matching) > 1:
@@ -128,7 +128,7 @@ def installed_dependents(specs):
"""
dependents = {}
for item in specs:
- installed = spack.store.store().db.installed_relatives(
+ installed = spack.store.db.installed_relatives(
item, 'parents', True)
lst = [x for x in installed if x not in specs]
if lst:
@@ -158,7 +158,7 @@ def do_uninstall(specs, force):
# Sort packages to be uninstalled by the number of installed dependents
# This ensures we do things in the right order
def num_installed_deps(pkg):
- dependents = spack.store.store().db.installed_relatives(
+ dependents = spack.store.db.installed_relatives(
pkg.spec, 'parents', True)
return len(dependents)
diff --git a/lib/spack/spack/cmd/url.py b/lib/spack/spack/cmd/url.py
index cb2695833d..133cea0257 100644
--- a/lib/spack/spack/cmd/url.py
+++ b/lib/spack/spack/cmd/url.py
@@ -144,7 +144,7 @@ def url_list(args):
urls = set()
# Gather set of URLs from all packages
- for pkg in spack.repo.path().all_packages():
+ for pkg in spack.repo.path.all_packages():
url = getattr(pkg.__class__, 'url', None)
urls = url_list_parsing(args, urls, url, pkg)
@@ -178,7 +178,7 @@ def url_summary(args):
tty.msg('Generating a summary of URL parsing in Spack...')
# Loop through all packages
- for pkg in spack.repo.path().all_packages():
+ for pkg in spack.repo.path.all_packages():
urls = set()
url = getattr(pkg.__class__, 'url', None)
diff --git a/lib/spack/spack/cmd/view.py b/lib/spack/spack/cmd/view.py
index 009529d848..71333a088f 100644
--- a/lib/spack/spack/cmd/view.py
+++ b/lib/spack/spack/cmd/view.py
@@ -93,7 +93,7 @@ def relaxed_disambiguate(specs, view):
return matching_specs[0]
# make function always return a list to keep consistency between py2/3
- return list(map(squash, map(spack.store.store().db.query, specs)))
+ return list(map(squash, map(spack.store.db.query, specs)))
def setup_parser(sp):
@@ -176,7 +176,7 @@ def view(parser, args):
path = args.path[0]
view = YamlFilesystemView(
- path, spack.store.store().layout,
+ path, spack.store.layout,
ignore_conflicts=getattr(args, "ignore_conflicts", False),
link=os.link if args.action in ["hardlink", "hard"]
else os.symlink,
diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py
index c45e838374..45245e56c4 100644
--- a/lib/spack/spack/compilers/__init__.py
+++ b/lib/spack/spack/compilers/__init__.py
@@ -116,7 +116,7 @@ def get_compiler_config(scope=None, init_config=True):
def compiler_config_files():
config_files = list()
- config = spack.config.config()
+ config = spack.config.config
for scope in config.file_scopes:
name = scope.name
compiler_config = config.get('compilers', scope=name)
@@ -339,7 +339,7 @@ def compiler_for_spec(compiler_spec, arch_spec):
@_auto_compiler_spec
def get_compiler_duplicates(compiler_spec, arch_spec):
- config = spack.config.config()
+ config = spack.config.config
scope_to_compilers = {}
for scope in config.scopes:
diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py
index d9880e3755..2f57cb7504 100644
--- a/lib/spack/spack/concretize.py
+++ b/lib/spack/spack/concretize.py
@@ -39,6 +39,8 @@ from functools_backport import reverse_order
from contextlib import contextmanager
from six import iteritems
+import llnl.util.lang
+
import spack.repo
import spack.abi
import spack.spec
@@ -54,27 +56,11 @@ check_for_compiler_existence = True
#: Concretizer singleton
-_concretizer = None
+concretizer = llnl.util.lang.Singleton(lambda: Concretizer())
#: implements rudimentary logic for ABI compatibility
-_abi_checker = None
-
-
-def _abi():
- """Get an ABI checker object."""
- global _abi_checker
- if _abi_checker is None:
- _abi_checker = spack.abi.ABI()
- return _abi_checker
-
-
-def concretizer():
- """Get concretizer singleton."""
- global _concretizer
- if _concretizer is None:
- _concretizer = Concretizer()
- return _concretizer
+_abi = llnl.util.lang.Singleton(lambda: spack.abi.ABI())
@contextmanager
@@ -102,7 +88,7 @@ class Concretizer(object):
pref_key = lambda spec: 0 # no-op pref key
if spec.virtual:
- candidates = spack.repo.path().providers_for(spec)
+ candidates = spack.repo.path.providers_for(spec)
if not candidates:
raise spack.spec.UnsatisfiableProviderSpecError(
candidates[0], spec)
@@ -163,8 +149,8 @@ class Concretizer(object):
return sorted(candidates,
reverse=True,
key=lambda spec: (
- _abi().compatible(spec, abi_exemplar, loose=True),
- _abi().compatible(spec, abi_exemplar)))
+ _abi.compatible(spec, abi_exemplar, loose=True),
+ _abi.compatible(spec, abi_exemplar)))
def concretize_version(self, spec):
"""If the spec is already concrete, return. Otherwise take
diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py
index 149ed5088d..5157e918aa 100644
--- a/lib/spack/spack/config.py
+++ b/lib/spack/spack/config.py
@@ -61,6 +61,7 @@ from six import iteritems
import yaml
from yaml.error import MarkedYAMLError
+import llnl.util.lang
import llnl.util.tty as tty
from llnl.util.filesystem import mkdirp
@@ -456,17 +457,16 @@ def override(path, value):
"""Simple way to override config settings within a context."""
overrides = InternalConfigScope('overrides')
- cfg = config()
- cfg.push_scope(overrides)
- cfg.set(path, value, scope='overrides')
+ config.push_scope(overrides)
+ config.set(path, value, scope='overrides')
- yield cfg
+ yield config
- scope = cfg.pop_scope()
+ scope = config.pop_scope()
assert scope is overrides
-def config():
+def _config():
"""Singleton Configuration instance.
This constructs one instance associated with this module and returns
@@ -477,40 +477,37 @@ def config():
(Configuration): object for accessing spack configuration
"""
- global _configuration
- if not _configuration:
- _configuration = Configuration()
+ cfg = Configuration()
- # first do the builtin, hardcoded defaults
- defaults = InternalConfigScope('_builtin', config_defaults)
- _configuration.push_scope(defaults)
+ # first do the builtin, hardcoded defaults
+ defaults = InternalConfigScope('_builtin', config_defaults)
+ cfg.push_scope(defaults)
- # Each scope can have per-platfom overrides in subdirectories
- platform = spack.architecture.platform().name
+    # Each scope can have per-platform overrides in subdirectories
+ platform = spack.architecture.platform().name
- # add each scope and its platform-specific directory
- for name, path in configuration_paths:
- _configuration.push_scope(ConfigScope(name, path))
+ # add each scope and its platform-specific directory
+ for name, path in configuration_paths:
+ cfg.push_scope(ConfigScope(name, path))
- plat_name = '%s/%s' % (name, platform)
- plat_path = os.path.join(path, platform)
- _configuration.push_scope(ConfigScope(plat_name, plat_path))
+ plat_name = '%s/%s' % (name, platform)
+ plat_path = os.path.join(path, platform)
+ cfg.push_scope(ConfigScope(plat_name, plat_path))
- # we make a special scope for spack commands so that they can
- # override configuration options.
- _configuration.push_scope(InternalConfigScope('command_line'))
+ # we make a special scope for spack commands so that they can
+ # override configuration options.
+ cfg.push_scope(InternalConfigScope('command_line'))
- return _configuration
+ return cfg
-#: This is the global singleton configuration for Spack.
-#: TODO: consider making this NOT global and associate it with a spack instance
-_configuration = None
+#: This is the singleton configuration instance for Spack.
+config = llnl.util.lang.Singleton(_config)
def get(path, default=None, scope=None):
"""Module-level wrapper for ``Configuration.get()``."""
- return config().get(path, default, scope)
+ return config.get(path, default, scope)
def set(path, value, scope=None):
@@ -518,12 +515,12 @@ def set(path, value, scope=None):
Accepts the path syntax described in ``get()``.
"""
- return config().set(path, value, scope)
+ return config.set(path, value, scope)
def scopes():
"""Convenience function to get list of configuration scopes."""
- return config().scopes
+ return config.scopes
def _validate_section_name(section):
diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py
index 97954d67f9..3a5f4b73cb 100644
--- a/lib/spack/spack/database.py
+++ b/lib/spack/spack/database.py
@@ -223,7 +223,7 @@ class Database(object):
Prefix lock is a byte range lock on the nth byte of a file.
- The lock file is ``spack.store.store().db.prefix_lock`` -- the DB
+ The lock file is ``spack.store.db.prefix_lock`` -- the DB
tells us what to call it and it lives alongside the install DB.
n is the sys.maxsize-bit prefix of the DAG hash. This makes
@@ -366,7 +366,7 @@ class Database(object):
if version > _db_version:
raise InvalidDatabaseVersionError(_db_version, version)
elif version < _db_version:
- self.reindex(spack.store.store().layout)
+ self.reindex(spack.store.layout)
installs = dict((k, v.to_dict()) for k, v in self._data.items())
def invalid_record(hash_key, error):
@@ -499,7 +499,7 @@ class Database(object):
tty.debug(
'RECONSTRUCTING FROM OLD DB: {0}'.format(entry.spec))
try:
- layout = spack.store.store().layout
+ layout = spack.store.layout
if entry.spec.external:
layout = None
install_check = True
@@ -609,7 +609,7 @@ class Database(object):
# reindex() takes its own write lock, so no lock here.
with WriteTransaction(self.lock, timeout=_db_lock_timeout):
self._write(None, None, None)
- self.reindex(spack.store.store().layout)
+ self.reindex(spack.store.layout)
def _add(
self,
@@ -823,7 +823,7 @@ class Database(object):
the given spec
"""
if extensions_layout is None:
- extensions_layout = spack.store.store().extensions
+ extensions_layout = spack.store.extensions
for spec in self.query():
try:
extensions_layout.check_activated(extendee_spec, spec)
@@ -903,7 +903,7 @@ class Database(object):
if explicit is not any and rec.explicit != explicit:
continue
- if known is not any and spack.repo.path().exists(
+ if known is not any and spack.repo.path.exists(
rec.spec.name) != known:
continue
diff --git a/lib/spack/spack/filesystem_view.py b/lib/spack/spack/filesystem_view.py
index 0c5f9a6a6c..51a673d486 100644
--- a/lib/spack/spack/filesystem_view.py
+++ b/lib/spack/spack/filesystem_view.py
@@ -267,7 +267,7 @@ class YamlFilesystemView(FilesystemView):
# Check for globally activated extensions in the extendee that
# we're looking at.
activated = [p.spec for p in
- spack.store.store().db.activated_extensions_for(spec)]
+ spack.store.db.activated_extensions_for(spec)]
if activated:
tty.error("Globally activated extensions cannot be used in "
"conjunction with filesystem views. "
@@ -391,7 +391,7 @@ class YamlFilesystemView(FilesystemView):
def get_all_specs(self):
dotspack = join_path(self.root,
- spack.store.store().layout.metadata_dir)
+ spack.store.layout.metadata_dir)
if os.path.exists(dotspack):
return list(filter(None, map(self.get_spec, os.listdir(dotspack))))
else:
@@ -409,13 +409,13 @@ class YamlFilesystemView(FilesystemView):
def get_path_meta_folder(self, spec):
"Get path to meta folder for either spec or spec name."
return join_path(self.root,
- spack.store.store().layout.metadata_dir,
+ spack.store.layout.metadata_dir,
getattr(spec, "name", spec))
def get_spec(self, spec):
dotspack = self.get_path_meta_folder(spec)
filename = join_path(dotspack,
- spack.store.store().layout.spec_file_name)
+ spack.store.layout.spec_file_name)
try:
with open(filename, "r") as f:
@@ -424,7 +424,7 @@ class YamlFilesystemView(FilesystemView):
return None
def link_meta_folder(self, spec):
- src = spack.store.store().layout.metadata_path(spec)
+ src = spack.store.layout.metadata_path(spec)
tgt = self.get_path_meta_folder(spec)
tree = LinkTree(src)
@@ -550,4 +550,4 @@ def get_dependencies(specs):
def ignore_metadata_dir(f):
- return f in spack.store.store().layout.hidden_file_paths
+ return f in spack.store.layout.hidden_file_paths
diff --git a/lib/spack/spack/hooks/yaml_version_check.py b/lib/spack/spack/hooks/yaml_version_check.py
index af9d2443ca..0471712332 100644
--- a/lib/spack/spack/hooks/yaml_version_check.py
+++ b/lib/spack/spack/hooks/yaml_version_check.py
@@ -36,7 +36,7 @@ def pre_run():
def check_compiler_yaml_version():
- config = spack.config.config()
+ config = spack.config.config
for scope in config.file_scopes:
file_name = os.path.join(scope.path, 'compilers.yaml')
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index f361763878..9e4150b9b3 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -661,9 +661,9 @@ class PackageBase(with_metaclass(PackageMeta, object)):
visited = set([self.name])
for i, name in enumerate(self.dependencies):
- if spack.repo.path().is_virtual(name):
+ if spack.repo.path.is_virtual(name):
if expand_virtuals:
- providers = spack.repo.path().providers_for(name)
+ providers = spack.repo.path.providers_for(name)
dep_names = [spec.name for spec in providers]
else:
visited.add(name)
@@ -949,7 +949,7 @@ class PackageBase(with_metaclass(PackageMeta, object)):
raise ValueError(
"is_extension called on package that is not an extension.")
if extensions_layout is None:
- extensions_layout = spack.store.store().extensions
+ extensions_layout = spack.store.extensions
exts = extensions_layout.extension_map(self.extendee_spec)
return (self.name in exts) and (exts[self.name] == self.spec)
@@ -1003,7 +1003,7 @@ class PackageBase(with_metaclass(PackageMeta, object)):
Removes the prefix for a package along with any empty parent
directories
"""
- spack.store.store().layout.remove_install_directory(self.spec)
+ spack.store.layout.remove_install_directory(self.spec)
def do_fetch(self, mirror_only=False):
"""
@@ -1241,8 +1241,7 @@ class PackageBase(with_metaclass(PackageMeta, object)):
# Install fake man page
mkdirp(self.prefix.man.man1)
- store = spack.store.store()
- packages_dir = store.layout.build_packages_path(self.spec)
+ packages_dir = spack.store.layout.build_packages_path(self.spec)
dump_packages(self.spec, packages_dir)
def _if_make_target_execute(self, target):
@@ -1305,7 +1304,7 @@ class PackageBase(with_metaclass(PackageMeta, object)):
def _stage_and_write_lock(self):
"""Prefix lock nested in a stage."""
with self.stage:
- with spack.store.store().db.prefix_write_lock(self.spec):
+ with spack.store.db.prefix_write_lock(self.spec):
yield
def _process_external_package(self, explicit):
@@ -1329,7 +1328,7 @@ class PackageBase(with_metaclass(PackageMeta, object)):
try:
# Check if the package was already registered in the DB
# If this is the case, then just exit
- rec = spack.store.store().db.get_record(self.spec)
+ rec = spack.store.db.get_record(self.spec)
message = '{s.name}@{s.version} : already registered in DB'
tty.msg(message.format(s=self))
# Update the value of rec.explicit if it is necessary
@@ -1345,12 +1344,12 @@ class PackageBase(with_metaclass(PackageMeta, object)):
# Add to the DB
message = '{s.name}@{s.version} : registering into DB'
tty.msg(message.format(s=self))
- spack.store.store().db.add(self.spec, None, explicit=explicit)
+ spack.store.db.add(self.spec, None, explicit=explicit)
def _update_explicit_entry_in_db(self, rec, explicit):
if explicit and not rec.explicit:
- with spack.store.store().db.write_transaction():
- rec = spack.store.store().db.get_record(self.spec)
+ with spack.store.db.write_transaction():
+ rec = spack.store.db.get_record(self.spec)
rec.explicit = True
message = '{s.name}@{s.version} : marking the package explicit'
tty.msg(message.format(s=self))
@@ -1367,7 +1366,7 @@ class PackageBase(with_metaclass(PackageMeta, object)):
binary_distribution.extract_tarball(
binary_spec, tarball, allow_root=False,
unsigned=False, force=False)
- spack.store.store().db.add(
+ spack.store.db.add(
self.spec, spack.store.layout, explicit=explicit)
return True
@@ -1425,15 +1424,15 @@ class PackageBase(with_metaclass(PackageMeta, object)):
partial = self.check_for_unfinished_installation(keep_prefix, restage)
# Ensure package is not already installed
- layout = spack.store.store().layout
- with spack.store.store().db.prefix_read_lock(self.spec):
+ layout = spack.store.layout
+ with spack.store.db.prefix_read_lock(self.spec):
if partial:
tty.msg(
"Continuing from partial install of %s" % self.name)
elif layout.check_installed(self.spec):
msg = '{0.name} is already installed in {0.prefix}'
tty.msg(msg.format(self))
- rec = spack.store.store().db.get_record(self.spec)
+ rec = spack.store.db.get_record(self.spec)
# In case the stage directory has already been created,
# this ensures it's removed after we checked that the spec
# is installed
@@ -1568,7 +1567,7 @@ class PackageBase(with_metaclass(PackageMeta, object)):
try:
# Create the install prefix and fork the build process.
if not os.path.exists(self.prefix):
- spack.store.store().layout.create_install_directory(self.spec)
+ spack.store.layout.create_install_directory(self.spec)
# Fork a child to do the actual installation
# we preserve verbosity settings across installs.
@@ -1579,8 +1578,8 @@ class PackageBase(with_metaclass(PackageMeta, object)):
keep_prefix = self.last_phase is None or keep_prefix
# note: PARENT of the build process adds the new package to
# the database, so that we don't need to re-read from file.
- spack.store.store().db.add(
- self.spec, spack.store.store().layout, explicit=explicit
+ spack.store.db.add(
+ self.spec, spack.store.layout, explicit=explicit
)
except directory_layout.InstallDirectoryAlreadyExistsError:
# Abort install if install directory exists.
@@ -1634,9 +1633,9 @@ class PackageBase(with_metaclass(PackageMeta, object)):
raise ExternalPackageError("Attempted to repair external spec %s" %
self.spec.name)
- with spack.store.store().db.prefix_write_lock(self.spec):
+ with spack.store.db.prefix_write_lock(self.spec):
try:
- record = spack.store.store().db.get_record(self.spec)
+ record = spack.store.db.get_record(self.spec)
installed_in_db = record.installed if record else False
except KeyError:
installed_in_db = False
@@ -1671,10 +1670,9 @@ class PackageBase(with_metaclass(PackageMeta, object)):
def log(self):
# Copy provenance into the install directory on success
- store = spack.store.store()
- log_install_path = store.layout.build_log_path(self.spec)
- env_install_path = store.layout.build_env_path(self.spec)
- packages_dir = store.layout.build_packages_path(self.spec)
+ log_install_path = spack.store.layout.build_log_path(self.spec)
+ env_install_path = spack.store.layout.build_env_path(self.spec)
+ packages_dir = spack.store.layout.build_packages_path(self.spec)
# Remove first if we're overwriting another build
# (can happen with spack setup)
@@ -1693,7 +1691,7 @@ class PackageBase(with_metaclass(PackageMeta, object)):
with working_dir(self.stage.source_path):
errors = StringIO()
target_dir = os.path.join(
- spack.store.store().layout.metadata_path(self.spec),
+ spack.store.layout.metadata_path(self.spec),
'archived-files')
for glob_expr in self.archive_files:
@@ -1754,7 +1752,7 @@ class PackageBase(with_metaclass(PackageMeta, object)):
installed = set(os.listdir(self.prefix))
installed.difference_update(
- spack.store.store().layout.hidden_file_paths)
+ spack.store.layout.hidden_file_paths)
if not installed:
raise InstallError(
"Install failed for %s. Nothing was installed!" % self.name)
@@ -1762,7 +1760,7 @@ class PackageBase(with_metaclass(PackageMeta, object)):
@property
def build_log_path(self):
if self.installed:
- return spack.store.store().layout.build_log_path(self.spec)
+ return spack.store.layout.build_log_path(self.spec)
else:
return join_path(self.stage.source_path, 'spack-build.out')
@@ -1920,16 +1918,16 @@ class PackageBase(with_metaclass(PackageMeta, object)):
if not os.path.isdir(spec.prefix):
# prefix may not exist, but DB may be inconsistent. Try to fix by
# removing, but omit hooks.
- specs = spack.store.store().db.query(spec, installed=True)
+ specs = spack.store.db.query(spec, installed=True)
if specs:
- spack.store.store().db.remove(specs[0])
+ spack.store.db.remove(specs[0])
tty.msg("Removed stale DB entry for %s" % spec.short_spec)
return
else:
raise InstallError(str(spec) + " is not installed.")
if not force:
- dependents = spack.store.store().db.installed_relatives(
+ dependents = spack.store.db.installed_relatives(
spec, 'parents', True)
if dependents:
raise PackageStillNeededError(spec, dependents)
@@ -1941,7 +1939,7 @@ class PackageBase(with_metaclass(PackageMeta, object)):
pkg = None
# Pre-uninstall hook runs first.
- with spack.store.store().db.prefix_write_lock(spec):
+ with spack.store.db.prefix_write_lock(spec):
if pkg is not None:
spack.hooks.pre_uninstall(spec)
@@ -1950,11 +1948,11 @@ class PackageBase(with_metaclass(PackageMeta, object)):
if not spec.external:
msg = 'Deleting package prefix [{0}]'
tty.debug(msg.format(spec.short_spec))
- spack.store.store().layout.remove_install_directory(spec)
+ spack.store.layout.remove_install_directory(spec)
# Delete DB entry
msg = 'Deleting DB entry [{0}]'
tty.debug(msg.format(spec.short_spec))
- spack.store.store().db.remove(spec)
+ spack.store.db.remove(spec)
if pkg is not None:
spack.hooks.post_uninstall(spec)
@@ -2000,7 +1998,7 @@ class PackageBase(with_metaclass(PackageMeta, object)):
self._sanity_check_extension()
if extensions_layout is None:
- extensions_layout = spack.store.store().extensions
+ extensions_layout = spack.store.extensions
extensions_layout.check_extension_conflict(
self.extendee_spec, self.spec)
@@ -2044,11 +2042,11 @@ class PackageBase(with_metaclass(PackageMeta, object)):
"""
extensions_layout = kwargs.get("extensions_layout",
- spack.store.store().extensions)
+ spack.store.extensions)
target = extensions_layout.extendee_target_directory(self)
def ignore(filename):
- return (filename in spack.store.store().layout.hidden_file_paths or
+ return (filename in spack.store.layout.hidden_file_paths or
kwargs.get('ignore', lambda f: False)(filename))
tree = LinkTree(extension.prefix)
@@ -2076,7 +2074,7 @@ class PackageBase(with_metaclass(PackageMeta, object)):
verbose = kwargs.get("verbose", True)
remove_dependents = kwargs.get("remove_dependents", False)
extensions_layout = kwargs.get("extensions_layout",
- spack.store.store().extensions)
+ spack.store.extensions)
# Allow a force deactivate to happen. This can unlink
# spurious files if something was corrupted.
@@ -2128,11 +2126,11 @@ class PackageBase(with_metaclass(PackageMeta, object)):
"""
extensions_layout = kwargs.get("extensions_layout",
- spack.store.store().extensions)
+ spack.store.extensions)
target = extensions_layout.extendee_target_directory(self)
def ignore(filename):
- return (filename in spack.store.store().layout.hidden_file_paths or
+ return (filename in spack.store.layout.hidden_file_paths or
kwargs.get('ignore', lambda f: False)(filename))
tree = LinkTree(extension.prefix)
@@ -2277,7 +2275,7 @@ def flatten_dependencies(spec, flat_dir):
for dep in spec.traverse(root=False):
name = dep.name
- dep_path = spack.store.store().layout.path_for_spec(dep)
+ dep_path = spack.store.layout.path_for_spec(dep)
dep_files = LinkTree(dep_path)
os.mkdir(flat_dir + '/' + name)
@@ -2306,7 +2304,7 @@ def dump_packages(spec, path):
if node is not spec:
# Locate the dependency package in the install tree and find
# its provenance information.
- source = spack.store.store().layout.build_packages_path(node)
+ source = spack.store.layout.build_packages_path(node)
source_repo_root = join_path(source, node.namespace)
# There's no provenance installed for the source package. Skip it.
@@ -2334,7 +2332,7 @@ def dump_packages(spec, path):
if node is not spec:
install_tree(source_pkg_dir, dest_pkg_dir)
else:
- spack.repo.path().dump_provenance(node, dest_pkg_dir)
+ spack.repo.path.dump_provenance(node, dest_pkg_dir)
def print_pkg(message):
diff --git a/lib/spack/spack/package_prefs.py b/lib/spack/spack/package_prefs.py
index ce84606b00..f8e2499bb1 100644
--- a/lib/spack/spack/package_prefs.py
+++ b/lib/spack/spack/package_prefs.py
@@ -51,7 +51,7 @@ def get_packages_config():
# by sometihng, not just packages/names that don't exist.
# So, this won't include, e.g., 'all'.
virtuals = [(pkg_name, pkg_name._start_mark) for pkg_name in config
- if spack.repo.path().is_virtual(pkg_name)]
+ if spack.repo.path.is_virtual(pkg_name)]
# die if there are virtuals in `packages.py`
if virtuals:
diff --git a/lib/spack/spack/relocate.py b/lib/spack/spack/relocate.py
index 303776557b..f90556b7b1 100644
--- a/lib/spack/spack/relocate.py
+++ b/lib/spack/spack/relocate.py
@@ -110,7 +110,7 @@ def get_placeholder_rpaths(path_name, orig_rpaths):
Replaces original layout root dir with a placeholder string in all rpaths.
"""
rel_rpaths = []
- orig_dir = spack.store.store().layout.root
+ orig_dir = spack.store.layout.root
for rpath in orig_rpaths:
if re.match(orig_dir, rpath):
placeholder = set_placeholder(orig_dir)
@@ -186,7 +186,7 @@ def macho_make_paths_placeholder(rpaths, deps, idpath):
replacement are returned.
"""
new_idpath = None
- old_dir = spack.store.store().layout.root
+ old_dir = spack.store.layout.root
placeholder = set_placeholder(old_dir)
if idpath:
new_idpath = re.sub(old_dir, placeholder, idpath)
@@ -425,9 +425,9 @@ def make_binary_placeholder(cur_path_names, allow_root):
new_rpaths, new_deps, new_idpath)
if (not allow_root and
strings_contains_installroot(cur_path,
- spack.store.store().layout.root)):
+ spack.store.layout.root)):
raise InstallRootStringException(
- cur_path, spack.store.store().layout.root)
+ cur_path, spack.store.layout.root)
elif platform.system() == 'Linux':
for cur_path in cur_path_names:
orig_rpaths = get_existing_elf_rpaths(cur_path)
@@ -436,9 +436,9 @@ def make_binary_placeholder(cur_path_names, allow_root):
modify_elf_object(cur_path, new_rpaths)
if (not allow_root and
strings_contains_installroot(
- cur_path, spack.store.store().layout.root)):
+ cur_path, spack.store.layout.root)):
raise InstallRootStringException(
- cur_path, spack.store.store().layout.root)
+ cur_path, spack.store.layout.root)
else:
tty.die("Placeholder not implemented for %s" % platform.system())
diff --git a/lib/spack/spack/repo.py b/lib/spack/spack/repo.py
index 448d31ae1d..dd6b6ac71d 100644
--- a/lib/spack/spack/repo.py
+++ b/lib/spack/spack/repo.py
@@ -224,7 +224,7 @@ class TagIndex(Mapping):
pkg_name (str): name of the package to be removed from the index
"""
- package = path().get(pkg_name)
+ package = path.get(pkg_name)
# Remove the package from the list of packages, if present
for pkg_list in self._tag_dict.values():
@@ -255,7 +255,7 @@ def make_provider_index_cache(packages_path, namespace):
cache_filename = 'providers/{0}-index.yaml'.format(namespace)
# Compute which packages needs to be updated in the cache
- misc_cache = spack.caches.misc_cache()
+ misc_cache = spack.caches.misc_cache
index_mtime = misc_cache.mtime(cache_filename)
needs_update = [
@@ -309,7 +309,7 @@ def make_tag_index_cache(packages_path, namespace):
cache_filename = 'tags/{0}-index.json'.format(namespace)
# Compute which packages needs to be updated in the cache
- misc_cache = spack.caches.misc_cache()
+ misc_cache = spack.caches.misc_cache
index_mtime = misc_cache.mtime(cache_filename)
needs_update = [
@@ -1079,60 +1079,60 @@ def create_repo(root, namespace=None):
return full_path, namespace
-#: Singleton repo path instance
-_path = None
-
-
-def set_path(repo):
- """Set the path() singleton to a specific value.
-
- Overwrite _path and register it as an importer in sys.meta_path if
- it is a ``Repo`` or ``RepoPath``.
- """
- global _path
- _path = repo
-
- # make the new repo_path an importer if needed
- append = isinstance(repo, (Repo, RepoPath))
- if append:
- sys.meta_path.append(_path)
- return append
-
-
-def path():
+def _path():
"""Get the singleton RepoPath instance for Spack.
Create a RepoPath, add it to sys.meta_path, and return it.
TODO: consider not making this a singleton.
"""
- if _path is None:
- repo_dirs = spack.config.get('repos')
- if not repo_dirs:
- raise NoRepoConfiguredError(
- "Spack configuration contains no package repositories.")
- set_path(RepoPath(*repo_dirs))
+ repo_dirs = spack.config.get('repos')
+ if not repo_dirs:
+ raise NoRepoConfiguredError(
+ "Spack configuration contains no package repositories.")
- return _path
+ path = RepoPath(*repo_dirs)
+ sys.meta_path.append(path)
+ return path
+
+
+#: Singleton repo path instance
+path = llnl.util.lang.Singleton(_path)
def get(spec):
"""Convenience wrapper around ``spack.repo.get()``."""
- return path().get(spec)
+ return path.get(spec)
def all_package_names():
"""Convenience wrapper around ``spack.repo.all_package_names()``."""
- return path().all_package_names()
+ return path.all_package_names()
+
+
+def set_path(repo):
+ """Set the path singleton to a specific value.
+
+ Overwrite ``path`` and register it as an importer in
+ ``sys.meta_path`` if it is a ``Repo`` or ``RepoPath``.
+ """
+ global path
+ path = repo
+
+ # make the new repo_path an importer if needed
+ append = isinstance(repo, (Repo, RepoPath))
+ if append:
+ sys.meta_path.append(repo)
+ return append
@contextmanager
def swap(repo_path):
"""Temporarily use another RepoPath."""
- global _path
+ global path
# swap out _path for repo_path
- saved = _path
+ saved = path
remove_from_meta = set_path(repo_path)
yield
@@ -1140,7 +1140,7 @@ def swap(repo_path):
# restore _path and sys.meta_path
if remove_from_meta:
sys.meta_path.remove(repo_path)
- _path = saved
+ path = saved
class RepoError(spack.error.SpackError):
diff --git a/lib/spack/spack/reporters/junit.py b/lib/spack/spack/reporters/junit.py
index e8b69f8240..2072cf7576 100644
--- a/lib/spack/spack/reporters/junit.py
+++ b/lib/spack/spack/reporters/junit.py
@@ -22,7 +22,7 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-
+import os
import os.path
import spack.build_environment
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index e81d913631..596cef1306 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -1228,7 +1228,7 @@ class Spec(object):
"""Internal package call gets only the class object for a package.
Use this to just get package metadata.
"""
- return spack.repo.path().get_pkg_class(self.fullname)
+ return spack.repo.path.get_pkg_class(self.fullname)
@property
def virtual(self):
@@ -1244,7 +1244,7 @@ class Spec(object):
@staticmethod
def is_virtual(name):
"""Test if a name is virtual without requiring a Spec."""
- return (name is not None) and (not spack.repo.path().exists(name))
+ return (name is not None) and (not spack.repo.path.exists(name))
@property
def concrete(self):
@@ -1402,7 +1402,7 @@ class Spec(object):
@property
def prefix(self):
if self._prefix is None:
- self.prefix = spack.store.store().layout.path_for_spec(self)
+ self.prefix = spack.store.layout.path_for_spec(self)
return self._prefix
@prefix.setter
@@ -1675,7 +1675,7 @@ class Spec(object):
# still need to select a concrete package later.
if not self.virtual:
import spack.concretize
- concretizer = spack.concretize.concretizer()
+ concretizer = spack.concretize.concretizer
changed |= any(
(concretizer.concretize_architecture(self),
concretizer.concretize_compiler(self),
@@ -1744,7 +1744,7 @@ class Spec(object):
# Get a list of possible replacements in order of
# preference.
import spack.concretize
- concretizer = spack.concretize.concretizer()
+ concretizer = spack.concretize.concretizer
candidates = concretizer.choose_virtual_or_external(spec)
# Try the replacements in order, skipping any that cause
@@ -1849,7 +1849,7 @@ class Spec(object):
# we can do it as late as possible to allow as much
# compatibility across repositories as possible.
if s.namespace is None:
- s.namespace = spack.repo.path().repo_for_pkg(s.name).namespace
+ s.namespace = spack.repo.path.repo_for_pkg(s.name).namespace
if s.concrete:
continue
@@ -3107,7 +3107,7 @@ class Spec(object):
elif named_str == 'SPACK_ROOT':
out.write(fmt % token_transform(spack.paths.prefix))
elif named_str == 'SPACK_INSTALL':
- out.write(fmt % token_transform(spack.store.store().root))
+ out.write(fmt % token_transform(spack.store.root))
elif named_str == 'PREFIX':
out.write(fmt % token_transform(self.prefix))
elif named_str.startswith('HASH'):
@@ -3149,7 +3149,7 @@ class Spec(object):
if not self.concrete:
return None
try:
- record = spack.store.store().db.get_record(self)
+ record = spack.store.db.get_record(self)
return record.installed
except KeyError:
return None
@@ -3159,7 +3159,7 @@ class Spec(object):
if not self.concrete:
return None
try:
- record = spack.store.store().db.get_record(self)
+ record = spack.store.db.get_record(self)
return record.explicit
except KeyError:
return None
@@ -3382,7 +3382,7 @@ class SpecParser(spack.parse.Parser):
def spec_by_hash(self):
self.expect(ID)
- specs = spack.store.store().db.query()
+ specs = spack.store.db.query()
matches = [spec for spec in specs if
spec.dag_hash()[:len(self.token.value)] == self.token.value]
diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py
index c4acebf1de..9c384d8aec 100644
--- a/lib/spack/spack/stage.py
+++ b/lib/spack/spack/stage.py
@@ -408,7 +408,7 @@ class Stage(object):
url, digest, expand=expand, extension=extension))
if self.default_fetcher.cachable:
fetchers.insert(
- 0, spack.caches.fetch_cache().fetcher(
+ 0, spack.caches.fetch_cache.fetcher(
self.mirror_path, digest, expand=expand,
extension=extension))
@@ -455,7 +455,7 @@ class Stage(object):
self.fetcher.check()
def cache_local(self):
- spack.caches.fetch_cache().store(self.fetcher, self.mirror_path)
+ spack.caches.fetch_cache.store(self.fetcher, self.mirror_path)
def expand_archive(self):
"""Changes to the stage directory and attempt to expand the downloaded
diff --git a/lib/spack/spack/store.py b/lib/spack/spack/store.py
index 9bd8166c16..f5111e7f46 100644
--- a/lib/spack/spack/store.py
+++ b/lib/spack/spack/store.py
@@ -43,6 +43,9 @@ configuration.
"""
import os
+
+import llnl.util.lang
+
import spack.paths
import spack.config
import spack.util.path
@@ -72,7 +75,7 @@ class Store(object):
hash_length (int): length of the hashes used in the directory
layout; spec hash suffixes will be truncated to this length
"""
- def __init__(self, root, path_scheme, hash_length):
+ def __init__(self, root, path_scheme=None, hash_length=None):
self.root = root
self.db = spack.database.Database(root)
self.layout = spack.directory_layout.YamlDirectoryLayout(
@@ -85,19 +88,21 @@ class Store(object):
return self.db.reindex(self.layout)
-#: Singleton store instance
-_store = None
+def _store():
+ """Get the singleton store instance."""
+ root = spack.config.get('config:install_tree', default_root)
+ root = spack.util.path.canonicalize_path(root)
+ return Store(root,
+ spack.config.get('config:install_path_scheme'),
+ spack.config.get('config:install_hash_length'))
-def store():
- """Get the singleton store instance."""
- global _store
- if _store is None:
- root = spack.config.get('config:install_tree', default_root)
- root = spack.util.path.canonicalize_path(root)
+#: Singleton store instance
+store = llnl.util.lang.Singleton(_store)
- _store = Store(root,
- spack.config.get('config:install_path_scheme'),
- spack.config.get('config:install_hash_length'))
- return _store
+# convenience accessors for parts of the singleton store
+root = llnl.util.lang.LazyReference(lambda: store.root)
+db = llnl.util.lang.LazyReference(lambda: store.db)
+layout = llnl.util.lang.LazyReference(lambda: store.layout)
+extensions = llnl.util.lang.LazyReference(lambda: store.extensions)
diff --git a/lib/spack/spack/test/cmd/clean.py b/lib/spack/spack/test/cmd/clean.py
index 3e189531ff..cb4339c333 100644
--- a/lib/spack/spack/test/cmd/clean.py
+++ b/lib/spack/spack/test/cmd/clean.py
@@ -44,9 +44,9 @@ def mock_calls_for_clean(monkeypatch):
monkeypatch.setattr(spack.package.PackageBase, 'do_clean', Counter())
monkeypatch.setattr(spack.stage, 'purge', Counter())
monkeypatch.setattr(
- spack.caches._fetch_cache, 'destroy', Counter(), raising=False)
+ spack.caches.fetch_cache, 'destroy', Counter(), raising=False)
monkeypatch.setattr(
- spack.caches._misc_cache, 'destroy', Counter())
+ spack.caches.misc_cache, 'destroy', Counter())
@pytest.mark.usefixtures(
@@ -69,5 +69,5 @@ def test_function_calls(command_line, counters):
# number of times
assert spack.package.PackageBase.do_clean.call_count == counters[0]
assert spack.stage.purge.call_count == counters[1]
- assert spack.caches.fetch_cache().destroy.call_count == counters[2]
- assert spack.caches.misc_cache().destroy.call_count == counters[3]
+ assert spack.caches.fetch_cache.destroy.call_count == counters[2]
+ assert spack.caches.misc_cache.destroy.call_count == counters[3]
diff --git a/lib/spack/spack/test/cmd/dependencies.py b/lib/spack/spack/test/cmd/dependencies.py
index f211737e4b..015eac589c 100644
--- a/lib/spack/spack/test/cmd/dependencies.py
+++ b/lib/spack/spack/test/cmd/dependencies.py
@@ -60,7 +60,7 @@ def test_immediate_installed_dependencies(mock_packages, database):
lines = [l for l in out.strip().split('\n') if not l.startswith('--')]
hashes = set([re.split(r'\s+', l)[0] for l in lines])
- expected = set([spack.store.store().db.query_one(s).dag_hash(7)
+ expected = set([spack.store.db.query_one(s).dag_hash(7)
for s in ['mpich', 'callpath^mpich']])
assert expected == hashes
@@ -74,7 +74,7 @@ def test_transitive_installed_dependencies(mock_packages, database):
lines = [l for l in out.strip().split('\n') if not l.startswith('--')]
hashes = set([re.split(r'\s+', l)[0] for l in lines])
- expected = set([spack.store.store().db.query_one(s).dag_hash(7)
+ expected = set([spack.store.db.query_one(s).dag_hash(7)
for s in ['zmpi', 'callpath^zmpi', 'fake',
'dyninst', 'libdwarf', 'libelf']])
diff --git a/lib/spack/spack/test/cmd/dependents.py b/lib/spack/spack/test/cmd/dependents.py
index 6d85596c37..c6a5b01111 100644
--- a/lib/spack/spack/test/cmd/dependents.py
+++ b/lib/spack/spack/test/cmd/dependents.py
@@ -58,10 +58,10 @@ def test_immediate_installed_dependents(mock_packages, database):
lines = [l for l in out.strip().split('\n') if not l.startswith('--')]
hashes = set([re.split(r'\s+', l)[0] for l in lines])
- expected = set([spack.store.store().db.query_one(s).dag_hash(7)
+ expected = set([spack.store.db.query_one(s).dag_hash(7)
for s in ['dyninst', 'libdwarf']])
- libelf = spack.store.store().db.query_one('libelf')
+ libelf = spack.store.db.query_one('libelf')
expected = set([d.dag_hash(7) for d in libelf.dependents()])
assert expected == hashes
@@ -75,7 +75,7 @@ def test_transitive_installed_dependents(mock_packages, database):
lines = [l for l in out.strip().split('\n') if not l.startswith('--')]
hashes = set([re.split(r'\s+', l)[0] for l in lines])
- expected = set([spack.store.store().db.query_one(s).dag_hash(7)
+ expected = set([spack.store.db.query_one(s).dag_hash(7)
for s in ['zmpi', 'callpath^zmpi', 'mpileaks^zmpi']])
assert expected == hashes
diff --git a/lib/spack/spack/test/cmd/install.py b/lib/spack/spack/test/cmd/install.py
index 5635fbdf09..88f0247cfa 100644
--- a/lib/spack/spack/test/cmd/install.py
+++ b/lib/spack/spack/test/cmd/install.py
@@ -25,6 +25,7 @@
import argparse
import os
import filecmp
+from six.moves import builtins
import pytest
@@ -316,28 +317,23 @@ def test_junit_output_with_failures(tmpdir, exc_typename, msg):
@pytest.mark.disable_clean_stage_check
-@pytest.mark.usefixtures(
- 'mock_packages', 'mock_archive', 'mock_fetch', 'config', 'install_mockery'
-)
@pytest.mark.parametrize('exc_typename,msg', [
- ('RuntimeError', 'something weird happened'),
+    ('RuntimeError', 'something weird happened'),
('KeyboardInterrupt', 'Ctrl-C strikes again')
])
-def test_junit_output_with_errors(tmpdir, monkeypatch, exc_typename, msg):
+def test_junit_output_with_errors(
+ exc_typename, msg,
+ mock_packages, mock_archive, mock_fetch, install_mockery,
+ config, tmpdir, monkeypatch):
def just_throw(*args, **kwargs):
- from six.moves import builtins
exc_type = getattr(builtins, exc_typename)
raise exc_type(msg)
monkeypatch.setattr(spack.package.PackageBase, 'do_install', just_throw)
with tmpdir.as_cwd():
- install(
- '--log-format=junit', '--log-file=test.xml',
- 'libdwarf',
- fail_on_error=False
- )
+ install('--log-format=junit', '--log-file=test.xml', 'libdwarf')
files = tmpdir.listdir()
filename = tmpdir.join('test.xml')
@@ -387,7 +383,7 @@ def test_extra_files_are_archived(mock_packages, mock_archive, mock_fetch,
install('archive-files')
archive_dir = os.path.join(
- spack.store.store().layout.metadata_path(s), 'archived-files'
+ spack.store.layout.metadata_path(s), 'archived-files'
)
config_log = os.path.join(archive_dir, 'config.log')
assert os.path.exists(config_log)
diff --git a/lib/spack/spack/test/cmd/uninstall.py b/lib/spack/spack/test/cmd/uninstall.py
index d81ab13a57..1c187431d1 100644
--- a/lib/spack/spack/test/cmd/uninstall.py
+++ b/lib/spack/spack/test/cmd/uninstall.py
@@ -61,7 +61,7 @@ def test_recursive_uninstall():
"""Test recursive uninstall."""
uninstall('-y', '-a', '--dependents', 'callpath')
- all_specs = spack.store.store().layout.all_specs()
+ all_specs = spack.store.layout.all_specs()
assert len(all_specs) == 8
# query specs with multiple configurations
mpileaks_specs = [s for s in all_specs if s.satisfies('mpileaks')]
diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py
index 2a7943c894..028bc23583 100644
--- a/lib/spack/spack/test/concretize.py
+++ b/lib/spack/spack/test/concretize.py
@@ -162,7 +162,7 @@ class TestConcretize(object):
"""Make sure insufficient versions of MPI are not in providers list when
we ask for some advanced version.
"""
- repo = spack.repo.path()
+ repo = spack.repo.path
assert not any(
s.satisfies('mpich2@:1.0') for s in repo.providers_for('mpi@2.1')
)
@@ -182,7 +182,7 @@ class TestConcretize(object):
def test_provides_handles_multiple_providers_of_same_vesrion(self):
"""
"""
- providers = spack.repo.path().providers_for('mpi@3.0')
+ providers = spack.repo.path.providers_for('mpi@3.0')
# Note that providers are repo-specific, so we don't misinterpret
# providers, but vdeps are not namespace-specific, so we can
diff --git a/lib/spack/spack/test/concretize_preferences.py b/lib/spack/spack/test/concretize_preferences.py
index a7ac13a64a..6156b53ef0 100644
--- a/lib/spack/spack/test/concretize_preferences.py
+++ b/lib/spack/spack/test/concretize_preferences.py
@@ -42,7 +42,7 @@ def concretize_scope(config, tmpdir):
config.pop_scope()
spack.package_prefs.PackagePrefs.clear_caches()
- spack.repo.path()._provider_index = None
+ spack.repo.path._provider_index = None
def concretize(abstract_spec):
diff --git a/lib/spack/spack/test/config.py b/lib/spack/spack/test/config.py
index 5893e13a9c..0d2359c1ab 100644
--- a/lib/spack/spack/test/config.py
+++ b/lib/spack/spack/test/config.py
@@ -61,15 +61,15 @@ config_override_list = {
@pytest.fixture()
def config(tmpdir):
"""Mocks the configuration scope."""
- real_configuration = spack.config._configuration
- scopes = [spack.config.ConfigScope(name, str(tmpdir.join(name)))
- for name in ['low', 'high']]
- config = spack.config.Configuration(*scopes)
- spack.config._configuration = config
+ real_configuration = spack.config.config
- yield config
+ spack.config.config = spack.config.Configuration(
+ *[spack.config.ConfigScope(name, str(tmpdir.join(name)))
+ for name in ['low', 'high']])
- spack.config._configuration = real_configuration
+ yield spack.config.config
+
+ spack.config.config = real_configuration
@pytest.fixture()
@@ -242,7 +242,7 @@ def test_write_key_to_disk(config, compiler_specs):
spack.config.set('compilers', b_comps['compilers'], scope='high')
# Clear caches so we're forced to read from disk.
- spack.config.config().clear_caches()
+ spack.config.config.clear_caches()
# Same check again, to ensure consistency.
check_compiler_config(a_comps['compilers'], *compiler_specs.a)
@@ -255,7 +255,7 @@ def test_write_to_same_priority_file(config, compiler_specs):
spack.config.set('compilers', b_comps['compilers'], scope='low')
# Clear caches so we're forced to read from disk.
- spack.config.config().clear_caches()
+ spack.config.config.clear_caches()
# Same check again, to ensure consistency.
check_compiler_config(a_comps['compilers'], *compiler_specs.a)
diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py
index c405d1246a..a003df40c8 100644
--- a/lib/spack/spack/test/conftest.py
+++ b/lib/spack/spack/test/conftest.py
@@ -155,7 +155,7 @@ def mock_fetch_cache(monkeypatch):
def __str__(self):
return "[mock fetch cache]"
- monkeypatch.setattr(spack.caches, '_fetch_cache', MockCache())
+ monkeypatch.setattr(spack.caches, 'fetch_cache', MockCache())
# FIXME: The lines below should better be added to a fixture with
@@ -242,148 +242,104 @@ def config(configuration_dir):
# Set up a mock config scope
spack.package_prefs.PackagePrefs.clear_caches()
- real_configuration = spack.config._configuration
+ real_configuration = spack.config.config
- scopes = [
- spack.config.ConfigScope(name, str(configuration_dir.join(name)))
- for name in ['site', 'system', 'user']]
- config = spack.config.Configuration(*scopes)
- spack.config._configuration = config
+ spack.config.config = spack.config.Configuration(
+ *[spack.config.ConfigScope(name, str(configuration_dir.join(name)))
+ for name in ['site', 'system', 'user']])
- yield config
+ yield spack.config.config
- spack.config._configuration = real_configuration
+ spack.config.config = real_configuration
spack.package_prefs.PackagePrefs.clear_caches()
+def _populate(mock_db):
+ """Populate a mock database with packages.
+
+ Here is what the mock DB looks like:
+
+ o mpileaks o mpileaks' o mpileaks''
+ |\ |\ |\
+ | o callpath | o callpath' | o callpath''
+ |/| |/| |/|
+ o | mpich o | mpich2 o | zmpi
+ | | o | fake
+ | | |
+ | |______________/
+ | .____________/
+ |/
+ o dyninst
+ |\
+ | o libdwarf
+ |/
+ o libelf
+ """
+ def _install(spec):
+ s = spack.spec.Spec(spec).concretized()
+ pkg = spack.repo.get(s)
+ pkg.do_install(fake=True)
+
+ # Transaction used to avoid repeated writes.
+ with mock_db.write_transaction():
+ _install('mpileaks ^mpich')
+ _install('mpileaks ^mpich2')
+ _install('mpileaks ^zmpi')
+ _install('externaltest')
+
+
@pytest.fixture(scope='module')
def database(tmpdir_factory, mock_packages, config):
"""Creates a mock database with some packages installed note that
the ref count for dyninst here will be 3, as it's recycled
across each install.
"""
-
- # Here is what the mock DB looks like:
- #
- # o mpileaks o mpileaks' o mpileaks''
- # |\ |\ |\
- # | o callpath | o callpath' | o callpath''
- # |/| |/| |/|
- # o | mpich o | mpich2 o | zmpi
- # | | o | fake
- # | | |
- # | |______________/
- # | .____________/
- # |/
- # o dyninst
- # |\
- # | o libdwarf
- # |/
- # o libelf
+ # save the real store
+ real_store = spack.store.store
# Make a fake install directory
install_path = tmpdir_factory.mktemp('install_for_database')
- spack_install_path = spack.store.store().root
-
- spack.store.store().root = str(install_path)
- install_layout = spack.directory_layout.YamlDirectoryLayout(
- str(install_path))
- spack_install_layout = spack.store.store().layout
- spack.store.store().layout = install_layout
-
- # Make fake database and fake install directory.
- install_db = spack.database.Database(str(install_path))
- spack_install_db = spack.store.store().db
- spack.store.store().db = install_db
-
- Entry = collections.namedtuple('Entry', ['path', 'layout', 'db'])
- Database = collections.namedtuple(
- 'Database', ['real', 'mock', 'install', 'uninstall', 'refresh'])
- real = Entry(
- path=spack_install_path,
- layout=spack_install_layout,
- db=spack_install_db)
- mock = Entry(path=install_path, layout=install_layout, db=install_db)
+ # Make fake store (database and install layout)
+ tmp_store = spack.store.Store(str(install_path))
+ spack.store.store = tmp_store
- def _install(spec):
- s = spack.spec.Spec(spec)
- s.concretize()
- pkg = spack.repo.get(s)
- pkg.do_install(fake=True)
-
- def _uninstall(spec):
- spec.package.do_uninstall(spec)
-
- def _refresh():
- with spack.store.store().db.write_transaction():
- for spec in spack.store.store().db.query():
- _uninstall(spec)
- _install('mpileaks ^mpich')
- _install('mpileaks ^mpich2')
- _install('mpileaks ^zmpi')
- _install('externaltest')
-
- t = Database(
- real=real,
- mock=mock,
- install=_install,
- uninstall=_uninstall,
- refresh=_refresh)
-
- # Transaction used to avoid repeated writes.
- with spack.store.store().db.write_transaction():
- t.install('mpileaks ^mpich')
- t.install('mpileaks ^mpich2')
- t.install('mpileaks ^zmpi')
- t.install('externaltest')
+ _populate(tmp_store.db)
- yield t
+ yield tmp_store.db
- with spack.store.store().db.write_transaction():
- for spec in spack.store.store().db.query():
+ with tmp_store.db.write_transaction():
+ for spec in tmp_store.db.query():
if spec.package.installed:
- t.uninstall(spec)
+ PackageBase.uninstall_by_spec(spec, force=True)
else:
- spack.store.store().db.remove(spec)
+ tmp_store.db.remove(spec)
install_path.remove(rec=1)
- spack.store.store().root = spack_install_path
- spack.store.store().layout = spack_install_layout
- spack.store.store().db = spack_install_db
+ spack.store.store = real_store
-@pytest.fixture()
-def refresh_db_on_exit(database):
- """"Restores the state of the database after a test."""
- yield
- database.refresh()
+@pytest.fixture(scope='function')
+def mutable_database(database):
+ """For tests that need to modify the database instance."""
+ yield database
+ with database.write_transaction():
+ for spec in spack.store.db.query():
+ PackageBase.uninstall_by_spec(spec, force=True)
+ _populate(database)
-@pytest.fixture()
+@pytest.fixture(scope='function')
def install_mockery(tmpdir, config, mock_packages):
"""Hooks a fake install directory, DB, and stage directory into Spack."""
- layout = spack.store.store().layout
- extensions = spack.store.store().extensions
- db = spack.store.store().db
- new_opt = str(tmpdir.join('opt'))
-
- # Use a fake install directory to avoid conflicts bt/w
- # installed pkgs and mock packages.
- store = spack.store.store()
- store.layout = spack.directory_layout.YamlDirectoryLayout(new_opt)
- store.extensions = spack.directory_layout.YamlExtensionsLayout(
- new_opt, spack.store.store().layout)
- store.db = spack.database.Database(new_opt)
+ real_store = spack.store.store
+ spack.store.store = spack.store.Store(str(tmpdir.join('opt')))
# We use a fake package, so temporarily disable checksumming
with spack.config.override('config:checksum', False):
yield
- # Restore Spack's layout.
- store.layout = layout
- store.extensions = extensions
- store.db = db
+ spack.store.store = real_store
@pytest.fixture()
diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py
index b0ee343095..2e61853349 100644
--- a/lib/spack/spack/test/database.py
+++ b/lib/spack/spack/test/database.py
@@ -47,16 +47,16 @@ def _print_ref_counts():
recs = []
def add_rec(spec):
- cspecs = spack.store.store().db.query(spec, installed=any)
+ cspecs = spack.store.db.query(spec, installed=any)
if not cspecs:
recs.append("[ %-7s ] %-20s-" % ('', spec))
else:
key = cspecs[0].dag_hash()
- rec = spack.store.store().db.get_record(cspecs[0])
+ rec = spack.store.db.get_record(cspecs[0])
recs.append("[ %-7s ] %-20s%d" % (key[:7], spec, rec.ref_count))
- with spack.store.store().db.read_transaction():
+ with spack.store.db.read_transaction():
add_rec('mpileaks ^mpich')
add_rec('callpath ^mpich')
add_rec('mpich')
@@ -79,7 +79,7 @@ def _print_ref_counts():
def _check_merkleiness():
"""Ensure the spack database is a valid merkle graph."""
- all_specs = spack.store.store().db.query(installed=any)
+ all_specs = spack.store.db.query(installed=any)
seen = {}
for spec in all_specs:
@@ -91,10 +91,10 @@ def _check_merkleiness():
assert seen[hash_key] == id(dep)
-def _check_db_sanity(install_db):
+def _check_db_sanity(database):
"""Utiilty function to check db against install layout."""
- pkg_in_layout = sorted(spack.store.store().layout.all_specs())
- actual = sorted(install_db.query())
+ pkg_in_layout = sorted(spack.store.layout.all_specs())
+ actual = sorted(database.query())
externals = sorted([x for x in actual if x.external])
nexpected = len(pkg_in_layout) + len(externals)
@@ -109,19 +109,19 @@ def _check_db_sanity(install_db):
_check_merkleiness()
-def _check_remove_and_add_package(install_db, spec):
+def _check_remove_and_add_package(database, spec):
"""Remove a spec from the DB, then add it and make sure everything's
still ok once it is added. This checks that it was
removed, that it's back when added again, and that ref
counts are consistent.
"""
- original = install_db.query()
- install_db._check_ref_counts()
+ original = database.query()
+ database._check_ref_counts()
# Remove spec
- concrete_spec = install_db.remove(spec)
- install_db._check_ref_counts()
- remaining = install_db.query()
+ concrete_spec = database.remove(spec)
+ database._check_ref_counts()
+ remaining = database.query()
# ensure spec we removed is gone
assert len(original) - 1 == len(remaining)
@@ -129,14 +129,14 @@ def _check_remove_and_add_package(install_db, spec):
assert concrete_spec not in remaining
# add it back and make sure everything is ok.
- install_db.add(concrete_spec, spack.store.store().layout)
- installed = install_db.query()
+ database.add(concrete_spec, spack.store.layout)
+ installed = database.query()
assert concrete_spec in installed
assert installed == original
# sanity check against direcory layout and check ref counts.
- _check_db_sanity(install_db)
- install_db._check_ref_counts()
+ _check_db_sanity(database)
+ database._check_ref_counts()
def _mock_install(spec):
@@ -147,7 +147,7 @@ def _mock_install(spec):
def _mock_remove(spec):
- specs = spack.store.store().db.query(spec)
+ specs = spack.store.db.query(spec)
assert len(specs) == 1
spec = specs[0]
spec.package.do_uninstall(spec)
@@ -156,8 +156,7 @@ def _mock_remove(spec):
def test_default_queries(database):
# Testing a package whose name *doesn't* start with 'lib'
# to ensure the library has 'lib' prepended to the name
- install_db = database.mock.db
- rec = install_db.get_record('zmpi')
+ rec = database.get_record('zmpi')
spec = rec.spec
@@ -176,8 +175,7 @@ def test_default_queries(database):
# Testing a package whose name *does* start with 'lib'
# to ensure the library doesn't have a double 'lib' prefix
- install_db = database.mock.db
- rec = install_db.get_record('libelf')
+ rec = database.get_record('libelf')
spec = rec.spec
@@ -197,16 +195,15 @@ def test_default_queries(database):
def test_005_db_exists(database):
"""Make sure db cache file exists after creating."""
- install_path = database.mock.path
- index_file = install_path.join('.spack-db', 'index.json')
- lock_file = install_path.join('.spack-db', 'lock')
+ index_file = os.path.join(database.root, '.spack-db', 'index.json')
+ lock_file = os.path.join(database.root, '.spack-db', 'lock')
assert os.path.exists(str(index_file))
assert os.path.exists(str(lock_file))
def test_010_all_install_sanity(database):
"""Ensure that the install layout reflects what we think it does."""
- all_specs = spack.store.store().layout.all_specs()
+ all_specs = spack.store.layout.all_specs()
assert len(all_specs) == 14
# Query specs with multiple configurations
@@ -241,12 +238,12 @@ def test_010_all_install_sanity(database):
def test_015_write_and_read(database):
# write and read DB
- with spack.store.store().db.write_transaction():
- specs = spack.store.store().db.query()
- recs = [spack.store.store().db.get_record(s) for s in specs]
+ with spack.store.db.write_transaction():
+ specs = spack.store.db.query()
+ recs = [spack.store.db.get_record(s) for s in specs]
for spec, rec in zip(specs, recs):
- new_rec = spack.store.store().db.get_record(spec)
+ new_rec = spack.store.db.get_record(spec)
assert new_rec.ref_count == rec.ref_count
assert new_rec.spec == rec.spec
assert new_rec.path == rec.path
@@ -255,23 +252,20 @@ def test_015_write_and_read(database):
def test_020_db_sanity(database):
"""Make sure query() returns what's actually in the db."""
- install_db = database.mock.db
- _check_db_sanity(install_db)
+ _check_db_sanity(database)
def test_025_reindex(database):
"""Make sure reindex works and ref counts are valid."""
- install_db = database.mock.db
- spack.store.store().reindex()
- _check_db_sanity(install_db)
+ spack.store.store.reindex()
+ _check_db_sanity(database)
-def test_030_db_sanity_from_another_process(database, refresh_db_on_exit):
- install_db = database.mock.db
-
+def test_030_db_sanity_from_another_process(mutable_database):
def read_and_modify():
- _check_db_sanity(install_db) # check that other process can read DB
- with install_db.write_transaction():
+ # check that other process can read DB
+ _check_db_sanity(mutable_database)
+ with mutable_database.write_transaction():
_mock_remove('mpileaks ^zmpi')
p = multiprocessing.Process(target=read_and_modify, args=())
@@ -279,181 +273,166 @@ def test_030_db_sanity_from_another_process(database, refresh_db_on_exit):
p.join()
# ensure child process change is visible in parent process
- with install_db.read_transaction():
- assert len(install_db.query('mpileaks ^zmpi')) == 0
+ with mutable_database.read_transaction():
+ assert len(mutable_database.query('mpileaks ^zmpi')) == 0
def test_040_ref_counts(database):
"""Ensure that we got ref counts right when we read the DB."""
- install_db = database.mock.db
- install_db._check_ref_counts()
+ database._check_ref_counts()
def test_050_basic_query(database):
"""Ensure querying database is consistent with what is installed."""
- install_db = database.mock.db
# query everything
- assert len(spack.store.store().db.query()) == 16
+ assert len(spack.store.db.query()) == 16
# query specs with multiple configurations
- mpileaks_specs = install_db.query('mpileaks')
- callpath_specs = install_db.query('callpath')
- mpi_specs = install_db.query('mpi')
+ mpileaks_specs = database.query('mpileaks')
+ callpath_specs = database.query('callpath')
+ mpi_specs = database.query('mpi')
assert len(mpileaks_specs) == 3
assert len(callpath_specs) == 3
assert len(mpi_specs) == 3
# query specs with single configurations
- dyninst_specs = install_db.query('dyninst')
- libdwarf_specs = install_db.query('libdwarf')
- libelf_specs = install_db.query('libelf')
+ dyninst_specs = database.query('dyninst')
+ libdwarf_specs = database.query('libdwarf')
+ libelf_specs = database.query('libelf')
assert len(dyninst_specs) == 1
assert len(libdwarf_specs) == 1
assert len(libelf_specs) == 1
# Query by dependency
- assert len(install_db.query('mpileaks ^mpich')) == 1
- assert len(install_db.query('mpileaks ^mpich2')) == 1
- assert len(install_db.query('mpileaks ^zmpi')) == 1
+ assert len(database.query('mpileaks ^mpich')) == 1
+ assert len(database.query('mpileaks ^mpich2')) == 1
+ assert len(database.query('mpileaks ^zmpi')) == 1
# Query by date
- assert len(install_db.query(start_date=datetime.datetime.min)) == 16
- assert len(install_db.query(start_date=datetime.datetime.max)) == 0
- assert len(install_db.query(end_date=datetime.datetime.min)) == 0
- assert len(install_db.query(end_date=datetime.datetime.max)) == 16
+ assert len(database.query(start_date=datetime.datetime.min)) == 16
+ assert len(database.query(start_date=datetime.datetime.max)) == 0
+ assert len(database.query(end_date=datetime.datetime.min)) == 0
+ assert len(database.query(end_date=datetime.datetime.max)) == 16
def test_060_remove_and_add_root_package(database):
- install_db = database.mock.db
- _check_remove_and_add_package(install_db, 'mpileaks ^mpich')
+ _check_remove_and_add_package(database, 'mpileaks ^mpich')
def test_070_remove_and_add_dependency_package(database):
- install_db = database.mock.db
- _check_remove_and_add_package(install_db, 'dyninst')
+ _check_remove_and_add_package(database, 'dyninst')
def test_080_root_ref_counts(database):
- install_db = database.mock.db
- rec = install_db.get_record('mpileaks ^mpich')
+ rec = database.get_record('mpileaks ^mpich')
# Remove a top-level spec from the DB
- install_db.remove('mpileaks ^mpich')
+ database.remove('mpileaks ^mpich')
# record no longer in DB
- assert install_db.query('mpileaks ^mpich', installed=any) == []
+ assert database.query('mpileaks ^mpich', installed=any) == []
# record's deps have updated ref_counts
- assert install_db.get_record('callpath ^mpich').ref_count == 0
- assert install_db.get_record('mpich').ref_count == 1
+ assert database.get_record('callpath ^mpich').ref_count == 0
+ assert database.get_record('mpich').ref_count == 1
# Put the spec back
- install_db.add(rec.spec, spack.store.store().layout)
+ database.add(rec.spec, spack.store.layout)
# record is present again
- assert len(install_db.query('mpileaks ^mpich', installed=any)) == 1
+ assert len(database.query('mpileaks ^mpich', installed=any)) == 1
# dependencies have ref counts updated
- assert install_db.get_record('callpath ^mpich').ref_count == 1
- assert install_db.get_record('mpich').ref_count == 2
+ assert database.get_record('callpath ^mpich').ref_count == 1
+ assert database.get_record('mpich').ref_count == 2
def test_090_non_root_ref_counts(database):
- install_db = database.mock.db
-
- install_db.get_record('mpileaks ^mpich')
- install_db.get_record('callpath ^mpich')
+ database.get_record('mpileaks ^mpich')
+ database.get_record('callpath ^mpich')
# "force remove" a non-root spec from the DB
- install_db.remove('callpath ^mpich')
+ database.remove('callpath ^mpich')
# record still in DB but marked uninstalled
- assert install_db.query('callpath ^mpich', installed=True) == []
- assert len(install_db.query('callpath ^mpich', installed=any)) == 1
+ assert database.query('callpath ^mpich', installed=True) == []
+ assert len(database.query('callpath ^mpich', installed=any)) == 1
# record and its deps have same ref_counts
- assert install_db.get_record(
+ assert database.get_record(
'callpath ^mpich', installed=any
).ref_count == 1
- assert install_db.get_record('mpich').ref_count == 2
+ assert database.get_record('mpich').ref_count == 2
# remove only dependent of uninstalled callpath record
- install_db.remove('mpileaks ^mpich')
+ database.remove('mpileaks ^mpich')
# record and parent are completely gone.
- assert install_db.query('mpileaks ^mpich', installed=any) == []
- assert install_db.query('callpath ^mpich', installed=any) == []
+ assert database.query('mpileaks ^mpich', installed=any) == []
+ assert database.query('callpath ^mpich', installed=any) == []
# mpich ref count updated properly.
- mpich_rec = install_db.get_record('mpich')
+ mpich_rec = database.get_record('mpich')
assert mpich_rec.ref_count == 0
def test_100_no_write_with_exception_on_remove(database):
- install_db = database.mock.db
-
def fail_while_writing():
- with install_db.write_transaction():
+ with database.write_transaction():
_mock_remove('mpileaks ^zmpi')
raise Exception()
- with install_db.read_transaction():
- assert len(install_db.query('mpileaks ^zmpi', installed=any)) == 1
+ with database.read_transaction():
+ assert len(database.query('mpileaks ^zmpi', installed=any)) == 1
with pytest.raises(Exception):
fail_while_writing()
# reload DB and make sure zmpi is still there.
- with install_db.read_transaction():
- assert len(install_db.query('mpileaks ^zmpi', installed=any)) == 1
+ with database.read_transaction():
+ assert len(database.query('mpileaks ^zmpi', installed=any)) == 1
def test_110_no_write_with_exception_on_install(database):
- install_db = database.mock.db
-
def fail_while_writing():
- with install_db.write_transaction():
+ with database.write_transaction():
_mock_install('cmake')
raise Exception()
- with install_db.read_transaction():
- assert install_db.query('cmake', installed=any) == []
+ with database.read_transaction():
+ assert database.query('cmake', installed=any) == []
with pytest.raises(Exception):
fail_while_writing()
# reload DB and make sure cmake was not written.
- with install_db.read_transaction():
- assert install_db.query('cmake', installed=any) == []
-
+ with database.read_transaction():
+ assert database.query('cmake', installed=any) == []
-def test_115_reindex_with_packages_not_in_repo(database, refresh_db_on_exit):
- install_db = database.mock.db
+def test_115_reindex_with_packages_not_in_repo(mutable_database):
# Dont add any package definitions to this repository, the idea is that
# packages should not have to be defined in the repository once they
# are installed
with spack.repo.swap(MockPackageMultiRepo([])):
- spack.store.store().reindex()
- _check_db_sanity(install_db)
+ spack.store.store.reindex()
+ _check_db_sanity(mutable_database)
def test_external_entries_in_db(database):
- install_db = database.mock.db
-
- rec = install_db.get_record('mpileaks ^zmpi')
+ rec = database.get_record('mpileaks ^zmpi')
assert rec.spec.external_path is None
assert rec.spec.external_module is None
- rec = install_db.get_record('externaltool')
+ rec = database.get_record('externaltool')
assert rec.spec.external_path == '/path/to/external_tool'
assert rec.spec.external_module is None
assert rec.explicit is False
rec.spec.package.do_install(fake=True, explicit=True)
- rec = install_db.get_record('externaltool')
+ rec = database.get_record('externaltool')
assert rec.spec.external_path == '/path/to/external_tool'
assert rec.spec.external_module is None
assert rec.explicit is True
diff --git a/lib/spack/spack/test/directory_layout.py b/lib/spack/spack/test/directory_layout.py
index de4000b651..ac12381e75 100644
--- a/lib/spack/spack/test/directory_layout.py
+++ b/lib/spack/spack/test/directory_layout.py
@@ -104,7 +104,7 @@ def test_read_and_write_spec(
layout.
"""
layout, tmpdir = layout_and_dir
- packages = list(spack.repo.path().all_packages())[:max_packages]
+ packages = list(spack.repo.path.all_packages())[:max_packages]
for pkg in packages:
if pkg.name.startswith('external'):
@@ -226,7 +226,7 @@ def test_handle_unknown_package(
def test_find(layout_and_dir, config, mock_packages):
"""Test that finding specs within an install layout works."""
layout, _ = layout_and_dir
- packages = list(spack.repo.path().all_packages())[:max_packages]
+ packages = list(spack.repo.path.all_packages())[:max_packages]
# Create install prefixes for all packages in the list
installed_specs = {}
diff --git a/lib/spack/spack/test/package_sanity.py b/lib/spack/spack/test/package_sanity.py
index 849ebc9053..3f56b7c6ba 100644
--- a/lib/spack/spack/test/package_sanity.py
+++ b/lib/spack/spack/test/package_sanity.py
@@ -65,7 +65,7 @@ def test_all_virtual_packages_have_default_providers():
"""All virtual packages must have a default provider explicitly set."""
defaults = spack.config.get('packages', scope='defaults')
default_providers = defaults['all']['providers']
- providers = spack.repo.path().provider_index.providers
+ providers = spack.repo.path.provider_index.providers
for provider in providers:
assert provider in default_providers
diff --git a/lib/spack/spack/test/spec_syntax.py b/lib/spack/spack/test/spec_syntax.py
index 5f5c1fd3ee..168fb4fc68 100644
--- a/lib/spack/spack/test/spec_syntax.py
+++ b/lib/spack/spack/test/spec_syntax.py
@@ -261,7 +261,7 @@ class TestSpecSyntax(object):
@pytest.mark.db
def test_spec_by_hash(self, database):
- specs = database.mock.db.query()
+ specs = database.query()
assert len(specs) # make sure something's in the DB
for spec in specs:
@@ -269,9 +269,9 @@ class TestSpecSyntax(object):
@pytest.mark.db
def test_dep_spec_by_hash(self, database):
- mpileaks_zmpi = database.mock.db.query_one('mpileaks ^zmpi')
- zmpi = database.mock.db.query_one('zmpi')
- fake = database.mock.db.query_one('fake')
+ mpileaks_zmpi = database.query_one('mpileaks ^zmpi')
+ zmpi = database.query_one('zmpi')
+ fake = database.query_one('fake')
assert 'fake' in mpileaks_zmpi
assert 'zmpi' in mpileaks_zmpi
@@ -297,8 +297,8 @@ class TestSpecSyntax(object):
@pytest.mark.db
def test_multiple_specs_with_hash(self, database):
- mpileaks_zmpi = database.mock.db.query_one('mpileaks ^zmpi')
- callpath_mpich2 = database.mock.db.query_one('callpath ^mpich2')
+ mpileaks_zmpi = database.query_one('mpileaks ^zmpi')
+ callpath_mpich2 = database.query_one('callpath ^mpich2')
# name + hash + separate hash
specs = sp.parse('mpileaks /' + mpileaks_zmpi.dag_hash() +
@@ -336,8 +336,8 @@ class TestSpecSyntax(object):
x2 = Spec('a')
x2._hash = 'xx'
x2._concrete = True
- database.mock.db.add(x1, spack.store.store().layout)
- database.mock.db.add(x2, spack.store.store().layout)
+ database.add(x1, spack.store.layout)
+ database.add(x2, spack.store.layout)
# ambiguity in first hash character
self._check_raises(AmbiguousHashError, ['/x'])
@@ -347,11 +347,11 @@ class TestSpecSyntax(object):
@pytest.mark.db
def test_invalid_hash(self, database):
- mpileaks_zmpi = database.mock.db.query_one('mpileaks ^zmpi')
- zmpi = database.mock.db.query_one('zmpi')
+ mpileaks_zmpi = database.query_one('mpileaks ^zmpi')
+ zmpi = database.query_one('zmpi')
- mpileaks_mpich = database.mock.db.query_one('mpileaks ^mpich')
- mpich = database.mock.db.query_one('mpich')
+ mpileaks_mpich = database.query_one('mpileaks ^mpich')
+ mpich = database.query_one('mpich')
# name + incompatible hash
self._check_raises(InvalidHashError, [
@@ -366,7 +366,7 @@ class TestSpecSyntax(object):
@pytest.mark.db
def test_nonexistent_hash(self, database):
"""Ensure we get errors for nonexistant hashes."""
- specs = database.mock.db.query()
+ specs = database.query()
# This hash shouldn't be in the test DB. What are the odds :)
no_such_hash = 'aaaaaaaaaaaaaaa'
@@ -385,11 +385,11 @@ class TestSpecSyntax(object):
specs only raise errors if constraints cause a contradiction?
"""
- mpileaks_zmpi = database.mock.db.query_one('mpileaks ^zmpi')
- callpath_zmpi = database.mock.db.query_one('callpath ^zmpi')
- dyninst = database.mock.db.query_one('dyninst')
+ mpileaks_zmpi = database.query_one('mpileaks ^zmpi')
+ callpath_zmpi = database.query_one('callpath ^zmpi')
+ dyninst = database.query_one('dyninst')
- mpileaks_mpich2 = database.mock.db.query_one('mpileaks ^mpich2')
+ mpileaks_mpich2 = database.query_one('mpileaks ^mpich2')
redundant_specs = [
# redudant compiler
diff --git a/lib/spack/spack/util/package_hash.py b/lib/spack/spack/util/package_hash.py
index fe5316ca0a..0acb211312 100644
--- a/lib/spack/spack/util/package_hash.py
+++ b/lib/spack/spack/util/package_hash.py
@@ -131,7 +131,7 @@ def package_hash(spec, content=None):
def package_ast(spec):
spec = Spec(spec)
- filename = spack.repo.path().filename_for_package_name(spec.name)
+ filename = spack.repo.path.filename_for_package_name(spec.name)
with open(filename) as f:
text = f.read()
root = ast.parse(text)