Diffstat (limited to 'lib')
 lib/spack/spack/cmd/modules/__init__.py      |  27
 lib/spack/spack/cmd/uninstall.py             |   2
 lib/spack/spack/config.py                    |   2
 lib/spack/spack/database.py                  | 379
 lib/spack/spack/directory_layout.py          |  33
 lib/spack/spack/main.py                      |  21
 lib/spack/spack/modules/common.py            |  60
 lib/spack/spack/package.py                   |  17
 lib/spack/spack/schema/merged.py             |   4
 lib/spack/spack/schema/upstreams.py          |  40
 lib/spack/spack/spec.py                      |  11
 lib/spack/spack/store.py                     |  27
 lib/spack/spack/test/cmd/print_shell_vars.py |  16
 lib/spack/spack/test/conftest.py             |  24
 lib/spack/spack/test/database.py             | 177
 lib/spack/spack/test/directory_layout.py     |   6
 lib/spack/spack/test/install.py              |  71
 lib/spack/spack/test/modules/tcl.py          |  17
 18 files changed, 769 insertions(+), 165 deletions(-)
diff --git a/lib/spack/spack/cmd/modules/__init__.py b/lib/spack/spack/cmd/modules/__init__.py
index f62218be90..e3f2e8099b 100644
--- a/lib/spack/spack/cmd/modules/__init__.py
+++ b/lib/spack/spack/cmd/modules/__init__.py
@@ -32,6 +32,11 @@ def setup_parser(subparser):
help='delete the module file tree before refresh',
action='store_true'
)
+ refresh_parser.add_argument(
+ '--upstream-modules',
+ help='generate modules for packages installed upstream',
+ action='store_true'
+ )
arguments.add_common_arguments(
refresh_parser, ['constraint', 'yes_to_all']
)
@@ -125,10 +130,14 @@ def loads(module_type, specs, args, out=sys.stdout):
)
module_cls = spack.modules.module_types[module_type]
- modules = [
- (spec, module_cls(spec).layout.use_name)
- for spec in specs if os.path.exists(module_cls(spec).layout.filename)
- ]
+ modules = list()
+ for spec in specs:
+ if os.path.exists(module_cls(spec).layout.filename):
+ modules.append((spec, module_cls(spec).layout.use_name))
+ elif spec.package.installed_upstream:
+ tty.debug("Using upstream module for {0}".format(spec))
+ module = spack.modules.common.upstream_module(spec, module_type)
+ modules.append((spec, module.use_name))
module_commands = {
'tcl': 'module load ',
@@ -159,6 +168,12 @@ def find(module_type, specs, args):
spec = one_spec_or_raise(specs)
+ if spec.package.installed_upstream:
+ module = spack.modules.common.upstream_module(spec, module_type)
+ if module:
+ print(module.path)
+ return
+
# Check if the module file is present
def module_exists(spec):
writer = spack.modules.module_types[module_type](spec)
@@ -232,6 +247,9 @@ def refresh(module_type, specs, args):
tty.msg('No package matches your query')
return
+ if not args.upstream_modules:
+ specs = list(s for s in specs if not s.package.installed_upstream)
+
if not args.yes_to_all:
msg = 'You are about to regenerate {types} module files for:\n'
tty.msg(msg.format(types=module_type))
@@ -276,6 +294,7 @@ def refresh(module_type, specs, args):
# If we arrived here we have at least one writer
module_type_root = writers[0].layout.dirname()
+ spack.modules.common.generate_module_index(module_type_root, writers)
# Proceed regenerating module files
tty.msg('Regenerating {name} module files'.format(name=module_type))
if os.path.isdir(module_type_root) and args.delete_tree:
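The net effect of this hunk: refresh now skips upstream-installed specs unless the new flag is passed, and writes a module index (see modules/common.py below) for whatever it regenerates. A plausible invocation, assuming the `spack module <type> refresh` subcommand layout this file implements (only the flag name is taken from the hunk above; the rest is illustrative):

    # Regenerate tcl module files, including packages installed upstream
    spack module tcl refresh --upstream-modules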
diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py
index cf2c7aaafb..cd89f5aba8 100644
--- a/lib/spack/spack/cmd/uninstall.py
+++ b/lib/spack/spack/cmd/uninstall.py
@@ -80,7 +80,7 @@ def find_matching_specs(env, specs, allow_multiple_matches=False, force=False):
specs_from_cli = []
has_errors = False
for spec in specs:
- matching = spack.store.db.query(spec, hashes=hashes)
+ matching = spack.store.db.query_local(spec, hashes=hashes)
# For each spec provided, make sure it refers to only one package.
# Fail and ask user to be unambiguous if it doesn't
if not allow_multiple_matches and len(matching) > 1:
diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py
index be0cbc4965..3aa91d6349 100644
--- a/lib/spack/spack/config.py
+++ b/lib/spack/spack/config.py
@@ -55,6 +55,7 @@ import spack.schema.repos
import spack.schema.packages
import spack.schema.modules
import spack.schema.config
+import spack.schema.upstreams
from spack.error import SpackError
# Hacked yaml for configuration files preserves line numbers.
@@ -69,6 +70,7 @@ section_schemas = {
'packages': spack.schema.packages.schema,
'modules': spack.schema.modules.schema,
'config': spack.schema.config.schema,
+ 'upstreams': spack.schema.upstreams.schema
}
#: Builtin paths to configuration files in Spack
diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py
index 66cc64abaa..b76ba5722c 100644
--- a/lib/spack/spack/database.py
+++ b/lib/spack/spack/database.py
@@ -135,12 +135,23 @@ class InstallRecord(object):
return InstallRecord(spec, **d)
+class ForbiddenLockError(SpackError):
+ """Raised when an upstream DB attempts to acquire a lock"""
+
+
+class ForbiddenLock(object):
+ def __getattribute__(self, name):
+ raise ForbiddenLockError(
+ "Cannot access attribute '{0}' of lock".format(name))
+
+
class Database(object):
"""Per-process lock objects for each install prefix."""
_prefix_locks = {}
- def __init__(self, root, db_dir=None):
+ def __init__(self, root, db_dir=None, upstream_dbs=None,
+ is_upstream=False):
"""Create a Database for Spack installations under ``root``.
A Database is a cache of Specs data from ``$prefix/spec.yaml``
@@ -183,6 +194,13 @@ class Database(object):
if not os.path.exists(self._db_dir):
mkdirp(self._db_dir)
+ self.is_upstream = is_upstream
+
+ if self.is_upstream:
+ self.lock = ForbiddenLock()
+ else:
+ self.lock = Lock(self._lock_path)
+
# initialize rest of state.
self.db_lock_timeout = (
spack.config.get('config:db_lock_timeout') or _db_lock_timeout)
@@ -198,9 +216,16 @@ class Database(object):
default_timeout=self.db_lock_timeout)
self._data = {}
+ self.upstream_dbs = list(upstream_dbs) if upstream_dbs else []
+
# whether there was an error at the start of a read transaction
self._error = None
+ # For testing: if this is true, an exception is thrown when missing
+ # dependencies are detected (rather than just printing a warning
+ # message)
+ self._fail_when_missing_deps = False
+
def write_transaction(self):
"""Get a write lock context manager for use in a `with` block."""
return WriteTransaction(self.lock, self._read, self._write)
@@ -311,23 +336,56 @@ class Database(object):
spec = spack.spec.Spec.from_node_dict(spec_dict)
return spec
+ def db_for_spec_hash(self, hash_key):
+ with self.read_transaction():
+ if hash_key in self._data:
+ return self
+
+ for db in self.upstream_dbs:
+ if hash_key in db._data:
+ return db
+
+ def query_by_spec_hash(self, hash_key, data=None):
+ if data and hash_key in data:
+ return False, data[hash_key]
+ if not data:
+ with self.read_transaction():
+ if hash_key in self._data:
+ return False, self._data[hash_key]
+ for db in self.upstream_dbs:
+ if hash_key in db._data:
+ return True, db._data[hash_key]
+ return False, None
+
def _assign_dependencies(self, hash_key, installs, data):
# Add dependencies from other records in the install DB to
# form a full spec.
spec = data[hash_key].spec
spec_dict = installs[hash_key]['spec']
-
if 'dependencies' in spec_dict[spec.name]:
yaml_deps = spec_dict[spec.name]['dependencies']
for dname, dhash, dtypes in spack.spec.Spec.read_yaml_dep_specs(
yaml_deps):
- if dhash not in data:
- tty.warn("Missing dependency not in database: ",
- "%s needs %s-%s" % (
- spec.cformat('$_$/'), dname, dhash[:7]))
+ # It is important that we always check upstream installations
+ # in the same order, and that we always check the local
+ # installation first: if a downstream Spack installs a package
+ # then dependents in that installation could be using it.
+ # If a hash is installed locally and upstream, there isn't
+ # enough information to determine which one a local package
+ # depends on, so the convention ensures that this isn't an
+ # issue.
+ upstream, record = self.query_by_spec_hash(dhash, data=data)
+ child = record.spec if record else None
+
+ if not child:
+ msg = ("Missing dependency not in database: "
+ "%s needs %s-%s" % (
+ spec.cformat('$_$/'), dname, dhash[:7]))
+ if self._fail_when_missing_deps:
+ raise MissingDependenciesError(msg)
+ tty.warn(msg)
continue
- child = data[dhash].spec
spec._add_dependency(child, dtypes)
def _read_from_file(self, stream, format='json'):
@@ -407,7 +465,6 @@ class Database(object):
# TODO: would a more immutable spec implementation simplify
# this?
data[hash_key] = InstallRecord.from_dict(spec, rec)
-
except Exception as e:
invalid_record(hash_key, e)
@@ -415,6 +472,8 @@ class Database(object):
for hash_key in data:
try:
self._assign_dependencies(hash_key, installs, data)
+ except MissingDependenciesError:
+ raise
except Exception as e:
invalid_record(hash_key, e)
@@ -434,6 +493,10 @@ class Database(object):
Locks the DB if it isn't locked already.
"""
+ if self.is_upstream:
+ raise UpstreamDatabaseLockingError(
+ "Cannot reindex an upstream database")
+
# Special transaction to avoid recursive reindex calls and to
# ignore errors if we need to rebuild a corrupt database.
def _read_suppress_error():
@@ -456,89 +519,93 @@ class Database(object):
)
self._error = None
- # Read first the `spec.yaml` files in the prefixes. They should be
- # considered authoritative with respect to DB reindexing, as
- # entries in the DB may be corrupted in a way that still makes
- # them readable. If we considered DB entries authoritative
- # instead, we would perpetuate errors over a reindex.
-
old_data = self._data
try:
- # Initialize data in the reconstructed DB
- self._data = {}
-
- # Start inspecting the installed prefixes
- processed_specs = set()
-
- for spec in directory_layout.all_specs():
- # Try to recover explicit value from old DB, but
- # default it to True if DB was corrupt. This is
- # just to be conservative in case a command like
- # "autoremove" is run by the user after a reindex.
- tty.debug(
- 'RECONSTRUCTING FROM SPEC.YAML: {0}'.format(spec))
- explicit = True
- inst_time = os.stat(spec.prefix).st_ctime
- if old_data is not None:
- old_info = old_data.get(spec.dag_hash())
- if old_info is not None:
- explicit = old_info.explicit
- inst_time = old_info.installation_time
-
- extra_args = {
- 'explicit': explicit,
- 'installation_time': inst_time
- }
- self._add(spec, directory_layout, **extra_args)
-
- processed_specs.add(spec)
-
- for key, entry in old_data.items():
- # We already took care of this spec using
- # `spec.yaml` from its prefix.
- if entry.spec in processed_specs:
- msg = 'SKIPPING RECONSTRUCTION FROM OLD DB: {0}'
- msg += ' [already reconstructed from spec.yaml]'
- tty.debug(msg.format(entry.spec))
- continue
-
- # If we arrived here it very likely means that
- # we have external specs that are not dependencies
- # of other specs. This may be the case for externally
- # installed compilers or externally installed
- # applications.
- tty.debug(
- 'RECONSTRUCTING FROM OLD DB: {0}'.format(entry.spec))
- try:
- layout = spack.store.layout
- if entry.spec.external:
- layout = None
- install_check = True
- else:
- install_check = layout.check_installed(entry.spec)
-
- if install_check:
- kwargs = {
- 'spec': entry.spec,
- 'directory_layout': layout,
- 'explicit': entry.explicit,
- 'installation_time': entry.installation_time # noqa: E501
- }
- self._add(**kwargs)
- processed_specs.add(entry.spec)
- except Exception as e:
- # Something went wrong, so the spec was not restored
- # from old data
- tty.debug(e.message)
- pass
-
- self._check_ref_counts()
-
+ self._construct_from_directory_layout(
+ directory_layout, old_data)
except BaseException:
# If anything explodes, restore old data, skip write.
self._data = old_data
raise
+ def _construct_from_directory_layout(self, directory_layout, old_data):
+ # Read first the `spec.yaml` files in the prefixes. They should be
+ # considered authoritative with respect to DB reindexing, as
+ # entries in the DB may be corrupted in a way that still makes
+ # them readable. If we considered DB entries authoritative
+ # instead, we would perpetuate errors over a reindex.
+
+ with directory_layout.disable_upstream_check():
+ # Initialize data in the reconstructed DB
+ self._data = {}
+
+ # Start inspecting the installed prefixes
+ processed_specs = set()
+
+ for spec in directory_layout.all_specs():
+ # Try to recover explicit value from old DB, but
+ # default it to True if DB was corrupt. This is
+ # just to be conservative in case a command like
+ # "autoremove" is run by the user after a reindex.
+ tty.debug(
+ 'RECONSTRUCTING FROM SPEC.YAML: {0}'.format(spec))
+ explicit = True
+ inst_time = os.stat(spec.prefix).st_ctime
+ if old_data is not None:
+ old_info = old_data.get(spec.dag_hash())
+ if old_info is not None:
+ explicit = old_info.explicit
+ inst_time = old_info.installation_time
+
+ extra_args = {
+ 'explicit': explicit,
+ 'installation_time': inst_time
+ }
+ self._add(spec, directory_layout, **extra_args)
+
+ processed_specs.add(spec)
+
+ for key, entry in old_data.items():
+ # We already took care of this spec using
+ # `spec.yaml` from its prefix.
+ if entry.spec in processed_specs:
+ msg = 'SKIPPING RECONSTRUCTION FROM OLD DB: {0}'
+ msg += ' [already reconstructed from spec.yaml]'
+ tty.debug(msg.format(entry.spec))
+ continue
+
+ # If we arrived here it very likely means that
+ # we have external specs that are not dependencies
+ # of other specs. This may be the case for externally
+ # installed compilers or externally installed
+ # applications.
+ tty.debug(
+ 'RECONSTRUCTING FROM OLD DB: {0}'.format(entry.spec))
+ try:
+ layout = spack.store.layout
+ if entry.spec.external:
+ layout = None
+ install_check = True
+ else:
+ install_check = layout.check_installed(entry.spec)
+
+ if install_check:
+ kwargs = {
+ 'spec': entry.spec,
+ 'directory_layout': layout,
+ 'explicit': entry.explicit,
+ 'installation_time': entry.installation_time # noqa: E501
+ }
+ self._add(**kwargs)
+ processed_specs.add(entry.spec)
+ except Exception as e:
+ # Something went wrong, so the spec was not restored
+ # from old data
+ tty.debug(e.message)
+ pass
+
+ self._check_ref_counts()
+
def _check_ref_counts(self):
"""Ensure consistency of reference counts in the DB.
@@ -606,7 +673,8 @@ class Database(object):
self._read_from_file(self._index_path, format='json')
elif os.path.isfile(self._old_yaml_index_path):
- if os.access(self._db_dir, os.R_OK | os.W_OK):
+ if (not self.is_upstream) and os.access(
+ self._db_dir, os.R_OK | os.W_OK):
# if we can write, then read AND write a JSON file.
self._read_from_file(self._old_yaml_index_path, format='yaml')
with WriteTransaction(self.lock):
@@ -616,6 +684,10 @@ class Database(object):
self._read_from_file(self._old_yaml_index_path, format='yaml')
else:
+ if self.is_upstream:
+ raise UpstreamDatabaseLockingError(
+ "No database index file is present, and upstream"
+ " databases cannot generate an index file")
# The file doesn't exist, try to traverse the directory.
# reindex() takes its own write lock, so no lock here.
with WriteTransaction(self.lock):
@@ -657,6 +729,11 @@ class Database(object):
raise NonConcreteSpecAddError(
"Specs added to DB must be concrete.")
+ key = spec.dag_hash()
+ upstream, record = self.query_by_spec_hash(key)
+ if upstream:
+ return
+
# Retrieve optional arguments
installation_time = installation_time or _now()
@@ -669,7 +746,6 @@ class Database(object):
}
self._add(dep, directory_layout, **extra_args)
- key = spec.dag_hash()
if key not in self._data:
installed = bool(spec.external)
path = None
@@ -682,6 +758,8 @@ class Database(object):
tty.warn(
'Dependency missing due to corrupt install directory:',
path, str(e))
+ elif spec.external_path:
+ path = spec.external_path
# Create a new install record with no deps initially.
new_spec = spec.copy(deps=False)
@@ -696,8 +774,10 @@ class Database(object):
# Connect dependencies from the DB to the new copy.
for name, dep in iteritems(spec.dependencies_dict(_tracked_deps)):
dkey = dep.spec.dag_hash()
- new_spec._add_dependency(self._data[dkey].spec, dep.deptypes)
- self._data[dkey].ref_count += 1
+ upstream, record = self.query_by_spec_hash(dkey)
+ new_spec._add_dependency(record.spec, dep.deptypes)
+ if not upstream:
+ record.ref_count += 1
# Mark concrete once everything is built, and preserve
# the original hash of concrete specs.
@@ -725,7 +805,8 @@ class Database(object):
def _get_matching_spec_key(self, spec, **kwargs):
"""Get the exact spec OR get a single spec that matches."""
key = spec.dag_hash()
- if key not in self._data:
+ upstream, record = self.query_by_spec_hash(key)
+ if not record:
match = self.query_one(spec, **kwargs)
if match:
return match.dag_hash()
@@ -735,7 +816,8 @@ class Database(object):
@_autospec
def get_record(self, spec, **kwargs):
key = self._get_matching_spec_key(spec, **kwargs)
- return self._data[key]
+ upstream, record = self.query_by_spec_hash(key)
+ return record
def _decrement_ref_count(self, spec):
key = spec.dag_hash()
@@ -804,14 +886,18 @@ class Database(object):
for relative in to_add:
hash_key = relative.dag_hash()
- if hash_key not in self._data:
+ upstream, record = self.query_by_spec_hash(hash_key)
+ if not record:
reltype = ('Dependent' if direction == 'parents'
else 'Dependency')
- tty.warn("Inconsistent state! %s %s of %s not in DB"
- % (reltype, hash_key, spec.dag_hash()))
+ msg = ("Inconsistent state! %s %s of %s not in DB"
+ % (reltype, hash_key, spec.dag_hash()))
+ if self._fail_when_missing_deps:
+ raise MissingDependenciesError(msg)
+ tty.warn(msg)
continue
- if not self._data[hash_key].installed:
+ if not record.installed:
continue
relatives.add(relative)
@@ -844,7 +930,7 @@ class Database(object):
continue
# TODO: conditional way to do this instead of catching exceptions
- def query(
+ def _query(
self,
query_spec=any,
known=any,
@@ -898,48 +984,65 @@ class Database(object):
# TODO: like installed and known that can be queried? Or are
# TODO: these really special cases that only belong here?
- # TODO: handling of hashes restriction is not particularly elegant.
- with self.read_transaction():
- # Just look up concrete specs with hashes; no fancy search.
- if isinstance(query_spec, spack.spec.Spec) and query_spec.concrete:
-
- hash_key = query_spec.dag_hash()
- if (hash_key in self._data and
- (not hashes or hash_key in hashes)):
- return [self._data[hash_key].spec]
- else:
- return []
-
- # Abstract specs require more work -- currently we test
- # against everything.
- results = []
- start_date = start_date or datetime.datetime.min
- end_date = end_date or datetime.datetime.max
-
- for key, rec in self._data.items():
- if hashes is not None and rec.spec.dag_hash() not in hashes:
- continue
+ # Just look up concrete specs with hashes; no fancy search.
+ if isinstance(query_spec, spack.spec.Spec) and query_spec.concrete:
+ # TODO: handling of hashes restriction is not particularly elegant.
+ hash_key = query_spec.dag_hash()
+ if (hash_key in self._data and
+ (not hashes or hash_key in hashes)):
+ return [self._data[hash_key].spec]
+ else:
+ return []
- if installed is not any and rec.installed != installed:
- continue
+ # Abstract specs require more work -- currently we test
+ # against everything.
+ results = []
+ start_date = start_date or datetime.datetime.min
+ end_date = end_date or datetime.datetime.max
- if explicit is not any and rec.explicit != explicit:
- continue
+ for key, rec in self._data.items():
+ if hashes is not None and rec.spec.dag_hash() not in hashes:
+ continue
- if known is not any and spack.repo.path.exists(
- rec.spec.name) != known:
- continue
+ if installed is not any and rec.installed != installed:
+ continue
- inst_date = datetime.datetime.fromtimestamp(
- rec.installation_time
- )
- if not (start_date < inst_date < end_date):
- continue
+ if explicit is not any and rec.explicit != explicit:
+ continue
- if query_spec is any or rec.spec.satisfies(query_spec):
- results.append(rec.spec)
+ if known is not any and spack.repo.path.exists(
+ rec.spec.name) != known:
+ continue
- return sorted(results)
+ inst_date = datetime.datetime.fromtimestamp(
+ rec.installation_time
+ )
+ if not (start_date < inst_date < end_date):
+ continue
+
+ if query_spec is any or rec.spec.satisfies(query_spec):
+ results.append(rec.spec)
+
+ return results
+
+ def query_local(self, *args, **kwargs):
+ with self.read_transaction():
+ return sorted(self._query(*args, **kwargs))
+
+ def query(self, *args, **kwargs):
+ upstream_results = []
+ for upstream_db in self.upstream_dbs:
+ # queries for upstream DBs need to *not* lock - we may not
+ # have permissions to do this and the upstream DBs won't know about
+ # us anyway (so e.g. they should never uninstall specs)
+ upstream_results.extend(upstream_db._query(*args, **kwargs) or [])
+
+ local_results = set(self.query_local(*args, **kwargs))
+
+ results = list(local_results) + list(
+ x for x in upstream_results if x not in local_results)
+
+ return sorted(results)
def query_one(self, query_spec, known=any, installed=True):
"""Query for exactly one spec that matches the query spec.
@@ -954,9 +1057,13 @@ class Database(object):
return concrete_specs[0] if concrete_specs else None
def missing(self, spec):
- with self.read_transaction():
- key = spec.dag_hash()
- return key in self._data and not self._data[key].installed
+ key = spec.dag_hash()
+ upstream, record = self.query_by_spec_hash(key)
+ return record and not record.installed
+
+
+class UpstreamDatabaseLockingError(SpackError):
+ """Raised when an operation would need to lock an upstream database"""
class CorruptDatabaseError(SpackError):
@@ -967,6 +1074,10 @@ class NonConcreteSpecAddError(SpackError):
"""Raised when attemptint to add non-concrete spec to DB."""
+class MissingDependenciesError(SpackError):
+ """Raised when DB cannot find records for dependencies"""
+
+
class InvalidDatabaseVersionError(SpackError):
def __init__(self, expected, found):
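Two ideas in this file are worth isolating. First, ForbiddenLock turns any attempted lock operation on an upstream database into a hard error, since upstream indexes must be treated as read-only. A minimal standalone sketch of the same pattern (not Spack code; the class names mirror the hunk above):

    class ForbiddenLockError(Exception):
        """Raised when code touches a lock it must never acquire."""

    class ForbiddenLock(object):
        def __getattribute__(self, name):
            # Every access -- acquire_write(), release_read(), anything --
            # raises, so an upstream DB can never take its lock by accident.
            raise ForbiddenLockError(
                "Cannot access attribute '{0}' of lock".format(name))

    lock = ForbiddenLock()
    try:
        lock.acquire_write()
    except ForbiddenLockError as e:
        print(e)  # Cannot access attribute 'acquire_write' of lock

Second, query() merges upstream results into local ones while letting local installs shadow upstream duplicates: local results go into a set, and only upstream specs absent from that set are appended before sorting.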
diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py
index 490a073a33..56b5cdeec6 100644
--- a/lib/spack/spack/directory_layout.py
+++ b/lib/spack/spack/directory_layout.py
@@ -8,6 +8,7 @@ import shutil
import glob
import tempfile
import re
+from contextlib import contextmanager
import ruamel.yaml as yaml
@@ -33,6 +34,7 @@ class DirectoryLayout(object):
def __init__(self, root):
self.root = root
+ self.check_upstream = True
@property
def hidden_file_paths(self):
@@ -74,6 +76,13 @@ class DirectoryLayout(object):
"""Return absolute path from the root to a directory for the spec."""
_check_concrete(spec)
+ if spec.external:
+ return spec.external_path
+ if self.check_upstream and spec.package.installed_upstream:
+ raise SpackError(
+ "Internal error: attempted to call path_for_spec on"
+ " upstream-installed package.")
+
path = self.relative_path_for_spec(spec)
assert(not path.startswith(self.root))
return os.path.join(self.root, path)
@@ -164,7 +173,6 @@ class YamlDirectoryLayout(DirectoryLayout):
def __init__(self, root, **kwargs):
super(YamlDirectoryLayout, self).__init__(root)
- self.metadata_dir = kwargs.get('metadata_dir', '.spack')
self.hash_len = kwargs.get('hash_len')
self.path_scheme = kwargs.get('path_scheme') or (
"${ARCHITECTURE}/"
@@ -177,6 +185,9 @@ class YamlDirectoryLayout(DirectoryLayout):
self.path_scheme = self.path_scheme.replace(
"${HASH}", "${HASH:%d}" % self.hash_len)
+ # If any of these paths change, downstream databases may not be able to
+ # locate files in older upstream databases
+ self.metadata_dir = '.spack'
self.spec_file_name = 'spec.yaml'
self.extension_file_name = 'extensions.yaml'
self.build_log_name = 'build.out' # build log.
@@ -190,9 +201,6 @@ class YamlDirectoryLayout(DirectoryLayout):
def relative_path_for_spec(self, spec):
_check_concrete(spec)
- if spec.external:
- return spec.external_path
-
path = spec.format(self.path_scheme)
return path
@@ -222,20 +230,23 @@ class YamlDirectoryLayout(DirectoryLayout):
_check_concrete(spec)
return os.path.join(self.metadata_path(spec), self.spec_file_name)
+ @contextmanager
+ def disable_upstream_check(self):
+ self.check_upstream = False
+ yield
+ self.check_upstream = True
+
def metadata_path(self, spec):
- return os.path.join(self.path_for_spec(spec), self.metadata_dir)
+ return os.path.join(spec.prefix, self.metadata_dir)
def build_log_path(self, spec):
- return os.path.join(self.path_for_spec(spec), self.metadata_dir,
- self.build_log_name)
+ return os.path.join(self.metadata_path(spec), self.build_log_name)
def build_env_path(self, spec):
- return os.path.join(self.path_for_spec(spec), self.metadata_dir,
- self.build_env_name)
+ return os.path.join(self.metadata_path(spec), self.build_env_name)
def build_packages_path(self, spec):
- return os.path.join(self.path_for_spec(spec), self.metadata_dir,
- self.packages_dir)
+ return os.path.join(self.metadata_path(spec), self.packages_dir)
def create_install_directory(self, spec):
_check_concrete(spec)
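One caveat on disable_upstream_check: the yield is not wrapped in try/finally, so if the body of the with block raises (and _construct_from_directory_layout can), check_upstream is left False for the life of the layout object. A more defensive variant, offered as a suggestion rather than a change this patch makes:

    @contextmanager
    def disable_upstream_check(self):
        self.check_upstream = False
        try:
            yield
        finally:
            # Restore the flag even if the caller's block raises.
            self.check_upstream = True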
diff --git a/lib/spack/spack/main.py b/lib/spack/spack/main.py
index 47d0e65283..9b2e5f1bc6 100644
--- a/lib/spack/spack/main.py
+++ b/lib/spack/spack/main.py
@@ -577,9 +577,28 @@ def print_setup_info(*info):
# print roots for all module systems
module_roots = spack.config.get('config:module_roots')
+ module_to_roots = {
+ 'tcl': list(),
+ 'dotkit': list(),
+ 'lmod': list()
+ }
for name, path in module_roots.items():
path = spack.util.path.canonicalize_path(path)
- shell_set('_sp_%s_root' % name, path)
+ module_to_roots[name].append(path)
+
+ other_spack_instances = spack.config.get(
+ 'upstreams') or {}
+ for install_properties in other_spack_instances.values():
+ upstream_module_roots = install_properties.get('modules', {})
+ for module_type, root in upstream_module_roots.items():
+ module_to_roots[module_type].append(root)
+
+ for name, paths in module_to_roots.items():
+ # Environment setup prepends paths, so the order is reversed here to
+ # preserve the intended priority: modules from the local Spack
+ # instance have the highest precedence.
+ roots_val = ':'.join(reversed(paths))
+ shell_set('_sp_%s_roots' % name, roots_val)
# print environment module system if available. This can be expensive
# on clusters, so skip it if not needed.
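A worked example of the reversal (paths invented): the loop collects the local root first, then upstream roots. Since the shell-side environment setup prepends each entry in turn, emitting the list reversed puts the local root back in front once setup is done:

    paths = ['/local/share/modules', '/upstream/share/modules']
    print(':'.join(reversed(paths)))
    # -> /upstream/share/modules:/local/share/modules
    # Prepending entries one at a time restores local-first precedence.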
diff --git a/lib/spack/spack/modules/common.py b/lib/spack/spack/modules/common.py
index 5e34315c1c..748013ade4 100644
--- a/lib/spack/spack/modules/common.py
+++ b/lib/spack/spack/modules/common.py
@@ -33,6 +33,7 @@ import datetime
import inspect
import os.path
import re
+import collections
import six
import llnl.util.filesystem
@@ -45,6 +46,7 @@ import spack.tengine as tengine
import spack.util.path
import spack.util.environment
import spack.error
+import spack.util.spack_yaml as syaml
#: config section for this file
configuration = spack.config.get('modules')
@@ -215,6 +217,64 @@ def root_path(name):
return spack.util.path.canonicalize_path(path)
+def generate_module_index(root, modules):
+ entries = syaml.syaml_dict()
+ for m in modules:
+ entry = {
+ 'path': m.layout.filename,
+ 'use_name': m.layout.use_name
+ }
+ entries[m.spec.dag_hash()] = entry
+ index = {'module_index': entries}
+ index_path = os.path.join(root, 'module-index.yaml')
+ llnl.util.filesystem.mkdirp(root)
+ with open(index_path, 'w') as index_file:
+ syaml.dump(index, index_file, default_flow_style=False)
+
+
+ModuleIndexEntry = collections.namedtuple(
+ 'ModuleIndexEntry', ['path', 'use_name'])
+
+
+def read_module_index(root):
+ index_path = os.path.join(root, 'module-index.yaml')
+ if not os.path.exists(index_path):
+ return {}
+ with open(index_path, 'r') as index_file:
+ yaml_content = syaml.load(index_file)
+ index = {}
+ yaml_index = yaml_content['module_index']
+ for dag_hash, module_properties in yaml_index.items():
+ index[dag_hash] = ModuleIndexEntry(
+ module_properties['path'],
+ module_properties['use_name'])
+ return index
+
+
+def read_module_indices():
+ module_type_to_indices = {}
+ other_spack_instances = spack.config.get(
+ 'upstreams') or {}
+
+ for install_properties in other_spack_instances.values():
+ module_type_to_root = install_properties.get('modules', {})
+ for module_type, root in module_type_to_root.items():
+ indices = module_type_to_indices.setdefault(module_type, [])
+ indices.append(read_module_index(root))
+
+ return module_type_to_indices
+
+
+module_type_to_indices = read_module_indices()
+
+
+def upstream_module(spec, module_type):
+ indices = module_type_to_indices[module_type]
+ for index in indices:
+ if spec.dag_hash() in index:
+ return index[spec.dag_hash()]
+
+
class BaseConfiguration(object):
"""Manipulates the information needed to generate a module file to make
querying easier. It needs to be sub-classed for specific module types.
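For reference, generate_module_index writes a module-index.yaml of roughly the following shape; read_module_index inverts it into ModuleIndexEntry tuples keyed by DAG hash. The hash and paths below are invented for illustration:

    module_index:
      es37wtperk7ckh5xrp3faqw7nfsmd5gj:
        path: /upstream/share/spack/modules/linux-rhel7-x86_64/mpileaks-1.0-gcc-8.2.0
        use_name: mpileaks-1.0-gcc-8.2.0

Because upstream_module consults these per-root indexes rather than the upstream filesystem layout, a downstream Spack can resolve upstream module files even when the upstream instance uses a different naming configuration.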
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index 9114297c1d..6f90b3e162 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -519,6 +519,15 @@ class PackageBase(with_metaclass(PackageMeta, PackageViewMixin, object)):
super(PackageBase, self).__init__()
+ @property
+ def installed_upstream(self):
+ if not hasattr(self, '_installed_upstream'):
+ upstream, record = spack.store.db.query_by_spec_hash(
+ self.spec.dag_hash())
+ self._installed_upstream = upstream
+
+ return self._installed_upstream
+
def possible_dependencies(
self, transitive=True, expand_virtuals=True, visited=None):
"""Return set of possible dependencies of this package.
@@ -1396,6 +1405,14 @@ class PackageBase(with_metaclass(PackageMeta, PackageViewMixin, object)):
if self.spec.external:
return self._process_external_package(explicit)
+ if self.installed_upstream:
+ tty.msg("{0.name} is installed in an upstream Spack instance"
+ " at {0.prefix}".format(self))
+ # Note this skips all post-install hooks. In the case of modules
+ # this is considered correct because we want to retrieve the
+ # module from the upstream Spack instance.
+ return
+
partial = self.check_for_unfinished_installation(keep_prefix, restage)
# Ensure package is not already installed
diff --git a/lib/spack/spack/schema/merged.py b/lib/spack/spack/schema/merged.py
index ccd12ee039..1832848f77 100644
--- a/lib/spack/spack/schema/merged.py
+++ b/lib/spack/spack/schema/merged.py
@@ -16,6 +16,7 @@ import spack.schema.mirrors
import spack.schema.modules
import spack.schema.packages
import spack.schema.repos
+import spack.schema.upstreams
#: Properties for inclusion in other schemas
@@ -25,7 +26,8 @@ properties = union_dicts(
spack.schema.mirrors.properties,
spack.schema.modules.properties,
spack.schema.packages.properties,
- spack.schema.repos.properties
+ spack.schema.repos.properties,
+ spack.schema.upstreams.properties
)
diff --git a/lib/spack/spack/schema/upstreams.py b/lib/spack/spack/schema/upstreams.py
new file mode 100644
index 0000000000..4bbbac17be
--- /dev/null
+++ b/lib/spack/spack/schema/upstreams.py
@@ -0,0 +1,40 @@
+# Copyright 2013-2018 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+
+#: Properties for inclusion in other schemas
+properties = {
+ 'upstreams': {
+ 'type': 'object',
+ 'default': {},
+ 'patternProperties': {
+ r'\w[\w-]*': {
+ 'type': 'object',
+ 'default': {},
+ 'additionalProperties': False,
+ 'properties': {
+ 'install_tree': {'type': 'string'},
+ 'modules': {
+ 'type': 'object',
+ 'properties': {
+ 'tcl': {'type': 'string'},
+ 'lmod': {'type': 'string'},
+ 'dotkit': {'type': 'string'}
+ }
+ }
+ }
+ }
+ }
+ }
+}
+
+#: Full schema with metadata
+schema = {
+ '$schema': 'http://json-schema.org/schema#',
+ 'title': 'Spack core configuration file schema',
+ 'type': 'object',
+ 'additionalProperties': False,
+ 'properties': properties,
+}
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index 76c06d3fd7..e74d38ea5e 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -1267,7 +1267,12 @@ class Spec(object):
@property
def prefix(self):
if self._prefix is None:
- self.prefix = spack.store.layout.path_for_spec(self)
+ upstream, record = spack.store.db.query_by_spec_hash(
+ self.dag_hash())
+ if record and record.path:
+ self.prefix = record.path
+ else:
+ self.prefix = spack.store.layout.path_for_spec(self)
return self._prefix
@prefix.setter
@@ -3302,7 +3307,9 @@ class Spec(object):
if status_fn:
status = status_fn(node)
- if status is None:
+ if node.package.installed_upstream:
+ out += colorize("@g{[^]} ", color=color)
+ elif status is None:
out += colorize("@K{ - } ", color=color) # not installed
elif status:
out += colorize("@g{[+]} ", color=color) # installed
diff --git a/lib/spack/spack/store.py b/lib/spack/spack/store.py
index b5562ecaee..a4f7596fc2 100644
--- a/lib/spack/spack/store.py
+++ b/lib/spack/spack/store.py
@@ -58,7 +58,8 @@ class Store(object):
"""
def __init__(self, root, path_scheme=None, hash_length=None):
self.root = root
- self.db = spack.database.Database(root)
+ self.db = spack.database.Database(
+ root, upstream_dbs=retrieve_upstream_dbs())
self.layout = spack.directory_layout.YamlDirectoryLayout(
root, hash_len=hash_length, path_scheme=path_scheme)
@@ -84,3 +85,27 @@ store = llnl.util.lang.Singleton(_store)
root = llnl.util.lang.LazyReference(lambda: store.root)
db = llnl.util.lang.LazyReference(lambda: store.db)
layout = llnl.util.lang.LazyReference(lambda: store.layout)
+
+
+def retrieve_upstream_dbs():
+ other_spack_instances = spack.config.get('upstreams', {})
+
+ install_roots = []
+ for install_properties in other_spack_instances.values():
+ install_roots.append(install_properties['install_tree'])
+
+ return _construct_upstream_dbs_from_install_roots(install_roots)
+
+
+def _construct_upstream_dbs_from_install_roots(
+ install_roots, _test=False):
+ accumulated_upstream_dbs = []
+ for install_root in reversed(install_roots):
+ upstream_dbs = list(accumulated_upstream_dbs)
+ next_db = spack.database.Database(
+ install_root, is_upstream=True, upstream_dbs=upstream_dbs)
+ next_db._fail_when_missing_deps = _test
+ next_db._read()
+ accumulated_upstream_dbs.insert(0, next_db)
+
+ return accumulated_upstream_dbs
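Tracing _construct_upstream_dbs_from_install_roots for three configured roots (paths invented): iteration runs over the reversed list, so the lowest-priority database is built first with no upstreams, and each later database is handed everything built so far; inserting at index 0 returns the list in the original priority order:

    # install_roots = ['/a', '/b', '/c']   (priority order from config)
    # db_c = Database('/c', upstream_dbs=[])
    # db_b = Database('/b', upstream_dbs=[db_c])
    # db_a = Database('/a', upstream_dbs=[db_b, db_c])
    # returns [db_a, db_b, db_c]

This matches the layout exercised by test_recursive_upstream_dbs below.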
diff --git a/lib/spack/spack/test/cmd/print_shell_vars.py b/lib/spack/spack/test/cmd/print_shell_vars.py
index 2603336c7b..e5522ea016 100644
--- a/lib/spack/spack/test/cmd/print_shell_vars.py
+++ b/lib/spack/spack/test/cmd/print_shell_vars.py
@@ -11,8 +11,8 @@ def test_print_shell_vars_sh(capsys):
out, _ = capsys.readouterr()
assert "_sp_sys_type=" in out
- assert "_sp_tcl_root=" in out
- assert "_sp_lmod_root=" in out
+ assert "_sp_tcl_roots=" in out
+ assert "_sp_lmod_roots=" in out
assert "_sp_module_prefix" not in out
@@ -21,8 +21,8 @@ def test_print_shell_vars_csh(capsys):
out, _ = capsys.readouterr()
assert "set _sp_sys_type = " in out
- assert "set _sp_tcl_root = " in out
- assert "set _sp_lmod_root = " in out
+ assert "set _sp_tcl_roots = " in out
+ assert "set _sp_lmod_roots = " in out
assert "set _sp_module_prefix = " not in out
@@ -31,8 +31,8 @@ def test_print_shell_vars_sh_modules(capsys):
out, _ = capsys.readouterr()
assert "_sp_sys_type=" in out
- assert "_sp_tcl_root=" in out
- assert "_sp_lmod_root=" in out
+ assert "_sp_tcl_roots=" in out
+ assert "_sp_lmod_roots=" in out
assert "_sp_module_prefix=" in out
@@ -41,6 +41,6 @@ def test_print_shell_vars_csh_modules(capsys):
out, _ = capsys.readouterr()
assert "set _sp_sys_type = " in out
- assert "set _sp_tcl_root = " in out
- assert "set _sp_lmod_root = " in out
+ assert "set _sp_tcl_roots = " in out
+ assert "set _sp_lmod_roots = " in out
assert "set _sp_module_prefix = " in out
diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py
index d158cb0f45..ff95f463bd 100644
--- a/lib/spack/spack/test/conftest.py
+++ b/lib/spack/spack/test/conftest.py
@@ -435,6 +435,29 @@ def mock_fetch(mock_archive):
PackageBase.fetcher = orig_fn
+class MockLayout(object):
+ def __init__(self, root):
+ self.root = root
+
+ def path_for_spec(self, spec):
+ return '/'.join([self.root, spec.name])
+
+ def check_installed(self, spec):
+ return True
+
+
+@pytest.fixture()
+def gen_mock_layout(tmpdir):
+ # Generate a MockLayout in a temporary directory. In general the prefixes
+ # specified by MockLayout should never be written to, but this ensures
+ # that even if they are, no harm is done
+ def create_layout(root):
+ subroot = tmpdir.mkdir(root)
+ return MockLayout(str(subroot))
+
+ yield create_layout
+
+
@pytest.fixture()
def module_configuration(monkeypatch, request):
"""Reads the module configuration file from the mock ones prepared
@@ -758,6 +781,7 @@ class MockPackage(object):
self.name = name
self.spec = None
self.dependencies = ordereddict_backport.OrderedDict()
+ self._installed_upstream = False
assert len(dependencies) == len(dependency_types)
for dep, dtype in zip(dependencies, dependency_types):
diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py
index 5a492d7190..d6eb2cc618 100644
--- a/lib/spack/spack/test/database.py
+++ b/lib/spack/spack/test/database.py
@@ -17,7 +17,9 @@ from llnl.util.tty.colify import colify
import spack.repo
import spack.store
-from spack.test.conftest import MockPackageMultiRepo
+import spack.database
+import spack.spec
+from spack.test.conftest import MockPackage, MockPackageMultiRepo
from spack.util.executable import Executable
@@ -25,6 +27,179 @@ pytestmark = pytest.mark.db
@pytest.fixture()
+def test_store(tmpdir):
+ real_store = spack.store.store
+ spack.store.store = spack.store.Store(str(tmpdir.join('test_store')))
+
+ yield
+
+ spack.store.store = real_store
+
+
+@pytest.fixture()
+def upstream_and_downstream_db(tmpdir_factory, gen_mock_layout):
+ mock_db_root = str(tmpdir_factory.mktemp('mock_db_root'))
+ upstream_db = spack.database.Database(mock_db_root)
+ # Generate initial DB file to avoid reindex
+ with open(upstream_db._index_path, 'w') as db_file:
+ upstream_db._write_to_file(db_file)
+ upstream_layout = gen_mock_layout('/a/')
+
+ downstream_db_root = str(
+ tmpdir_factory.mktemp('mock_downstream_db_root'))
+ downstream_db = spack.database.Database(
+ downstream_db_root, upstream_dbs=[upstream_db])
+ with open(downstream_db._index_path, 'w') as db_file:
+ downstream_db._write_to_file(db_file)
+ downstream_layout = gen_mock_layout('/b/')
+
+ yield upstream_db, upstream_layout, downstream_db, downstream_layout
+
+
+@pytest.mark.usefixtures('config')
+def test_installed_upstream(upstream_and_downstream_db):
+ upstream_db, upstream_layout, downstream_db, downstream_layout = (
+ upstream_and_downstream_db)
+
+ default = ('build', 'link')
+ x = MockPackage('x', [], [])
+ z = MockPackage('z', [], [])
+ y = MockPackage('y', [z], [default])
+ w = MockPackage('w', [x, y], [default, default])
+ mock_repo = MockPackageMultiRepo([w, x, y, z])
+
+ with spack.repo.swap(mock_repo):
+ spec = spack.spec.Spec('w')
+ spec.concretize()
+
+ for dep in spec.traverse(root=False):
+ upstream_db.add(dep, upstream_layout)
+
+ new_spec = spack.spec.Spec('w')
+ new_spec.concretize()
+ downstream_db.add(new_spec, downstream_layout)
+ for dep in new_spec.traverse(root=False):
+ upstream, record = downstream_db.query_by_spec_hash(
+ dep.dag_hash())
+ assert upstream
+ assert record.path == upstream_layout.path_for_spec(dep)
+ upstream, record = downstream_db.query_by_spec_hash(
+ new_spec.dag_hash())
+ assert not upstream
+ assert record.installed
+
+ upstream_db._check_ref_counts()
+ downstream_db._check_ref_counts()
+
+
+@pytest.mark.usefixtures('config')
+def test_removed_upstream_dep(upstream_and_downstream_db):
+ upstream_db, upstream_layout, downstream_db, downstream_layout = (
+ upstream_and_downstream_db)
+
+ default = ('build', 'link')
+ z = MockPackage('z', [], [])
+ y = MockPackage('y', [z], [default])
+ mock_repo = MockPackageMultiRepo([y, z])
+
+ with spack.repo.swap(mock_repo):
+ spec = spack.spec.Spec('y')
+ spec.concretize()
+
+ upstream_db.add(spec['z'], upstream_layout)
+
+ new_spec = spack.spec.Spec('y')
+ new_spec.concretize()
+ downstream_db.add(new_spec, downstream_layout)
+
+ upstream_db.remove(new_spec['z'])
+
+ new_downstream = spack.database.Database(
+ downstream_db.root, upstream_dbs=[upstream_db])
+ new_downstream._fail_when_missing_deps = True
+ with pytest.raises(spack.database.MissingDependenciesError):
+ new_downstream._read()
+
+
+@pytest.mark.usefixtures('config')
+def test_add_to_upstream_after_downstream(upstream_and_downstream_db):
+ """An upstream DB can add a package after it is installed in the downstream
+ DB. When a package is recorded as installed in both, the results should
+ refer to the downstream DB.
+ """
+ upstream_db, upstream_layout, downstream_db, downstream_layout = (
+ upstream_and_downstream_db)
+
+ x = MockPackage('x', [], [])
+ mock_repo = MockPackageMultiRepo([x])
+
+ with spack.repo.swap(mock_repo):
+ spec = spack.spec.Spec('x')
+ spec.concretize()
+
+ downstream_db.add(spec, downstream_layout)
+
+ upstream_db.add(spec, upstream_layout)
+
+ upstream, record = downstream_db.query_by_spec_hash(spec.dag_hash())
+ # Even though the package is recorded as installed in the upstream DB,
+ # we prefer the locally-installed instance
+ assert not upstream
+
+ qresults = downstream_db.query('x')
+ assert len(qresults) == 1
+ queried_spec, = qresults
+ try:
+ orig_db = spack.store.db
+ spack.store.db = downstream_db
+ assert queried_spec.prefix == downstream_layout.path_for_spec(spec)
+ finally:
+ spack.store.db = orig_db
+
+
+@pytest.mark.usefixtures('config')
+def test_recursive_upstream_dbs(tmpdir_factory, test_store, gen_mock_layout):
+ roots = [str(tmpdir_factory.mktemp(x)) for x in ['a', 'b', 'c']]
+ layouts = [gen_mock_layout(x) for x in ['/ra/', '/rb/', '/rc/']]
+
+ default = ('build', 'link')
+ z = MockPackage('z', [], [])
+ y = MockPackage('y', [z], [default])
+ x = MockPackage('x', [y], [default])
+
+ mock_repo = MockPackageMultiRepo([x, y, z])
+
+ with spack.repo.swap(mock_repo):
+ spec = spack.spec.Spec('x')
+ spec.concretize()
+ db_c = spack.database.Database(roots[2])
+ db_c.add(spec['z'], layouts[2])
+
+ db_b = spack.database.Database(roots[1], upstream_dbs=[db_c])
+ db_b.add(spec['y'], layouts[1])
+
+ db_a = spack.database.Database(roots[0], upstream_dbs=[db_b, db_c])
+ db_a.add(spec['x'], layouts[0])
+
+ dbs = spack.store._construct_upstream_dbs_from_install_roots(
+ roots, _test=True)
+
+ assert dbs[0].db_for_spec_hash(spec.dag_hash()) == dbs[0]
+ assert dbs[0].db_for_spec_hash(spec['y'].dag_hash()) == dbs[1]
+ assert dbs[0].db_for_spec_hash(spec['z'].dag_hash()) == dbs[2]
+
+ dbs[0]._check_ref_counts()
+ dbs[1]._check_ref_counts()
+ dbs[2]._check_ref_counts()
+
+ assert (dbs[0].installed_relatives(spec) ==
+ set(spec.traverse(root=False)))
+ assert (dbs[0].installed_relatives(spec['z'], direction='parents') ==
+ set([spec, spec['y']]))
+ assert not dbs[2].installed_relatives(spec['z'], direction='parents')
+
+
+@pytest.fixture()
def usr_folder_exists(monkeypatch):
"""The ``/usr`` folder is assumed to be existing in some tests. This
fixture makes it such that its existence is mocked, so we have no
diff --git a/lib/spack/spack/test/directory_layout.py b/lib/spack/spack/test/directory_layout.py
index d415d46dc8..f0c3ad6ff9 100644
--- a/lib/spack/spack/test/directory_layout.py
+++ b/lib/spack/spack/test/directory_layout.py
@@ -22,7 +22,11 @@ max_packages = 10
@pytest.fixture()
def layout_and_dir(tmpdir):
"""Returns a directory layout and the corresponding directory."""
- yield YamlDirectoryLayout(str(tmpdir)), str(tmpdir)
+ layout = YamlDirectoryLayout(str(tmpdir))
+ old_layout = spack.store.layout
+ spack.store.layout = layout
+ yield layout, str(tmpdir)
+ spack.store.layout = old_layout
def test_yaml_directory_layout_parameters(
diff --git a/lib/spack/spack/test/install.py b/lib/spack/spack/test/install.py
index 37e271aba3..79287c2adf 100644
--- a/lib/spack/spack/test/install.py
+++ b/lib/spack/spack/test/install.py
@@ -125,6 +125,77 @@ def test_installed_dependency_request_conflicts(
dependent.concretize()
+def test_installed_upstream_external(
+ tmpdir_factory, install_mockery, mock_fetch, gen_mock_layout):
+ """Check that when a dependency package is recorded as installed in
+ an upstream database that it is not reinstalled.
+ """
+ mock_db_root = str(tmpdir_factory.mktemp('mock_db_root'))
+ prepared_db = spack.database.Database(mock_db_root)
+
+ upstream_layout = gen_mock_layout('/a/')
+
+ dependency = spack.spec.Spec('externaltool')
+ dependency.concretize()
+ prepared_db.add(dependency, upstream_layout)
+
+ try:
+ original_db = spack.store.db
+ downstream_db_root = str(
+ tmpdir_factory.mktemp('mock_downstream_db_root'))
+ spack.store.db = spack.database.Database(
+ downstream_db_root, upstream_dbs=[prepared_db])
+ dependent = spack.spec.Spec('externaltest')
+ dependent.concretize()
+
+ new_dependency = dependent['externaltool']
+ assert new_dependency.external
+ assert new_dependency.prefix == '/path/to/external_tool'
+
+ dependent.package.do_install()
+
+ assert not os.path.exists(new_dependency.prefix)
+ assert os.path.exists(dependent.prefix)
+ finally:
+ spack.store.db = original_db
+
+
+def test_installed_upstream(tmpdir_factory, install_mockery, mock_fetch,
+ gen_mock_layout):
+ """Check that when a dependency package is recorded as installed in
+ an upstream database that it is not reinstalled.
+ """
+ mock_db_root = str(tmpdir_factory.mktemp('mock_db_root'))
+ prepared_db = spack.database.Database(mock_db_root)
+
+ upstream_layout = gen_mock_layout('/a/')
+
+ dependency = spack.spec.Spec('dependency-install')
+ dependency.concretize()
+ prepared_db.add(dependency, upstream_layout)
+
+ try:
+ original_db = spack.store.db
+ downstream_db_root = str(
+ tmpdir_factory.mktemp('mock_downstream_db_root'))
+ spack.store.db = spack.database.Database(
+ downstream_db_root, upstream_dbs=[prepared_db])
+ dependent = spack.spec.Spec('dependent-install')
+ dependent.concretize()
+
+ new_dependency = dependent['dependency-install']
+ assert new_dependency.package.installed_upstream
+ assert (new_dependency.prefix ==
+ upstream_layout.path_for_spec(dependency))
+
+ dependent.package.do_install()
+
+ assert not os.path.exists(new_dependency.prefix)
+ assert os.path.exists(dependent.prefix)
+ finally:
+ spack.store.db = original_db
+
+
@pytest.mark.disable_clean_stage_check
def test_partial_install_keep_prefix(install_mockery, mock_fetch):
spec = Spec('canfail').concretized()
diff --git a/lib/spack/spack/test/modules/tcl.py b/lib/spack/spack/test/modules/tcl.py
index 9e116e0938..852f553f9f 100644
--- a/lib/spack/spack/test/modules/tcl.py
+++ b/lib/spack/spack/test/modules/tcl.py
@@ -192,6 +192,23 @@ class TestTcl(object):
with pytest.raises(SystemExit):
modulefile_content('mpileaks')
+ def test_module_index(
+ self, module_configuration, factory, tmpdir_factory):
+
+ module_configuration('suffix')
+
+ w1, s1 = factory('mpileaks')
+ w2, s2 = factory('callpath')
+
+ test_root = str(tmpdir_factory.mktemp('module-root'))
+
+ spack.modules.common.generate_module_index(test_root, [w1, w2])
+
+ index = spack.modules.common.read_module_index(test_root)
+
+ assert index[s1.dag_hash()].use_name == w1.layout.use_name
+ assert index[s2.dag_hash()].path == w2.layout.filename
+
def test_suffixes(self, module_configuration, factory):
"""Tests adding suffixes to module file name."""
module_configuration('suffix')