summaryrefslogtreecommitdiff
path: root/lib
diff options
context:
space:
mode:
authorMassimiliano Culpo <massimiliano.culpo@gmail.com>2023-07-17 16:51:04 +0200
committerGitHub <noreply@github.com>2023-07-17 16:51:04 +0200
commit53ae969aa008c8fb358f33b9b961beb5512999ff (patch)
tree6ecb773c5a418fabd4f8fbec10ca6dcc77f1e5f9 /lib
parent2b5a7bb4d7428529d7603d1b711beab0baa76b1c (diff)
downloadspack-53ae969aa008c8fb358f33b9b961beb5512999ff.tar.gz
spack-53ae969aa008c8fb358f33b9b961beb5512999ff.tar.bz2
spack-53ae969aa008c8fb358f33b9b961beb5512999ff.tar.xz
spack-53ae969aa008c8fb358f33b9b961beb5512999ff.zip
Lock, database and store don't need global configuration on construction (#33495)
Lock objects can now be instantiated independently, without being tied to the global configuration. The same is true for database and store objects. The database __init__ method has been simplified to take a single lock configuration object. Some common lock configurations (e.g. NO_LOCK or NO_TIMEOUT) have been named and are provided as globals. The use_store context manager keeps the configuration consistent by pushing and popping an internal scope. It can also be tuned by passing extra data to set up e.g. upstreams or anything else that might be related to the store.
Diffstat (limited to 'lib')
-rw-r--r--lib/spack/docs/conf.py1
-rw-r--r--lib/spack/llnl/util/lang.py2
-rw-r--r--lib/spack/spack/binary_distribution.py30
-rw-r--r--lib/spack/spack/bootstrap/__init__.py3
-rw-r--r--lib/spack/spack/cmd/modules/__init__.py4
-rw-r--r--lib/spack/spack/config.py4
-rw-r--r--lib/spack/spack/database.py291
-rw-r--r--lib/spack/spack/store.py213
-rw-r--r--lib/spack/spack/subprocess_context.py28
-rw-r--r--lib/spack/spack/test/bindist.py11
-rw-r--r--lib/spack/spack/test/cmd/bootstrap.py2
-rw-r--r--lib/spack/spack/test/cmd/env.py17
-rw-r--r--lib/spack/spack/test/cmd/location.py2
-rw-r--r--lib/spack/spack/test/cmd/mirror.py4
-rw-r--r--lib/spack/spack/test/config.py10
-rw-r--r--lib/spack/spack/test/config_values.py16
-rw-r--r--lib/spack/spack/test/conftest.py25
-rw-r--r--lib/spack/spack/test/database.py12
-rw-r--r--lib/spack/spack/test/install.py25
-rw-r--r--lib/spack/spack/test/repo.py22
-rw-r--r--lib/spack/spack/test/sbang.py2
-rw-r--r--lib/spack/spack/test/util/spack_lock_wrapper.py24
-rw-r--r--lib/spack/spack/test/util/spack_yaml.py10
-rw-r--r--lib/spack/spack/util/lock.py10
24 files changed, 431 insertions, 337 deletions
diff --git a/lib/spack/docs/conf.py b/lib/spack/docs/conf.py
index 34710ea73a..20c10ee486 100644
--- a/lib/spack/docs/conf.py
+++ b/lib/spack/docs/conf.py
@@ -214,6 +214,7 @@ nitpick_ignore = [
# Spack classes that intersphinx is unable to resolve
("py:class", "spack.version.StandardVersion"),
("py:class", "spack.spec.DependencySpec"),
+ ("py:class", "spack.spec.SpecfileReaderBase"),
("py:class", "spack.install_test.Pb"),
]
diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py
index ffee4443df..ae75db621f 100644
--- a/lib/spack/llnl/util/lang.py
+++ b/lib/spack/llnl/util/lang.py
@@ -821,7 +821,7 @@ class Singleton:
# 'instance'/'_instance' to be defined or it will enter an infinite
# loop, so protect against that here.
if name in ["_instance", "instance"]:
- raise AttributeError()
+ raise AttributeError(f"cannot create {name}")
return getattr(self.instance, name)
def __getitem__(self, name):
diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py
index 6f6e0de4a3..693a864a6b 100644
--- a/lib/spack/spack/binary_distribution.py
+++ b/lib/spack/spack/binary_distribution.py
@@ -61,6 +61,22 @@ _build_cache_relative_path = "build_cache"
_build_cache_keys_relative_path = "_pgp"
+class BuildCacheDatabase(spack_db.Database):
+ """A database for binary buildcaches.
+
+ A database supports writing buildcache index files, in which case certain fields are not
+ needed in each install record, and no locking is required. To use this feature, it provides
+ ``lock_cfg=NO_LOCK``, and override the list of ``record_fields``.
+ """
+
+ record_fields = ("spec", "ref_count", "in_buildcache")
+
+ def __init__(self, root):
+ super().__init__(root, lock_cfg=spack_db.NO_LOCK)
+ self._write_transaction_impl = llnl.util.lang.nullcontext
+ self._read_transaction_impl = llnl.util.lang.nullcontext
+
+
class FetchCacheError(Exception):
"""Error thrown when fetching the cache failed, usually a composite error list."""
@@ -190,8 +206,7 @@ class BinaryCacheIndex:
tmpdir = tempfile.mkdtemp()
try:
- db_root_dir = os.path.join(tmpdir, "db_root")
- db = spack_db.Database(None, db_dir=db_root_dir, enable_transaction_locking=False)
+ db = BuildCacheDatabase(tmpdir)
try:
self._index_file_cache.init_entry(cache_key)
@@ -1059,13 +1074,10 @@ def generate_package_index(cache_prefix, concurrency=32):
tty.debug("Retrieving spec descriptor files from {0} to build index".format(cache_prefix))
tmpdir = tempfile.mkdtemp()
- db_root_dir = os.path.join(tmpdir, "db_root")
- db = spack_db.Database(
- None,
- db_dir=db_root_dir,
- enable_transaction_locking=False,
- record_fields=["spec", "ref_count", "in_buildcache"],
- )
+
+ db = BuildCacheDatabase(tmpdir)
+ db.root = None
+ db_root_dir = db.database_directory
try:
_read_specs_and_push_index(file_list, read_fn, cache_prefix, db, db_root_dir, concurrency)
diff --git a/lib/spack/spack/bootstrap/__init__.py b/lib/spack/spack/bootstrap/__init__.py
index 1dad6597a6..1f2f239de3 100644
--- a/lib/spack/spack/bootstrap/__init__.py
+++ b/lib/spack/spack/bootstrap/__init__.py
@@ -4,7 +4,7 @@
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
"""Function and classes needed to bootstrap Spack itself."""
-from .config import ensure_bootstrap_configuration, is_bootstrapping
+from .config import ensure_bootstrap_configuration, is_bootstrapping, store_path
from .core import all_core_root_specs, ensure_core_dependencies, ensure_patchelf_in_path_or_raise
from .environment import BootstrapEnvironment, ensure_environment_dependencies
from .status import status_message
@@ -18,4 +18,5 @@ __all__ = [
"ensure_environment_dependencies",
"BootstrapEnvironment",
"status_message",
+ "store_path",
]
diff --git a/lib/spack/spack/cmd/modules/__init__.py b/lib/spack/spack/cmd/modules/__init__.py
index 1b5ed47bf8..ec54b0d0e7 100644
--- a/lib/spack/spack/cmd/modules/__init__.py
+++ b/lib/spack/spack/cmd/modules/__init__.py
@@ -368,7 +368,9 @@ callbacks = {"refresh": refresh, "rm": rm, "find": find, "loads": loads}
def modules_cmd(parser, args, module_type, callbacks=callbacks):
# Qualifiers to be used when querying the db for specs
- constraint_qualifiers = {"refresh": {"installed": True, "known": True}}
+ constraint_qualifiers = {
+ "refresh": {"installed": True, "known": lambda x: not spack.repo.path.exists(x)}
+ }
query_args = constraint_qualifiers.get(args.subparser_name, {})
# Get the specs that match the query from the DB
diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py
index d7f4638b7c..9df6eead82 100644
--- a/lib/spack/spack/config.py
+++ b/lib/spack/spack/config.py
@@ -767,7 +767,7 @@ def _add_command_line_scopes(cfg, command_line_scopes):
_add_platform_scope(cfg, ImmutableConfigScope, name, path)
-def _config():
+def create():
"""Singleton Configuration instance.
This constructs one instance associated with this module and returns
@@ -825,7 +825,7 @@ def _config():
#: This is the singleton configuration instance for Spack.
-config: Union[Configuration, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(_config)
+config: Union[Configuration, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(create)
def add_from_file(filename, scope=None):
diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py
index 2d0628124b..9d12cbb95b 100644
--- a/lib/spack/spack/database.py
+++ b/lib/spack/spack/database.py
@@ -2,7 +2,6 @@
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
"""Spack's installation tracking database.
The database serves two purposes:
@@ -19,14 +18,13 @@ as the authoritative database of packages in Spack. This module
provides a cache and a sanity checking mechanism for what is in the
filesystem.
"""
-
import contextlib
import datetime
import os
import socket
import sys
import time
-from typing import Dict
+from typing import Dict, List, NamedTuple, Set, Type, Union
try:
import uuid
@@ -39,13 +37,10 @@ except ImportError:
from typing import Optional, Tuple
import llnl.util.filesystem as fs
-import llnl.util.lang as lang
import llnl.util.tty as tty
import spack.hash_types as ht
-import spack.repo
import spack.spec
-import spack.store
import spack.util.lock as lk
import spack.util.spack_json as sjson
import spack.version as vn
@@ -56,17 +51,17 @@ from spack.util.crypto import bit_length
# TODO: Provide an API automatically retyring a build after detecting and
# TODO: clearing a failure.
-# DB goes in this directory underneath the root
-_db_dirname = ".spack-db"
+#: DB goes in this directory underneath the root
+_DB_DIRNAME = ".spack-db"
-# DB version. This is stuck in the DB file to track changes in format.
-# Increment by one when the database format changes.
-# Versions before 5 were not integers.
-_db_version = vn.Version("7")
+#: DB version. This is stuck in the DB file to track changes in format.
+#: Increment by one when the database format changes.
+#: Versions before 5 were not integers.
+_DB_VERSION = vn.Version("7")
-# For any version combinations here, skip reindex when upgrading.
-# Reindexing can take considerable time and is not always necessary.
-_skip_reindex = [
+#: For any version combinations here, skip reindex when upgrading.
+#: Reindexing can take considerable time and is not always necessary.
+_SKIP_REINDEX = [
# reindexing takes a significant amount of time, and there's
# no reason to do it from DB version 0.9.3 to version 5. The
# only difference is that v5 can contain "deprecated_for"
@@ -77,26 +72,26 @@ _skip_reindex = [
(vn.Version("6"), vn.Version("7")),
]
-# Default timeout for spack database locks in seconds or None (no timeout).
-# A balance needs to be struck between quick turnaround for parallel installs
-# (to avoid excess delays) and waiting long enough when the system is busy
-# (to ensure the database is updated).
-_db_lock_timeout = 120
-
-# Default timeout for spack package locks in seconds or None (no timeout).
-# A balance needs to be struck between quick turnaround for parallel installs
-# (to avoid excess delays when performing a parallel installation) and waiting
-# long enough for the next possible spec to install (to avoid excessive
-# checking of the last high priority package) or holding on to a lock (to
-# ensure a failed install is properly tracked).
-_pkg_lock_timeout = None
-
-# Types of dependencies tracked by the database
-# We store by DAG hash, so we track the dependencies that the DAG hash includes.
-_tracked_deps = ht.dag_hash.deptype
-
-# Default list of fields written for each install record
-default_install_record_fields = [
+#: Default timeout for spack database locks in seconds or None (no timeout).
+#: A balance needs to be struck between quick turnaround for parallel installs
+#: (to avoid excess delays) and waiting long enough when the system is busy
+#: (to ensure the database is updated).
+_DEFAULT_DB_LOCK_TIMEOUT = 120
+
+#: Default timeout for spack package locks in seconds or None (no timeout).
+#: A balance needs to be struck between quick turnaround for parallel installs
+#: (to avoid excess delays when performing a parallel installation) and waiting
+#: long enough for the next possible spec to install (to avoid excessive
+#: checking of the last high priority package) or holding on to a lock (to
+#: ensure a failed install is properly tracked).
+_DEFAULT_PKG_LOCK_TIMEOUT = None
+
+#: Types of dependencies tracked by the database
+#: We store by DAG hash, so we track the dependencies that the DAG hash includes.
+_TRACKED_DEPENDENCIES = ht.dag_hash.deptype
+
+#: Default list of fields written for each install record
+DEFAULT_INSTALL_RECORD_FIELDS = (
"spec",
"ref_count",
"path",
@@ -104,10 +99,10 @@ default_install_record_fields = [
"explicit",
"installation_time",
"deprecated_for",
-]
+)
-def reader(version):
+def reader(version: vn.StandardVersion) -> Type["spack.spec.SpecfileReaderBase"]:
reader_cls = {
vn.Version("5"): spack.spec.SpecfileV1,
vn.Version("6"): spack.spec.SpecfileV3,
@@ -116,7 +111,7 @@ def reader(version):
return reader_cls[version]
-def _now():
+def _now() -> float:
"""Returns the time since the epoch"""
return time.time()
@@ -220,7 +215,7 @@ class InstallRecord:
else:
return InstallStatuses.MISSING in installed
- def to_dict(self, include_fields=default_install_record_fields):
+ def to_dict(self, include_fields=DEFAULT_INSTALL_RECORD_FIELDS):
rec_dict = {}
for field_name in include_fields:
@@ -256,11 +251,14 @@ class ForbiddenLockError(SpackError):
class ForbiddenLock:
- def __getattribute__(self, name):
+ def __getattr__(self, name):
raise ForbiddenLockError("Cannot access attribute '{0}' of lock".format(name))
+ def __reduce__(self):
+ return ForbiddenLock, tuple()
+
-_query_docstring = """
+_QUERY_DOCSTRING = """
Args:
query_spec: queries iterate through specs in the database and
@@ -308,73 +306,106 @@ _query_docstring = """
"""
+#: Data class to configure locks in Database objects
+#:
+#: Args:
+#: enable (bool): whether to enable locks or not.
+#: database_timeout (int or None): timeout for the database lock
+#: package_timeout (int or None): timeout for the package lock
-class Database:
- """Per-process lock objects for each install prefix."""
+class LockConfiguration(NamedTuple):
+ enable: bool
+ database_timeout: Optional[int]
+ package_timeout: Optional[int]
+
+
+#: Configure a database to avoid using locks
+NO_LOCK: LockConfiguration = LockConfiguration(
+ enable=False, database_timeout=None, package_timeout=None
+)
+
+
+#: Configure the database to use locks without a timeout
+NO_TIMEOUT: LockConfiguration = LockConfiguration(
+ enable=True, database_timeout=None, package_timeout=None
+)
+
+#: Default configuration for database locks
+DEFAULT_LOCK_CFG: LockConfiguration = LockConfiguration(
+ enable=True,
+ database_timeout=_DEFAULT_DB_LOCK_TIMEOUT,
+ package_timeout=_DEFAULT_PKG_LOCK_TIMEOUT,
+)
+
+
+def lock_configuration(configuration):
+ """Return a LockConfiguration from a spack.config.Configuration object."""
+ return LockConfiguration(
+ enable=configuration.get("config:locks", True),
+ database_timeout=configuration.get("config:db_lock_timeout"),
+ package_timeout=configuration.get("config:db_lock_timeout"),
+ )
+
+class Database:
+ #: Per-process lock objects for each install prefix
_prefix_locks: Dict[str, lk.Lock] = {}
- """Per-process failure (lock) objects for each install prefix."""
+ #: Per-process failure (lock) objects for each install prefix
_prefix_failures: Dict[str, lk.Lock] = {}
+ #: Fields written for each install record
+ record_fields: Tuple[str, ...] = DEFAULT_INSTALL_RECORD_FIELDS
+
def __init__(
self,
- root,
- db_dir=None,
- upstream_dbs=None,
- is_upstream=False,
- enable_transaction_locking=True,
- record_fields=default_install_record_fields,
- ):
- """Create a Database for Spack installations under ``root``.
-
- A Database is a cache of Specs data from ``$prefix/spec.yaml``
- files in Spack installation directories.
+ root: str,
+ upstream_dbs: Optional[List["Database"]] = None,
+ is_upstream: bool = False,
+ lock_cfg: LockConfiguration = DEFAULT_LOCK_CFG,
+ ) -> None:
+ """Database for Spack installations.
- By default, Database files (data and lock files) are stored
- under ``root/.spack-db``, which is created if it does not
- exist. This is the ``db_dir``.
+ A Database is a cache of Specs data from ``$prefix/spec.yaml`` files
+ in Spack installation directories.
- The Database will attempt to read an ``index.json`` file in
- ``db_dir``. If that does not exist, it will create a database
- when needed by scanning the entire Database root for ``spec.yaml``
- files according to Spack's ``DirectoryLayout``.
+ Database files (data and lock files) are stored under ``root/.spack-db``, which is
+ created if it does not exist. This is the "database directory".
- Caller may optionally provide a custom ``db_dir`` parameter
- where data will be stored. This is intended to be used for
- testing the Database class.
+ The database will attempt to read an ``index.json`` file in the database directory.
+ If that does not exist, it will create a database when needed by scanning the entire
+ store root for ``spec.json`` files according to Spack's directory layout.
- This class supports writing buildcache index files, in which case
- certain fields are not needed in each install record, and no
- transaction locking is required. To use this feature, provide
- ``enable_transaction_locking=False``, and specify a list of needed
- fields in ``record_fields``.
+ Args:
+ root: root directory where to create the database directory.
+ upstream_dbs: upstream databases for this repository.
+ is_upstream: whether this repository is an upstream.
+ lock_cfg: configuration for the locks to be used by this repository.
+ Relevant only if the repository is not an upstream.
"""
self.root = root
-
- # If the db_dir is not provided, default to within the db root.
- self._db_dir = db_dir or os.path.join(self.root, _db_dirname)
+ self.database_directory = os.path.join(self.root, _DB_DIRNAME)
# Set up layout of database files within the db dir
- self._index_path = os.path.join(self._db_dir, "index.json")
- self._verifier_path = os.path.join(self._db_dir, "index_verifier")
- self._lock_path = os.path.join(self._db_dir, "lock")
+ self._index_path = os.path.join(self.database_directory, "index.json")
+ self._verifier_path = os.path.join(self.database_directory, "index_verifier")
+ self._lock_path = os.path.join(self.database_directory, "lock")
# This is for other classes to use to lock prefix directories.
- self.prefix_lock_path = os.path.join(self._db_dir, "prefix_lock")
+ self.prefix_lock_path = os.path.join(self.database_directory, "prefix_lock")
# Ensure a persistent location for dealing with parallel installation
# failures (e.g., across near-concurrent processes).
- self._failure_dir = os.path.join(self._db_dir, "failures")
+ self._failure_dir = os.path.join(self.database_directory, "failures")
# Support special locks for handling parallel installation failures
# of a spec.
- self.prefix_fail_path = os.path.join(self._db_dir, "prefix_failures")
+ self.prefix_fail_path = os.path.join(self.database_directory, "prefix_failures")
# Create needed directories and files
- if not is_upstream and not os.path.exists(self._db_dir):
- fs.mkdirp(self._db_dir)
+ if not is_upstream and not os.path.exists(self.database_directory):
+ fs.mkdirp(self.database_directory)
if not is_upstream and not os.path.exists(self._failure_dir):
fs.mkdirp(self._failure_dir)
@@ -391,10 +422,9 @@ class Database:
self._state_is_inconsistent = False
# initialize rest of state.
- self.db_lock_timeout = spack.config.get("config:db_lock_timeout") or _db_lock_timeout
- self.package_lock_timeout = (
- spack.config.get("config:package_lock_timeout") or _pkg_lock_timeout
- )
+ self.db_lock_timeout = lock_cfg.database_timeout
+ self.package_lock_timeout = lock_cfg.package_timeout
+
tty.debug("DATABASE LOCK TIMEOUT: {0}s".format(str(self.db_lock_timeout)))
timeout_format_str = (
"{0}s".format(str(self.package_lock_timeout))
@@ -403,18 +433,22 @@ class Database:
)
tty.debug("PACKAGE LOCK TIMEOUT: {0}".format(str(timeout_format_str)))
+ self.lock: Union[ForbiddenLock, lk.Lock]
if self.is_upstream:
self.lock = ForbiddenLock()
else:
self.lock = lk.Lock(
- self._lock_path, default_timeout=self.db_lock_timeout, desc="database"
+ self._lock_path,
+ default_timeout=self.db_lock_timeout,
+ desc="database",
+ enable=lock_cfg.enable,
)
self._data: Dict[str, InstallRecord] = {}
# For every installed spec we keep track of its install prefix, so that
# we can answer the simple query whether a given path is already taken
# before installing a different spec.
- self._installed_prefixes = set()
+ self._installed_prefixes: Set[str] = set()
self.upstream_dbs = list(upstream_dbs) if upstream_dbs else []
@@ -426,14 +460,8 @@ class Database:
# message)
self._fail_when_missing_deps = False
- if enable_transaction_locking:
- self._write_transaction_impl = lk.WriteTransaction
- self._read_transaction_impl = lk.ReadTransaction
- else:
- self._write_transaction_impl = lang.nullcontext
- self._read_transaction_impl = lang.nullcontext
-
- self._record_fields = record_fields
+ self._write_transaction_impl = lk.WriteTransaction
+ self._read_transaction_impl = lk.ReadTransaction
def write_transaction(self):
"""Get a write lock context manager for use in a `with` block."""
@@ -450,7 +478,7 @@ class Database:
return os.path.join(self._failure_dir, "{0}-{1}".format(spec.name, spec.dag_hash()))
- def clear_all_failures(self):
+ def clear_all_failures(self) -> None:
"""Force remove install failure tracking files."""
tty.debug("Releasing prefix failure locks")
for pkg_id in list(self._prefix_failures.keys()):
@@ -468,19 +496,17 @@ class Database:
"Unable to remove failure marking file {0}: {1}".format(fail_mark, str(exc))
)
- def clear_failure(self, spec, force=False):
+ def clear_failure(self, spec: "spack.spec.Spec", force: bool = False) -> None:
"""
Remove any persistent and cached failure tracking for the spec.
see `mark_failed()`.
Args:
- spec (spack.spec.Spec): the spec whose failure indicators are being removed
- force (bool): True if the failure information should be cleared
- when a prefix failure lock exists for the file or False if
- the failure should not be cleared (e.g., it may be
- associated with a concurrent build)
-
+ spec: the spec whose failure indicators are being removed
+ force: True if the failure information should be cleared when a prefix failure
+ lock exists for the file, or False if the failure should not be cleared (e.g.,
+ it may be associated with a concurrent build)
"""
failure_locked = self.prefix_failure_locked(spec)
if failure_locked and not force:
@@ -506,7 +532,7 @@ class Database:
)
)
- def mark_failed(self, spec):
+ def mark_failed(self, spec: "spack.spec.Spec") -> lk.Lock:
"""
Mark a spec as failing to install.
@@ -556,7 +582,7 @@ class Database:
return self._prefix_failures[prefix]
- def prefix_failed(self, spec):
+ def prefix_failed(self, spec: "spack.spec.Spec") -> bool:
"""Return True if the prefix (installation) is marked as failed."""
# The failure was detected in this process.
if spec.prefix in self._prefix_failures:
@@ -571,7 +597,7 @@ class Database:
# spack build process running concurrently.
return self.prefix_failure_marked(spec)
- def prefix_failure_locked(self, spec):
+ def prefix_failure_locked(self, spec: "spack.spec.Spec") -> bool:
"""Return True if a process has a failure lock on the spec."""
check = lk.Lock(
self.prefix_fail_path,
@@ -583,11 +609,11 @@ class Database:
return check.is_write_locked()
- def prefix_failure_marked(self, spec):
+ def prefix_failure_marked(self, spec: "spack.spec.Spec") -> bool:
"""Determine if the spec has a persistent failure marking."""
return os.path.exists(self._failed_spec_path(spec))
- def prefix_lock(self, spec, timeout=None):
+ def prefix_lock(self, spec: "spack.spec.Spec", timeout: Optional[float] = None) -> lk.Lock:
"""Get a lock on a particular spec's installation directory.
NOTE: The installation directory **does not** need to exist.
@@ -659,7 +685,7 @@ class Database:
"""
# map from per-spec hash code to installation record.
installs = dict(
- (k, v.to_dict(include_fields=self._record_fields)) for k, v in self._data.items()
+ (k, v.to_dict(include_fields=self.record_fields)) for k, v in self._data.items()
)
# database includes installation list and version.
@@ -672,7 +698,7 @@ class Database:
"database": {
# TODO: move this to a top-level _meta section if we ever
# TODO: bump the DB version to 7
- "version": str(_db_version),
+ "version": str(_DB_VERSION),
# dictionary of installation records, keyed by DAG hash
"installs": installs,
}
@@ -809,13 +835,13 @@ class Database:
# TODO: better version checking semantics.
version = vn.Version(db["version"])
- if version > _db_version:
- raise InvalidDatabaseVersionError(self, _db_version, version)
- elif version < _db_version:
- if not any(old == version and new == _db_version for old, new in _skip_reindex):
+ if version > _DB_VERSION:
+ raise InvalidDatabaseVersionError(self, _DB_VERSION, version)
+ elif version < _DB_VERSION:
+ if not any(old == version and new == _DB_VERSION for old, new in _SKIP_REINDEX):
tty.warn(
"Spack database version changed from %s to %s. Upgrading."
- % (version, _db_version)
+ % (version, _DB_VERSION)
)
self.reindex(spack.store.layout)
@@ -980,7 +1006,7 @@ class Database:
# applications.
tty.debug("RECONSTRUCTING FROM OLD DB: {0}".format(entry.spec))
try:
- layout = None if entry.spec.external else spack.store.layout
+ layout = None if entry.spec.external else directory_layout
kwargs = {
"spec": entry.spec,
"directory_layout": layout,
@@ -1006,7 +1032,7 @@ class Database:
counts = {}
for key, rec in self._data.items():
counts.setdefault(key, 0)
- for dep in rec.spec.dependencies(deptype=_tracked_deps):
+ for dep in rec.spec.dependencies(deptype=_TRACKED_DEPENDENCIES):
dep_key = dep.dag_hash()
counts.setdefault(dep_key, 0)
counts[dep_key] += 1
@@ -1095,13 +1121,13 @@ class Database:
):
"""Add an install record for this spec to the database.
- Assumes spec is installed in ``layout.path_for_spec(spec)``.
+ Assumes spec is installed in ``directory_layout.path_for_spec(spec)``.
Also ensures dependencies are present and updated in the DB as
either installed or missing.
Args:
- spec: spec to be added
+ spec (spack.spec.Spec): spec to be added
directory_layout: layout of the spec installation
explicit:
Possible values: True, False, any
@@ -1128,7 +1154,7 @@ class Database:
# Retrieve optional arguments
installation_time = installation_time or _now()
- for edge in spec.edges_to_dependencies(deptype=_tracked_deps):
+ for edge in spec.edges_to_dependencies(deptype=_TRACKED_DEPENDENCIES):
if edge.spec.dag_hash() in self._data:
continue
# allow missing build-only deps. This prevents excessive
@@ -1180,7 +1206,7 @@ class Database:
self._data[key] = InstallRecord(new_spec, path, installed, ref_count=0, **extra_args)
# Connect dependencies from the DB to the new copy.
- for dep in spec.edges_to_dependencies(deptype=_tracked_deps):
+ for dep in spec.edges_to_dependencies(deptype=_TRACKED_DEPENDENCIES):
dkey = dep.spec.dag_hash()
upstream, record = self.query_by_spec_hash(dkey)
new_spec._add_dependency(record.spec, deptypes=dep.deptypes, virtuals=dep.virtuals)
@@ -1243,7 +1269,7 @@ class Database:
if rec.ref_count == 0 and not rec.installed:
del self._data[key]
- for dep in spec.dependencies(deptype=_tracked_deps):
+ for dep in spec.dependencies(deptype=_TRACKED_DEPENDENCIES):
self._decrement_ref_count(dep)
def _increment_ref_count(self, spec):
@@ -1273,8 +1299,8 @@ class Database:
# Remove any reference to this node from dependencies and
# decrement the reference count
- rec.spec.detach(deptype=_tracked_deps)
- for dep in rec.spec.dependencies(deptype=_tracked_deps):
+ rec.spec.detach(deptype=_TRACKED_DEPENDENCIES)
+ for dep in rec.spec.dependencies(deptype=_TRACKED_DEPENDENCIES):
self._decrement_ref_count(dep)
if rec.deprecated_for:
@@ -1390,10 +1416,7 @@ class Database:
@_autospec
def installed_extensions_for(self, extendee_spec):
- """
- Return the specs of all packages that extend
- the given spec
- """
+ """Returns the specs of all packages that extend the given spec"""
for spec in self.query():
if spec.package.extends(extendee_spec):
yield spec.package
@@ -1420,7 +1443,7 @@ class Database:
# nothing found
return default
- def get_by_hash_local(self, *args, **kwargs):
+ def get_by_hash_local(self, dag_hash, default=None, installed=any):
"""Look up a spec in *this DB* by DAG hash, or by a DAG hash prefix.
Arguments:
@@ -1444,7 +1467,7 @@ class Database:
"""
with self.read_transaction():
- return self._get_by_hash_local(*args, **kwargs)
+ return self._get_by_hash_local(dag_hash, default=default, installed=installed)
def get_by_hash(self, dag_hash, default=None, installed=any):
"""Look up a spec by DAG hash, or by a DAG hash prefix.
@@ -1530,7 +1553,7 @@ class Database:
if explicit is not any and rec.explicit != explicit:
continue
- if known is not any and spack.repo.path.exists(rec.spec.name) != known:
+ if known is not any and known(rec.spec.name):
continue
if start_date or end_date:
@@ -1545,7 +1568,7 @@ class Database:
if _query.__doc__ is None:
_query.__doc__ = ""
- _query.__doc__ += _query_docstring
+ _query.__doc__ += _QUERY_DOCSTRING
def query_local(self, *args, **kwargs):
"""Query only the local Spack database.
@@ -1559,7 +1582,7 @@ class Database:
if query_local.__doc__ is None:
query_local.__doc__ = ""
- query_local.__doc__ += _query_docstring
+ query_local.__doc__ += _QUERY_DOCSTRING
def query(self, *args, **kwargs):
"""Query the Spack database including all upstream databases."""
@@ -1578,7 +1601,7 @@ class Database:
if query.__doc__ is None:
query.__doc__ = ""
- query.__doc__ += _query_docstring
+ query.__doc__ += _QUERY_DOCSTRING
def query_one(self, query_spec, known=any, installed=True):
"""Query for exactly one spec that matches the query spec.
diff --git a/lib/spack/spack/store.py b/lib/spack/spack/store.py
index 1c6129d70e..4b673077ef 100644
--- a/lib/spack/spack/store.py
+++ b/lib/spack/spack/store.py
@@ -19,8 +19,10 @@ debugging easier.
"""
import contextlib
import os
+import pathlib
import re
-from typing import Union
+import uuid
+from typing import Any, Callable, Dict, Generator, List, Optional, Union
import llnl.util.lang
import llnl.util.tty as tty
@@ -33,7 +35,10 @@ import spack.paths
import spack.util.path
#: default installation root, relative to the Spack install path
-default_install_tree_root = os.path.join(spack.paths.opt_path, "spack")
+DEFAULT_INSTALL_TREE_ROOT = os.path.join(spack.paths.opt_path, "spack")
+
+
+ConfigurationType = Union["spack.config.Configuration", "llnl.util.lang.Singleton"]
def parse_install_tree(config_dict):
@@ -79,7 +84,7 @@ def parse_install_tree(config_dict):
projections = {"all": all_projection}
else:
- unpadded_root = install_tree.get("root", default_install_tree_root)
+ unpadded_root = install_tree.get("root", DEFAULT_INSTALL_TREE_ROOT)
unpadded_root = spack.util.path.canonicalize_path(unpadded_root)
padded_length = install_tree.get("padded_length", False)
@@ -123,7 +128,7 @@ def parse_install_tree(config_dict):
else:
root = unpadded_root
- return (root, unpadded_root, projections)
+ return root, unpadded_root, projections
class Store:
@@ -132,95 +137,133 @@ class Store:
Stores consist of packages installed according to a
``DirectoryLayout``, along with an index, or _database_ of their
contents. The directory layout controls what paths look like and how
- Spack ensures that each uniqe spec gets its own unique directory (or
- not, though we don't recommend that). The database is a signle file
+ Spack ensures that each unique spec gets its own unique directory (or
+ not, though we don't recommend that). The database is a single file
that caches metadata for the entire Spack installation. It prevents
us from having to spider the install tree to figure out what's there.
Args:
- root (str): path to the root of the install tree
- unpadded_root (str): path to the root of the install tree without
- padding; the sbang script has to be installed here to work with
- padded roots
- path_scheme (str): expression according to guidelines in
- ``spack.util.path`` that describes how to construct a path to
+ root: path to the root of the install tree
+ unpadded_root: path to the root of the install tree without padding.
+ The sbang script has to be installed here to work with padded roots
+        projections: expression, following the guidelines in ``spack.util.path``, that describes how to construct a path to
a package prefix in this store
- hash_length (int): length of the hashes used in the directory
- layout; spec hash suffixes will be truncated to this length
+ hash_length: length of the hashes used in the directory layout. Spec hash suffixes will be
+ truncated to this length
+ upstreams: optional list of upstream databases
+ lock_cfg: lock configuration for the database
"""
- def __init__(self, root, unpadded_root=None, projections=None, hash_length=None):
+ def __init__(
+ self,
+ root: str,
+ unpadded_root: Optional[str] = None,
+ projections: Optional[Dict[str, str]] = None,
+ hash_length: Optional[int] = None,
+ upstreams: Optional[List[spack.database.Database]] = None,
+ lock_cfg: spack.database.LockConfiguration = spack.database.NO_LOCK,
+ ) -> None:
self.root = root
self.unpadded_root = unpadded_root or root
self.projections = projections
self.hash_length = hash_length
- self.db = spack.database.Database(root, upstream_dbs=retrieve_upstream_dbs())
+ self.upstreams = upstreams
+ self.lock_cfg = lock_cfg
+ self.db = spack.database.Database(root, upstream_dbs=upstreams, lock_cfg=lock_cfg)
self.layout = spack.directory_layout.DirectoryLayout(
root, projections=projections, hash_length=hash_length
)
- def reindex(self):
+ def reindex(self) -> None:
"""Convenience function to reindex the store DB with its own layout."""
return self.db.reindex(self.layout)
- def serialize(self):
- """Return a pickle-able object that can be used to reconstruct
- a store.
- """
- return (self.root, self.unpadded_root, self.projections, self.hash_length)
-
- @staticmethod
- def deserialize(token):
- """Return a store reconstructed from a token created by
- the serialize method.
-
- Args:
- token: return value of the serialize method
-
- Returns:
- Store object reconstructed from the token
- """
- return Store(*token)
+ def __reduce__(self):
+ return Store, (
+ self.root,
+ self.unpadded_root,
+ self.projections,
+ self.hash_length,
+ self.upstreams,
+ self.lock_cfg,
+ )
-def _store():
- """Get the singleton store instance."""
- import spack.bootstrap
+def create(configuration: ConfigurationType) -> Store:
+ """Create a store from the configuration passed as input.
- config_dict = spack.config.get("config")
+ Args:
+ configuration: configuration to create a store.
+ """
+ configuration = configuration or spack.config.config
+ config_dict = configuration.get("config")
root, unpadded_root, projections = parse_install_tree(config_dict)
- hash_length = spack.config.get("config:install_hash_length")
+ hash_length = configuration.get("config:install_hash_length")
+
+ install_roots = [
+ install_properties["install_tree"]
+ for install_properties in configuration.get("upstreams", {}).values()
+ ]
+ upstreams = _construct_upstream_dbs_from_install_roots(install_roots)
return Store(
- root=root, unpadded_root=unpadded_root, projections=projections, hash_length=hash_length
+ root=root,
+ unpadded_root=unpadded_root,
+ projections=projections,
+ hash_length=hash_length,
+ upstreams=upstreams,
+ lock_cfg=spack.database.lock_configuration(configuration),
)
+def _create_global() -> Store:
+ # Check that the user is not trying to install software into the store
+ # reserved by Spack to bootstrap its own dependencies, since this would
+ # lead to bizarre behaviors (e.g. cleaning the bootstrap area would wipe
+    # user-installed software)
+ import spack.bootstrap
+
+ enable_bootstrap = spack.config.config.get("bootstrap:enable", True)
+ if enable_bootstrap and spack.bootstrap.store_path() == root:
+ msg = (
+ 'please change the install tree root "{0}" in your '
+ "configuration [path reserved for Spack internal use]"
+ )
+ raise ValueError(msg.format(root))
+ return create(configuration=spack.config.config)
+
+
#: Singleton store instance
-store: Union[Store, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(_store)
+store: Union[Store, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(_create_global)
-def _store_root():
+def _store_root() -> str:
return store.root
-def _store_unpadded_root():
+def _store_unpadded_root() -> str:
return store.unpadded_root
-def _store_db():
+def _store_db() -> spack.database.Database:
return store.db
-def _store_layout():
+def _store_layout() -> spack.directory_layout.DirectoryLayout:
return store.layout
# convenience accessors for parts of the singleton store
-root = llnl.util.lang.LazyReference(_store_root)
-unpadded_root = llnl.util.lang.LazyReference(_store_unpadded_root)
-db = llnl.util.lang.LazyReference(_store_db)
-layout = llnl.util.lang.LazyReference(_store_layout)
+root: Union[llnl.util.lang.LazyReference, str] = llnl.util.lang.LazyReference(_store_root)
+unpadded_root: Union[llnl.util.lang.LazyReference, str] = llnl.util.lang.LazyReference(
+ _store_unpadded_root
+)
+db: Union[llnl.util.lang.LazyReference, spack.database.Database] = llnl.util.lang.LazyReference(
+ _store_db
+)
+layout: Union[
+ llnl.util.lang.LazyReference, "spack.directory_layout.DirectoryLayout"
+] = llnl.util.lang.LazyReference(_store_layout)
def reinitialize():
@@ -232,7 +275,7 @@ def reinitialize():
token = store, root, unpadded_root, db, layout
- store = llnl.util.lang.Singleton(_store)
+ store = llnl.util.lang.Singleton(_create_global)
root = llnl.util.lang.LazyReference(_store_root)
unpadded_root = llnl.util.lang.LazyReference(_store_unpadded_root)
db = llnl.util.lang.LazyReference(_store_db)
@@ -248,18 +291,10 @@ def restore(token):
store, root, unpadded_root, db, layout = token
-def retrieve_upstream_dbs():
- other_spack_instances = spack.config.get("upstreams", {})
-
- install_roots = []
- for install_properties in other_spack_instances.values():
- install_roots.append(install_properties["install_tree"])
-
- return _construct_upstream_dbs_from_install_roots(install_roots)
-
-
-def _construct_upstream_dbs_from_install_roots(install_roots, _test=False):
- accumulated_upstream_dbs = []
+def _construct_upstream_dbs_from_install_roots(
+ install_roots: List[str], _test: bool = False
+) -> List[spack.database.Database]:
+ accumulated_upstream_dbs: List[spack.database.Database] = []
for install_root in reversed(install_roots):
upstream_dbs = list(accumulated_upstream_dbs)
next_db = spack.database.Database(
@@ -274,8 +309,13 @@ def _construct_upstream_dbs_from_install_roots(install_roots, _test=False):
return accumulated_upstream_dbs
-def find(constraints, multiple=False, query_fn=None, **kwargs):
- """Return a list of specs matching the constraints passed as inputs.
+def find(
+ constraints: Union[str, List[str], List["spack.spec.Spec"]],
+ multiple: bool = False,
+ query_fn: Optional[Callable[[Any], List["spack.spec.Spec"]]] = None,
+ **kwargs,
+) -> List["spack.spec.Spec"]:
+ """Returns a list of specs matching the constraints passed as inputs.
At least one spec per constraint must match, otherwise the function
will error with an appropriate message.
@@ -287,21 +327,17 @@ def find(constraints, multiple=False, query_fn=None, **kwargs):
The query function must accept a spec as its first argument.
Args:
- constraints (List[spack.spec.Spec]): specs to be matched against
- installed packages
- multiple (bool): if True multiple matches per constraint are admitted
+ constraints: spec(s) to be matched against installed packages
+ multiple: if True multiple matches per constraint are admitted
query_fn (Callable): query function to get matching specs. By default,
``spack.store.db.query``
**kwargs: keyword arguments forwarded to the query function
-
- Return:
- List of matching specs
"""
- # Normalize input to list of specs
if isinstance(constraints, str):
constraints = [spack.spec.Spec(constraints)]
- matching_specs, errors = [], []
+ matching_specs: List[spack.spec.Spec] = []
+ errors = []
query_fn = query_fn or spack.store.db.query
for spec in constraints:
current_matches = query_fn(spec, **kwargs)
@@ -327,39 +363,45 @@ def find(constraints, multiple=False, query_fn=None, **kwargs):
return matching_specs
-def specfile_matches(filename, **kwargs):
+def specfile_matches(filename: str, **kwargs) -> List["spack.spec.Spec"]:
"""Same as find but reads the query from a spec file.
Args:
- filename (str): YAML or JSON file from which to read the query.
+ filename: YAML or JSON file from which to read the query.
**kwargs: keyword arguments forwarded to "find"
-
- Return:
- List of matches
"""
query = [spack.spec.Spec.from_specfile(filename)]
return spack.store.find(query, **kwargs)
@contextlib.contextmanager
-def use_store(store_or_path):
+def use_store(
+ path: Union[str, pathlib.Path], extra_data: Optional[Dict[str, Any]] = None
+) -> Generator[Store, None, None]:
"""Use the store passed as argument within the context manager.
Args:
- store_or_path: either a Store object ot a path to where the store resides
+ path: path to the store.
+ extra_data: extra configuration under "config:install_tree" to be
+ taken into account.
- Returns:
+ Yields:
Store object associated with the context manager's store
"""
global store, db, layout, root, unpadded_root
- # Normalize input arguments
- temporary_store = store_or_path
- if not isinstance(store_or_path, Store):
- temporary_store = Store(store_or_path)
+ assert not isinstance(path, Store), "cannot pass a store anymore"
+ scope_name = "use-store-{}".format(uuid.uuid4())
+ data = {"root": str(path)}
+ if extra_data:
+ data.update(extra_data)
# Swap the store with the one just constructed and return it
_ = store.db
+ spack.config.config.push_scope(
+ spack.config.InternalConfigScope(name=scope_name, data={"config": {"install_tree": data}})
+ )
+ temporary_store = create(configuration=spack.config.config)
original_store, store = store, temporary_store
db, layout = store.db, store.layout
root, unpadded_root = store.root, store.unpadded_root
@@ -371,6 +413,7 @@ def use_store(store_or_path):
store = original_store
db, layout = original_store.db, original_store.layout
root, unpadded_root = original_store.root, original_store.unpadded_root
+ spack.config.config.remove_scope(scope_name=scope_name)
class MatchError(spack.error.SpackError):
diff --git a/lib/spack/spack/subprocess_context.py b/lib/spack/spack/subprocess_context.py
index 8283a3d3ba..86a7f15ebe 100644
--- a/lib/spack/spack/subprocess_context.py
+++ b/lib/spack/spack/subprocess_context.py
@@ -27,7 +27,7 @@ import spack.platforms
import spack.repo
import spack.store
-_serialize = sys.platform == "win32" or (sys.version_info >= (3, 8) and sys.platform == "darwin")
+_SERIALIZE = sys.platform == "win32" or (sys.version_info >= (3, 8) and sys.platform == "darwin")
patches = None
@@ -66,7 +66,7 @@ class PackageInstallContext:
"""
def __init__(self, pkg):
- if _serialize:
+ if _SERIALIZE:
self.serialized_pkg = serialize(pkg)
self.serialized_env = serialize(spack.environment.active_environment())
else:
@@ -78,8 +78,8 @@ class PackageInstallContext:
def restore(self):
self.test_state.restore()
spack.main.spack_working_dir = self.spack_working_dir
- env = pickle.load(self.serialized_env) if _serialize else self.env
- pkg = pickle.load(self.serialized_pkg) if _serialize else self.pkg
+ env = pickle.load(self.serialized_env) if _SERIALIZE else self.env
+ pkg = pickle.load(self.serialized_pkg) if _SERIALIZE else self.pkg
if env:
spack.environment.activate(env)
return pkg
@@ -93,25 +93,23 @@ class TestState:
"""
def __init__(self):
- if _serialize:
- self.repo_dirs = list(r.root for r in spack.repo.path.repos)
+ if _SERIALIZE:
self.config = spack.config.config
self.platform = spack.platforms.host
self.test_patches = store_patches()
- self.store_token = spack.store.store.serialize()
+ self.store = spack.store.store
def restore(self):
- if _serialize:
+ if _SERIALIZE:
spack.config.config = self.config
- spack.repo.path = spack.repo._path(self.config)
+ spack.repo.path = spack.repo.create(self.config)
spack.platforms.host = self.platform
- new_store = spack.store.Store.deserialize(self.store_token)
- spack.store.store = new_store
- spack.store.root = new_store.root
- spack.store.unpadded_root = new_store.unpadded_root
- spack.store.db = new_store.db
- spack.store.layout = new_store.layout
+ spack.store.store = self.store
+ spack.store.root = self.store.root
+ spack.store.unpadded_root = self.store.unpadded_root
+ spack.store.db = self.store.db
+ spack.store.layout = self.store.layout
self.test_patches.restore()
diff --git a/lib/spack/spack/test/bindist.py b/lib/spack/spack/test/bindist.py
index ed60a1056a..d791e19bd2 100644
--- a/lib/spack/spack/test/bindist.py
+++ b/lib/spack/spack/test/bindist.py
@@ -479,9 +479,6 @@ def test_update_sbang(tmpdir, test_mirror):
into the non-default directory layout scheme, triggering an update of the
sbang.
"""
- scheme = os.path.join(
- "${name}", "${version}", "${architecture}-${compiler.name}-${compiler.version}-${hash}"
- )
spec_str = "old-sbang"
# Concretize a package with some old-fashioned sbang lines.
old_spec = Spec(spec_str).concretized()
@@ -504,12 +501,8 @@ def test_update_sbang(tmpdir, test_mirror):
# Switch the store to the new install tree locations
newtree_dir = tmpdir.join("newtree")
- s = spack.store.Store(str(newtree_dir))
- s.layout = DirectoryLayout(str(newtree_dir), path_scheme=scheme)
-
- with spack.store.use_store(s):
- new_spec = Spec("old-sbang")
- new_spec.concretize()
+ with spack.store.use_store(str(newtree_dir)):
+ new_spec = Spec("old-sbang").concretized()
assert new_spec.dag_hash() == old_spec.dag_hash()
# Install package from buildcache
diff --git a/lib/spack/spack/test/cmd/bootstrap.py b/lib/spack/spack/test/cmd/bootstrap.py
index d0b572e21c..ac86729a11 100644
--- a/lib/spack/spack/test/cmd/bootstrap.py
+++ b/lib/spack/spack/test/cmd/bootstrap.py
@@ -99,7 +99,7 @@ def test_reset_in_file_scopes_overwrites_backup_files(mutable_config):
assert os.path.exists(backup_file)
-def test_list_sources(capsys):
+def test_list_sources(config, capsys):
# Get the merged list and ensure we get our defaults
with capsys.disabled():
output = _bootstrap("list")
diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py
index 8fe919d00e..6eed2dfc74 100644
--- a/lib/spack/spack/test/cmd/env.py
+++ b/lib/spack/spack/test/cmd/env.py
@@ -150,7 +150,7 @@ def test_env_list(mutable_mock_env_path):
assert "baz" in out
# make sure `spack env list` skips invalid things in var/spack/env
- mutable_mock_env_path.join(".DS_Store").ensure(file=True)
+ (mutable_mock_env_path / ".DS_Store").touch()
out = env("list")
assert "foo" in out
@@ -1118,12 +1118,12 @@ def test_uninstall_removes_from_env(mock_stage, mock_fetch, install_mockery):
@pytest.mark.usefixtures("config")
-def test_indirect_build_dep(tmpdir):
+def test_indirect_build_dep(tmp_path):
"""Simple case of X->Y->Z where Y is a build/link dep and Z is a
build-only dep. Make sure this concrete DAG is preserved when writing the
environment out and reading it back.
"""
- builder = spack.repo.MockRepositoryBuilder(tmpdir)
+ builder = spack.repo.MockRepositoryBuilder(tmp_path / "repo")
builder.add_package("z")
builder.add_package("y", dependencies=[("z", "build", None)])
builder.add_package("x", dependencies=[("y", None, None)])
@@ -1146,7 +1146,7 @@ def test_indirect_build_dep(tmpdir):
@pytest.mark.usefixtures("config")
-def test_store_different_build_deps(tmpdir):
+def test_store_different_build_deps(tmp_path):
r"""Ensure that an environment can store two instances of a build-only
dependency::
@@ -1157,7 +1157,7 @@ def test_store_different_build_deps(tmpdir):
z1
"""
- builder = spack.repo.MockRepositoryBuilder(tmpdir)
+ builder = spack.repo.MockRepositoryBuilder(tmp_path / "mirror")
builder.add_package("z")
builder.add_package("y", dependencies=[("z", "build", None)])
builder.add_package("x", dependencies=[("y", None, None), ("z", "build", None)])
@@ -3350,12 +3350,11 @@ def test_relative_view_path_on_command_line_is_made_absolute(tmp_path, config):
assert os.path.samefile("view", environment.default_view.root)
-def test_environment_created_in_users_location(mutable_config, tmpdir):
+def test_environment_created_in_users_location(mutable_mock_env_path, tmp_path):
"""Test that an environment is created in a location based on the config"""
- spack.config.set("config:environments_root", str(tmpdir.join("envs")))
- env_dir = spack.config.get("config:environments_root")
+ env_dir = str(mutable_mock_env_path)
- assert tmpdir.strpath in env_dir
+ assert str(tmp_path) in env_dir
assert not os.path.isdir(env_dir)
dir_name = "user_env"
diff --git a/lib/spack/spack/test/cmd/location.py b/lib/spack/spack/test/cmd/location.py
index 1e42141199..28b68ab8ad 100644
--- a/lib/spack/spack/test/cmd/location.py
+++ b/lib/spack/spack/test/cmd/location.py
@@ -97,7 +97,7 @@ def test_location_with_active_env(mutable_mock_env_path):
assert location("--env").strip() == e.path
-def test_location_env_flag_interference(mutable_mock_env_path, tmpdir):
+def test_location_env_flag_interference(mutable_mock_env_path):
"""
Tests that specifying an active environment using `spack -e x location ...`
does not interfere with the location command flags.
diff --git a/lib/spack/spack/test/cmd/mirror.py b/lib/spack/spack/test/cmd/mirror.py
index 0b9697976f..1ae38d5d98 100644
--- a/lib/spack/spack/test/cmd/mirror.py
+++ b/lib/spack/spack/test/cmd/mirror.py
@@ -36,8 +36,8 @@ def test_regression_8083(tmpdir, capfd, mock_packages, mock_fetch, config):
@pytest.mark.regression("12345")
-def test_mirror_from_env(tmpdir, mock_packages, mock_fetch, config, mutable_mock_env_path):
- mirror_dir = str(tmpdir)
+def test_mirror_from_env(tmp_path, mock_packages, mock_fetch, config, mutable_mock_env_path):
+ mirror_dir = str(tmp_path / "mirror")
env_name = "test"
env("create", env_name)
diff --git a/lib/spack/spack/test/config.py b/lib/spack/spack/test/config.py
index 3137d727a5..8191fabc9a 100644
--- a/lib/spack/spack/test/config.py
+++ b/lib/spack/spack/test/config.py
@@ -467,7 +467,7 @@ full_padded_string = os.path.join(os.sep + "path", os.sep.join(reps))[:MAX_PADDE
],
)
def test_parse_install_tree(config_settings, expected, mutable_config):
- expected_root = expected[0] or spack.store.default_install_tree_root
+ expected_root = expected[0] or spack.store.DEFAULT_INSTALL_TREE_ROOT
expected_unpadded_root = expected[1] or expected_root
expected_proj = expected[2] or spack.directory_layout.default_projections
@@ -522,7 +522,7 @@ def test_parse_install_tree(config_settings, expected, mutable_config):
],
)
def test_parse_install_tree_padded(config_settings, expected, mutable_config):
- expected_root = expected[0] or spack.store.default_install_tree_root
+ expected_root = expected[0] or spack.store.DEFAULT_INSTALL_TREE_ROOT
expected_unpadded_root = expected[1] or expected_root
expected_proj = expected[2] or spack.directory_layout.default_projections
@@ -1230,21 +1230,21 @@ def test_default_install_tree(monkeypatch):
def test_local_config_can_be_disabled(working_env):
os.environ["SPACK_DISABLE_LOCAL_CONFIG"] = "true"
- cfg = spack.config._config()
+ cfg = spack.config.create()
assert "defaults" in cfg.scopes
assert "system" not in cfg.scopes
assert "site" in cfg.scopes
assert "user" not in cfg.scopes
os.environ["SPACK_DISABLE_LOCAL_CONFIG"] = ""
- cfg = spack.config._config()
+ cfg = spack.config.create()
assert "defaults" in cfg.scopes
assert "system" not in cfg.scopes
assert "site" in cfg.scopes
assert "user" not in cfg.scopes
del os.environ["SPACK_DISABLE_LOCAL_CONFIG"]
- cfg = spack.config._config()
+ cfg = spack.config.create()
assert "defaults" in cfg.scopes
assert "system" in cfg.scopes
assert "site" in cfg.scopes
diff --git a/lib/spack/spack/test/config_values.py b/lib/spack/spack/test/config_values.py
index 163634ea4e..618d2c40a8 100644
--- a/lib/spack/spack/test/config_values.py
+++ b/lib/spack/spack/test/config_values.py
@@ -13,12 +13,7 @@ import spack.store
@pytest.mark.usefixtures("mock_packages")
def test_set_install_hash_length(hash_length, mutable_config, tmpdir):
mutable_config.set("config:install_hash_length", hash_length)
- mutable_config.set("config:install_tree", {"root": str(tmpdir)})
- # The call below is to reinitialize the directory layout associated
- # with the store according to the configuration changes above (i.e.
- # with the shortened hash)
- store = spack.store._store()
- with spack.store.use_store(store):
+ with spack.store.use_store(str(tmpdir)):
spec = spack.spec.Spec("libelf").concretized()
prefix = spec.prefix
hash_str = prefix.rsplit("-")[-1]
@@ -28,14 +23,7 @@ def test_set_install_hash_length(hash_length, mutable_config, tmpdir):
@pytest.mark.usefixtures("mock_packages")
def test_set_install_hash_length_upper_case(mutable_config, tmpdir):
mutable_config.set("config:install_hash_length", 5)
- mutable_config.set(
- "config:install_tree", {"root": str(tmpdir), "projections": {"all": "{name}-{HASH}"}}
- )
- # The call below is to reinitialize the directory layout associated
- # with the store according to the configuration changes above (i.e.
- # with the shortened hash and projection)
- store = spack.store._store()
- with spack.store.use_store(store):
+ with spack.store.use_store(str(tmpdir), extra_data={"projections": {"all": "{name}-{HASH}"}}):
spec = spack.spec.Spec("libelf").concretized()
prefix = spec.prefix
hash_str = prefix.rsplit("-")[-1]
diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py
index 2582d1255d..2840f4dd78 100644
--- a/lib/spack/spack/test/conftest.py
+++ b/lib/spack/spack/test/conftest.py
@@ -950,7 +950,7 @@ def disable_compiler_execution(monkeypatch, request):
@pytest.fixture(scope="function")
-def install_mockery(temporary_store, config, mock_packages):
+def install_mockery(temporary_store, mutable_config, mock_packages):
"""Hooks a fake install directory, DB, and stage directory into Spack."""
# We use a fake package, so temporarily disable checksumming
with spack.config.override("config:checksum", False):
@@ -968,8 +968,9 @@ def install_mockery(temporary_store, config, mock_packages):
@pytest.fixture(scope="function")
-def temporary_store(tmpdir):
+def temporary_store(tmpdir, request):
"""Hooks a temporary empty store for the test function."""
+ ensure_configuration_fixture_run_before(request)
temporary_store_path = tmpdir.join("opt")
with spack.store.use_store(str(temporary_store_path)) as s:
yield s
@@ -1536,13 +1537,12 @@ def mock_svn_repository(tmpdir_factory):
@pytest.fixture(scope="function")
-def mutable_mock_env_path(tmpdir_factory, mutable_config):
+def mutable_mock_env_path(tmp_path, mutable_config, monkeypatch):
"""Fixture for mocking the internal spack environments directory."""
- saved_path = ev.environment.default_env_path
- mock_path = tmpdir_factory.mktemp("mock-env-path")
- ev.environment.default_env_path = str(mock_path)
- yield mock_path
- ev.environment.default_env_path = saved_path
+ mock_path = tmp_path / "mock-env-path"
+ mutable_config.set("config:environments_root", str(mock_path))
+ monkeypatch.setattr(ev.environment, "default_env_path", str(mock_path))
+ return mock_path
@pytest.fixture()
@@ -1938,3 +1938,12 @@ def shell_as(shell):
# restore old shell if one was set
if _shell:
os.environ["SPACK_SHELL"] = _shell
+
+
+@pytest.fixture()
+def nullify_globals(request, monkeypatch):
+ ensure_configuration_fixture_run_before(request)
+ monkeypatch.setattr(spack.config, "config", None)
+ monkeypatch.setattr(spack.caches, "misc_cache", None)
+ monkeypatch.setattr(spack.repo, "path", None)
+ monkeypatch.setattr(spack.store, "store", None)
diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py
index 368b2c1b72..05c61c325a 100644
--- a/lib/spack/spack/test/database.py
+++ b/lib/spack/spack/test/database.py
@@ -1060,8 +1060,18 @@ def test_error_message_when_using_too_new_db(database, monkeypatch):
back to an older version of Spack. This test ensures that the error message for a too
new database version stays comprehensible across refactoring of the database code.
"""
- monkeypatch.setattr(spack.database, "_db_version", vn.Version("0"))
+ monkeypatch.setattr(spack.database, "_DB_VERSION", vn.Version("0"))
with pytest.raises(
spack.database.InvalidDatabaseVersionError, match="you need a newer Spack version"
):
spack.database.Database(database.root)._read()
+
+
+@pytest.mark.parametrize(
+ "lock_cfg",
+ [spack.database.NO_LOCK, spack.database.NO_TIMEOUT, spack.database.DEFAULT_LOCK_CFG, None],
+)
+def test_database_construction_doesnt_use_globals(tmpdir, config, nullify_globals, lock_cfg):
+ lock_cfg = lock_cfg or spack.database.lock_configuration(config)
+ db = spack.database.Database(str(tmpdir), lock_cfg=lock_cfg)
+ assert os.path.exists(db.database_directory)
diff --git a/lib/spack/spack/test/install.py b/lib/spack/spack/test/install.py
index 63dcd47f4c..8b1e91671d 100644
--- a/lib/spack/spack/test/install.py
+++ b/lib/spack/spack/test/install.py
@@ -288,17 +288,19 @@ def install_upstream(tmpdir_factory, gen_mock_layout, install_mockery):
mock_db_root = str(tmpdir_factory.mktemp("mock_db_root"))
prepared_db = spack.database.Database(mock_db_root)
upstream_layout = gen_mock_layout("/a/")
+ spack.config.config.push_scope(
+ spack.config.InternalConfigScope(
+ name="install-upstream-fixture",
+ data={"upstreams": {"mock1": {"install_tree": prepared_db.root}}},
+ )
+ )
def _install_upstream(*specs):
for spec_str in specs:
s = spack.spec.Spec(spec_str).concretized()
prepared_db.add(s, upstream_layout)
-
downstream_root = str(tmpdir_factory.mktemp("mock_downstream_db_root"))
- db_for_test = spack.database.Database(downstream_root, upstream_dbs=[prepared_db])
- store = spack.store.Store(downstream_root)
- store.db = db_for_test
- return store, upstream_layout
+ return downstream_root, upstream_layout
return _install_upstream
@@ -307,8 +309,8 @@ def test_installed_upstream_external(install_upstream, mock_fetch):
"""Check that when a dependency package is recorded as installed in
an upstream database that it is not reinstalled.
"""
- s, _ = install_upstream("externaltool")
- with spack.store.use_store(s):
+ store_root, _ = install_upstream("externaltool")
+ with spack.store.use_store(store_root):
dependent = spack.spec.Spec("externaltest")
dependent.concretize()
@@ -326,8 +328,8 @@ def test_installed_upstream(install_upstream, mock_fetch):
"""Check that when a dependency package is recorded as installed in
an upstream database that it is not reinstalled.
"""
- s, upstream_layout = install_upstream("dependency-install")
- with spack.store.use_store(s):
+ store_root, upstream_layout = install_upstream("dependency-install")
+ with spack.store.use_store(store_root):
dependency = spack.spec.Spec("dependency-install").concretized()
dependent = spack.spec.Spec("dependent-install").concretized()
@@ -379,9 +381,8 @@ def test_install_prefix_collision_fails(config, mock_fetch, mock_packages, tmpdi
Test that different specs with coinciding install prefixes will fail
to install.
"""
- projections = {"all": "all-specs-project-to-this-prefix"}
- store = spack.store.Store(str(tmpdir), projections=projections)
- with spack.store.use_store(store):
+ projections = {"projections": {"all": "all-specs-project-to-this-prefix"}}
+ with spack.store.use_store(str(tmpdir), extra_data=projections):
with spack.config.override("config:checksum", False):
pkg_a = Spec("libelf@0.8.13").concretized().package
pkg_b = Spec("libelf@0.8.12").concretized().package
diff --git a/lib/spack/spack/test/repo.py b/lib/spack/spack/test/repo.py
index e5c5b00693..58f7351f30 100644
--- a/lib/spack/spack/test/repo.py
+++ b/lib/spack/spack/test/repo.py
@@ -123,12 +123,11 @@ def test_relative_import_spack_packages_as_python_modules(mock_packages):
def test_all_virtual_packages_have_default_providers():
"""All virtual packages must have a default provider explicitly set."""
- defaults = spack.config.get("packages", scope="defaults")
+ configuration = spack.config.create()
+ defaults = configuration.get("packages", scope="defaults")
default_providers = defaults["all"]["providers"]
providers = spack.repo.path.provider_index.providers
- default_providers_filename = spack.config.config.scopes["defaults"].get_section_filename(
- "packages"
- )
+ default_providers_filename = configuration.scopes["defaults"].get_section_filename("packages")
for provider in providers:
assert provider in default_providers, (
"all providers must have a default in %s" % default_providers_filename
@@ -167,3 +166,18 @@ def test_repo_dump_virtuals(tmpdir, mutable_mock_repo, mock_packages, ensure_deb
captured = capsys.readouterr()[1]
assert "Installing" in captured
assert "package.py" in os.listdir(tmpdir), "Expected the virtual's package to be copied"
+
+
+@pytest.mark.parametrize(
+ "repo_paths,namespaces",
+ [
+ ([spack.paths.packages_path], ["builtin"]),
+ ([spack.paths.mock_packages_path], ["builtin.mock"]),
+ ([spack.paths.packages_path, spack.paths.mock_packages_path], ["builtin", "builtin.mock"]),
+ ([spack.paths.mock_packages_path, spack.paths.packages_path], ["builtin.mock", "builtin"]),
+ ],
+)
+def test_repository_construction_doesnt_use_globals(nullify_globals, repo_paths, namespaces):
+ repo_path = spack.repo.RepoPath(*repo_paths)
+ assert len(repo_path.repos) == len(namespaces)
+ assert [x.namespace for x in repo_path.repos] == namespaces
diff --git a/lib/spack/spack/test/sbang.py b/lib/spack/spack/test/sbang.py
index 7a1645d7fe..cd380fd35d 100644
--- a/lib/spack/spack/test/sbang.py
+++ b/lib/spack/spack/test/sbang.py
@@ -368,7 +368,7 @@ def test_install_sbang_too_long(tmpdir):
add = min(num_extend, 255)
long_path = os.path.join(long_path, "e" * add)
num_extend -= add
- with spack.store.use_store(spack.store.Store(long_path)):
+ with spack.store.use_store(long_path):
with pytest.raises(sbang.SbangPathError) as exc_info:
sbang.sbang_install_path()
diff --git a/lib/spack/spack/test/util/spack_lock_wrapper.py b/lib/spack/spack/test/util/spack_lock_wrapper.py
index cee8b29232..0909ce06c3 100644
--- a/lib/spack/spack/test/util/spack_lock_wrapper.py
+++ b/lib/spack/spack/test/util/spack_lock_wrapper.py
@@ -17,25 +17,19 @@ import spack.util.lock as lk
def test_disable_locking(tmpdir):
"""Ensure that locks do no real locking when disabled."""
lock_path = str(tmpdir.join("lockfile"))
+ lock = lk.Lock(lock_path, enable=False)
- old_value = spack.config.get("config:locks")
+ lock.acquire_read()
+ assert not os.path.exists(lock_path)
- with spack.config.override("config:locks", False):
- lock = lk.Lock(lock_path)
+ lock.acquire_write()
+ assert not os.path.exists(lock_path)
- lock.acquire_read()
- assert not os.path.exists(lock_path)
+ lock.release_write()
+ assert not os.path.exists(lock_path)
- lock.acquire_write()
- assert not os.path.exists(lock_path)
-
- lock.release_write()
- assert not os.path.exists(lock_path)
-
- lock.release_read()
- assert not os.path.exists(lock_path)
-
- assert old_value == spack.config.get("config:locks")
+ lock.release_read()
+ assert not os.path.exists(lock_path)
# "Disable" mock_stage fixture to avoid subdir permissions issues on cleanup.
diff --git a/lib/spack/spack/test/util/spack_yaml.py b/lib/spack/spack/test/util/spack_yaml.py
index 9beccdec71..f7c8851d27 100644
--- a/lib/spack/spack/test/util/spack_yaml.py
+++ b/lib/spack/spack/test/util/spack_yaml.py
@@ -86,11 +86,13 @@ def test_config_blame_defaults():
if match:
filename, line, key, val = match.groups()
line = int(line)
- val = val.strip("'\"")
+ lines = get_file_lines(filename)
+ assert key in lines[line]
+ val = val.strip("'\"")
+ printed_line = lines[line]
if val.lower() in ("true", "false"):
val = val.lower()
+ printed_line = printed_line.lower()
- lines = get_file_lines(filename)
- assert key in lines[line], filename
- assert val in lines[line]
+ assert val in printed_line, filename
diff --git a/lib/spack/spack/util/lock.py b/lib/spack/spack/util/lock.py
index 7d8036bb35..eb5aaa57d6 100644
--- a/lib/spack/spack/util/lock.py
+++ b/lib/spack/spack/util/lock.py
@@ -17,7 +17,6 @@ from llnl.util.lock import LockUpgradeError # noqa: F401
from llnl.util.lock import ReadTransaction # noqa: F401
from llnl.util.lock import WriteTransaction # noqa: F401
-import spack.config
import spack.error
import spack.paths
@@ -31,8 +30,13 @@ class Lock(llnl.util.lock.Lock):
"""
def __init__(self, *args, **kwargs):
- super().__init__(*args, **kwargs)
- self._enable = spack.config.get("config:locks", sys.platform != "win32")
+ enable_lock = kwargs.pop("enable", None)
+ if sys.platform == "win32":
+ enable_lock = False
+ elif sys.platform != "win32" and enable_lock is None:
+ enable_lock = True
+ self._enable = enable_lock
+ super(Lock, self).__init__(*args, **kwargs)
def _lock(self, op, timeout=0):
if self._enable: