# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

"""Components that manage Spack's installation tree.

An install tree, or "build store", consists of two parts:

  1. A package database that tracks what is installed.
  2. A directory layout that determines how the installations
     are laid out.

The store contains all the install prefixes for packages installed by
Spack.  The simplest store could just contain prefixes named by DAG hash,
but we use a fancier directory layout to make browsing the store and
debugging easier.
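
Most code accesses the store through the lazily created module-level singleton
rather than constructing one directly, e.g. (illustrative)::

    import spack.store

    db = spack.store.STORE.db          # database of installed specs
    layout = spack.store.STORE.layout  # directory layout for install prefixes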

"""
import contextlib
import os
import pathlib
import re
import uuid
from typing import Any, Callable, Dict, Generator, List, Optional, Union

import llnl.util.lang
from llnl.util import tty

import spack.config
import spack.database
import spack.directory_layout
import spack.error
import spack.paths
import spack.spec
import spack.util.path

#: default installation root, relative to the Spack install path
DEFAULT_INSTALL_TREE_ROOT = os.path.join(spack.paths.opt_path, "spack")


ConfigurationType = Union["spack.config.Configuration", "llnl.util.lang.Singleton"]


def parse_install_tree(config_dict):
    """Parse config settings and return values relevant to the store object.

    Arguments:
        config_dict (dict): dictionary of config values, as returned from
            spack.config.get('config')

    Returns:
        (tuple): triple of the install tree root, the unpadded install tree
            root (before padding was applied), and the projections for the
            install tree

    This function also encapsulates backwards compatibility for deprecated
    config values (e.g. ``install_path_scheme``) that are now parsed as part
    of ``install_tree``.
    """
    # The following two configs are equivalent, the first being the old format
    # and the second the new format. The new format is also more flexible.

    # config:
    #   install_tree: /path/to/root$padding:128
    #   install_path_scheme: '{name}-{version}'

    # config:
    #   install_tree:
    #     root: /path/to/root
    #     padded_length: 128
    #     projections:
    #       all: '{name}-{version}'
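    #
    # Both forms above (with illustrative paths) parse to the same result:
    #
    #   root, unpadded_root, projections = parse_install_tree(config_dict)
    #   # root          -> "/path/to/root" padded out to 128 characters
    #   # unpadded_root -> "/path/to/root"
    #   # projections   -> {"all": "{name}-{version}"}
    #
    # with the old (string) form additionally emitting a deprecation warning.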

    install_tree = config_dict.get("install_tree", {})

    padded_length = False
    if isinstance(install_tree, str):
        tty.warn("Using deprecated format for configuring install_tree")
        unpadded_root = install_tree
        unpadded_root = spack.util.path.canonicalize_path(unpadded_root)
        # construct projection from previous values for backwards compatibility
        all_projection = config_dict.get(
            "install_path_scheme", spack.directory_layout.default_projections["all"]
        )

        projections = {"all": all_projection}
    else:
        unpadded_root = install_tree.get("root", DEFAULT_INSTALL_TREE_ROOT)
        unpadded_root = spack.util.path.canonicalize_path(unpadded_root)

        padded_length = install_tree.get("padded_length", False)
        if padded_length is True:
            padded_length = spack.util.path.get_system_path_max()
            padded_length -= spack.util.path.SPACK_MAX_INSTALL_PATH_LENGTH

        projections = install_tree.get("projections", spack.directory_layout.default_projections)

        path_scheme = config_dict.get("install_path_scheme", None)
        if path_scheme:
            tty.warn(
                "Deprecated config value 'install_path_scheme' ignored"
                " when using new install_tree syntax"
            )

    # Handle backwards compatibility for padding
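    # For example, a root of "/path/to/root$padding:128" (illustrative) yields
    # unpadded_root == "/path/to/root" and padded_length == 128; a bare
    # "$padding" or "${padding}" falls back to the system maximum path length
    # minus SPACK_MAX_INSTALL_PATH_LENGTH.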
    old_pad = re.search(r"\$padding(:\d+)?|\${padding(:\d+)?}", unpadded_root)
    if old_pad:
        if padded_length:
            msg = "Ignoring deprecated padding option in install_tree root "
            msg += "because new syntax padding is present."
            tty.warn(msg)
        else:
            unpadded_root = unpadded_root.replace(old_pad.group(0), "")
            if old_pad.group(1) or old_pad.group(2):
                length_group = 2 if "{" in old_pad.group(0) else 1
                padded_length = int(old_pad.group(length_group)[1:])
            else:
                padded_length = spack.util.path.get_system_path_max()
                padded_length -= spack.util.path.SPACK_MAX_INSTALL_PATH_LENGTH

    unpadded_root = unpadded_root.rstrip(os.path.sep)

    if padded_length:
        root = spack.util.path.add_padding(unpadded_root, padded_length)
        if len(root) != padded_length:
            msg = "Cannot pad %s to %s characters." % (root, padded_length)
            msg += " It is already %s characters long" % len(root)
            tty.warn(msg)
    else:
        root = unpadded_root

    return root, unpadded_root, projections


class Store:
    """A store is a path full of installed Spack packages.

    Stores consist of packages installed according to a ``DirectoryLayout``, along with a database
    of their contents.

    The directory layout controls what paths look like and how Spack ensures that each unique spec
    gets its own unique directory (or not, though we don't recommend that).

    The database is a single file that caches metadata for the entire Spack installation. It
    prevents us from having to spider the install tree to figure out what's there.

    The store is also able to lock installation prefixes, and to mark installation failures.

    Args:
        root: path to the root of the install tree
        unpadded_root: path to the root of the install tree without padding. The sbang script has
            to be installed here to work with padded roots
        projections: mapping that describes how to construct a path to a package prefix in this
            store, e.g. ``{"all": "{name}-{version}"}``
        hash_length: length of the hashes used in the directory layout. Spec hash suffixes will be
            truncated to this length
        upstreams: optional list of upstream databases
        lock_cfg: lock configuration for the database
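
    Example (illustrative, with a hypothetical root)::

        store = Store(root="/opt/spack-store", hash_length=7)
        # store.db is the package database, store.layout the directory layout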
    """

    def __init__(
        self,
        root: str,
        unpadded_root: Optional[str] = None,
        projections: Optional[Dict[str, str]] = None,
        hash_length: Optional[int] = None,
        upstreams: Optional[List[spack.database.Database]] = None,
        lock_cfg: spack.database.LockConfiguration = spack.database.NO_LOCK,
    ) -> None:
        self.root = root
        self.unpadded_root = unpadded_root or root
        self.projections = projections
        self.hash_length = hash_length
        self.upstreams = upstreams
        self.lock_cfg = lock_cfg
        self.db = spack.database.Database(root, upstream_dbs=upstreams, lock_cfg=lock_cfg)

        timeout_format_str = (
            f"{lock_cfg.package_timeout}s" if lock_cfg.package_timeout else "No timeout"
        )
        tty.debug(f"PACKAGE LOCK TIMEOUT: {timeout_format_str}")

        self.prefix_locker = spack.database.SpecLocker(
            spack.database.prefix_lock_path(root), default_timeout=lock_cfg.package_timeout
        )
        self.failure_tracker = spack.database.FailureTracker(
            self.root, default_timeout=lock_cfg.package_timeout
        )

        self.layout = spack.directory_layout.DirectoryLayout(
            root, projections=projections, hash_length=hash_length
        )

    def reindex(self) -> None:
        """Convenience function to reindex the store DB with its own layout."""
        return self.db.reindex(self.layout)

    def __reduce__(self):
        return Store, (
            self.root,
            self.unpadded_root,
            self.projections,
            self.hash_length,
            self.upstreams,
            self.lock_cfg,
        )


def create(configuration: ConfigurationType) -> Store:
    """Create a store from the configuration passed as input.

    Args:
        configuration: configuration from which to create the store.
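
    Example (illustrative)::

        store = create(spack.config.CONFIG)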
    """
    configuration = configuration or spack.config.CONFIG
    config_dict = configuration.get("config")
    root, unpadded_root, projections = parse_install_tree(config_dict)
    hash_length = configuration.get("config:install_hash_length")

    install_roots = [
        install_properties["install_tree"]
        for install_properties in configuration.get("upstreams", {}).values()
    ]
    upstreams = _construct_upstream_dbs_from_install_roots(install_roots)

    return Store(
        root=root,
        unpadded_root=unpadded_root,
        projections=projections,
        hash_length=hash_length,
        upstreams=upstreams,
        lock_cfg=spack.database.lock_configuration(configuration),
    )


def _create_global() -> Store:
    result = create(configuration=spack.config.CONFIG)
    return result


#: Singleton store instance
STORE: Union[Store, llnl.util.lang.Singleton] = llnl.util.lang.Singleton(_create_global)


def reinitialize():
    """Restore globals to the same state they would have at start-up. Return a token
    containing the state of the store before reinitialization.
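
    Example (illustrative)::

        token = reinitialize()
        try:
            ...  # exercise a fresh store singleton
        finally:
            restore(token)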
    """
    global STORE

    token = STORE
    STORE = llnl.util.lang.Singleton(_create_global)

    return token


def restore(token):
    """Restore the environment from a token returned by reinitialize"""
    global STORE
    STORE = token


def _construct_upstream_dbs_from_install_roots(
    install_roots: List[str], _test: bool = False
) -> List[spack.database.Database]:
    accumulated_upstream_dbs: List[spack.database.Database] = []
    for install_root in reversed(install_roots):
        upstream_dbs = list(accumulated_upstream_dbs)
        next_db = spack.database.Database(
            spack.util.path.canonicalize_path(install_root),
            is_upstream=True,
            upstream_dbs=upstream_dbs,
        )
        next_db._fail_when_missing_deps = _test
        next_db._read()
        accumulated_upstream_dbs.insert(0, next_db)

    return accumulated_upstream_dbs


def find(
    constraints: Union[str, List[str], List["spack.spec.Spec"]],
    multiple: bool = False,
    query_fn: Optional[Callable[[Any], List["spack.spec.Spec"]]] = None,
    **kwargs,
) -> List["spack.spec.Spec"]:
    """Returns a list of specs matching the constraints passed as inputs.

    At least one spec per constraint must match, otherwise the function
    will error with an appropriate message.

    By default, this function queries the current store, but a custom query
    function can be passed to hit any other source of concretized specs
    (e.g. a binary cache).

    The query function must accept a spec as its first argument.

    Args:
        constraints: spec(s) to be matched against installed packages
        multiple: if True multiple matches per constraint are admitted
        query_fn (Callable): query function to get matching specs. By default,
            ``spack.store.STORE.db.query``
        **kwargs: keyword arguments forwarded to the query function
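
    Example (illustrative)::

        # return every installed spec matching "zlib" in the current store
        matches = find("zlib", multiple=True)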
    """
    if isinstance(constraints, str):
        constraints = [spack.spec.Spec(constraints)]

    matching_specs: List[spack.spec.Spec] = []
    errors = []
    query_fn = query_fn or spack.store.STORE.db.query
    for spec in constraints:
        current_matches = query_fn(spec, **kwargs)

        # For each spec provided, make sure it refers to only one package.
        if not multiple and len(current_matches) > 1:
            msg_fmt = '"{0}" matches multiple packages: [{1}]'
            errors.append(msg_fmt.format(spec, ", ".join([m.format() for m in current_matches])))

        # No installed package matches the query
        if len(current_matches) == 0 and spec is not any:
            msg_fmt = '"{0}" does not match any installed packages'
            errors.append(msg_fmt.format(spec))

        matching_specs.extend(current_matches)

    if errors:
        raise MatchError(
            message="errors occurred when looking for specs in the store",
            long_message="\n".join(errors),
        )

    return matching_specs


def specfile_matches(filename: str, **kwargs) -> List["spack.spec.Spec"]:
    """Same as find but reads the query from a spec file.

    Args:
        filename: YAML or JSON file from which to read the query.
        **kwargs: keyword arguments forwarded to "find"
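
    Example (illustrative, with a hypothetical file)::

        matches = specfile_matches("zlib.spec.json", multiple=True)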
    """
    query = [spack.spec.Spec.from_specfile(filename)]
    return spack.store.find(query, **kwargs)


def ensure_singleton_created() -> None:
    """Ensures the lazily evaluated singleton is created"""
    _ = STORE.db


@contextlib.contextmanager
def use_store(
    path: Union[str, pathlib.Path], extra_data: Optional[Dict[str, Any]] = None
) -> Generator[Store, None, None]:
    """Use the store passed as argument within the context manager.

    Args:
        path: path to the store.
        extra_data: extra configuration under "config:install_tree" to be
            taken into account.

    Yields:
        Store object associated with the context manager's store
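
    Example (illustrative, with a hypothetical path)::

        with use_store("/tmp/alternative-store") as store:
            # spack.store.STORE now refers to the temporary store
            assert spack.store.STORE is store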
    """
    global STORE

    assert not isinstance(path, Store), "cannot pass a store anymore"
    scope_name = "use-store-{}".format(uuid.uuid4())
    data = {"root": str(path)}
    if extra_data:
        data.update(extra_data)

    # Swap the store with the one just constructed and return it
    ensure_singleton_created()
    spack.config.CONFIG.push_scope(
        spack.config.InternalConfigScope(name=scope_name, data={"config": {"install_tree": data}})
    )
    temporary_store = create(configuration=spack.config.CONFIG)
    original_store, STORE = STORE, temporary_store

    try:
        yield temporary_store
    finally:
        # Restore the original store
        STORE = original_store
        spack.config.CONFIG.remove_scope(scope_name=scope_name)


class MatchError(spack.error.SpackError):
    """Error occurring when trying to match specs in store against a constraint"""