summaryrefslogtreecommitdiff
path: root/lib
diff options
context:
space:
mode:
authorPeter Scheibel <scheibel1@llnl.gov>2022-04-28 10:56:26 -0700
committerGitHub <noreply@github.com>2022-04-28 10:56:26 -0700
commitbb43308c444651f560c920c248257fa9e8dc4b18 (patch)
tree20b195589fc36e522436a914c8002eab4fbc2a4f /lib
parent6a9df34abd35dc5c4292eac1da2d902a4b7f7702 (diff)
downloadspack-bb43308c444651f560c920c248257fa9e8dc4b18.tar.gz
spack-bb43308c444651f560c920c248257fa9e8dc4b18.tar.bz2
spack-bb43308c444651f560c920c248257fa9e8dc4b18.tar.xz
spack-bb43308c444651f560c920c248257fa9e8dc4b18.zip
Add command for reading JSON-based DB description (now with more tests) (#29652)
This is an amended version of https://github.com/spack/spack/pull/24894 (reverted in https://github.com/spack/spack/pull/29603). https://github.com/spack/spack/pull/24894 broke all instances of `spack external find` (namely when it is invoked without arguments/options) because it was mandating the presence of a file which most systems would not have. This allows `spack external find` to proceed if that file is not present and adds tests for this. - [x] Add a test which confirms that `spack external find` successfully reads a manifest file if present in the default manifest path --- Original commit message --- Adds `spack external read-cray-manifest`, which reads a json file that describes a set of package DAGs. The parsed results are stored directly in the database. A user can see these installed specs with `spack find` (like any installed spec). The easiest way to use them right now as dependencies is to run `spack spec ... ^/hash-of-external-package`. Changes include: * `spack external read-cray-manifest --file <path/to/file>` will add all specs described in the file to Spack's installation DB and will also install described compilers to the compilers configuration (the expected format of the file is described in this PR as well including examples of the file) * Database records now may include an "origin" (the command added in this PR registers the origin as "external-db"). In the future, it is assumed users may want to be able to treat installs registered with this command differently (e.g. 
they may want to uninstall all specs added with this command) * Hash properties are now always preserved when copying specs if the source spec is concrete * I don't think the hashes of installed-and-concrete specs should change and this was the easiest way to handle that * also specs that are concrete preserve their `.normal` property when copied (external specs may mention compilers that are not registered, and without this change they would fail in `normalize` when calling `validate_or_raise`) * it might be this should only be the case if the spec was installed - [x] Improve testing - [x] Specifically mark DB records added with this command (so that users can do something like "uninstall all packages added with `spack read-external-db`) * This is now possible with `spack uninstall --all --origin=external-db` (this will remove all specs added from manifest files) - [x] Strip variants that are listed in json entries but don't actually exist for the package
Diffstat (limited to 'lib')
-rw-r--r--lib/spack/spack/cmd/external.py91
-rw-r--r--lib/spack/spack/cmd/uninstall.py14
-rw-r--r--lib/spack/spack/compilers/__init__.py10
-rw-r--r--lib/spack/spack/cray_manifest.py165
-rw-r--r--lib/spack/spack/database.py13
-rw-r--r--lib/spack/spack/rewiring.py8
-rw-r--r--lib/spack/spack/schema/cray_manifest.py131
-rw-r--r--lib/spack/spack/solver/asp.py9
-rw-r--r--lib/spack/spack/spec.py56
-rw-r--r--lib/spack/spack/test/cmd/external.py42
-rw-r--r--lib/spack/spack/test/conftest.py27
-rw-r--r--lib/spack/spack/test/cray_manifest.py247
-rw-r--r--lib/spack/spack/test/rewiring.py2
-rw-r--r--lib/spack/spack/test/spec_dag.py2
14 files changed, 764 insertions, 53 deletions
diff --git a/lib/spack/spack/cmd/external.py b/lib/spack/spack/cmd/external.py
index 42f033d979..50d77890fd 100644
--- a/lib/spack/spack/cmd/external.py
+++ b/lib/spack/spack/cmd/external.py
@@ -5,6 +5,7 @@
from __future__ import print_function
import argparse
+import os
import sys
import llnl.util.tty as tty
@@ -13,6 +14,7 @@ import llnl.util.tty.colify as colify
import spack
import spack.cmd
import spack.cmd.common.arguments
+import spack.cray_manifest as cray_manifest
import spack.detection
import spack.error
import spack.util.environment
@@ -55,8 +57,40 @@ def setup_parser(subparser):
'list', help='list detectable packages, by repository and name'
)
+ read_cray_manifest = sp.add_parser(
+ 'read-cray-manifest', help=(
+ "consume a Spack-compatible description of externally-installed "
+ "packages, including dependency relationships"
+ )
+ )
+ read_cray_manifest.add_argument(
+ '--file', default=None,
+ help="specify a location other than the default")
+ read_cray_manifest.add_argument(
+ '--directory', default=None,
+ help="specify a directory storing a group of manifest files")
+ read_cray_manifest.add_argument(
+ '--dry-run', action='store_true', default=False,
+ help="don't modify DB with files that are read")
+ read_cray_manifest.add_argument(
+ '--fail-on-error', action='store_true',
+ help=("if a manifest file cannot be parsed, fail and report the "
+ "full stack trace")
+ )
+
def external_find(args):
+ if args.all or not (args.tags or args.packages):
+ # If the user calls 'spack external find' with no arguments, and
+ # this system has a description of installed packages, then we should
+ # consume it automatically.
+ try:
+ _collect_and_consume_cray_manifest_files()
+ except NoManifestFileError:
+ # It's fine to not find any manifest file if we are doing the
+ # search implicitly (i.e. as part of 'spack external find')
+ pass
+
# If the user didn't specify anything, search for build tools by default
if not args.tags and not args.all and not args.packages:
args.tags = ['core-packages', 'build-tools']
@@ -106,6 +140,56 @@ def external_find(args):
tty.msg('No new external packages detected')
+def external_read_cray_manifest(args):
+ _collect_and_consume_cray_manifest_files(
+ manifest_file=args.file,
+ manifest_directory=args.directory,
+ dry_run=args.dry_run,
+ fail_on_error=args.fail_on_error
+ )
+
+
+def _collect_and_consume_cray_manifest_files(
+ manifest_file=None, manifest_directory=None, dry_run=False,
+ fail_on_error=False):
+
+ manifest_files = []
+ if manifest_file:
+ manifest_files.append(manifest_file)
+
+ manifest_dirs = []
+ if manifest_directory:
+ manifest_dirs.append(manifest_directory)
+
+ if os.path.isdir(cray_manifest.default_path):
+ tty.debug(
+ "Cray manifest path {0} exists: collecting all files to read."
+ .format(cray_manifest.default_path))
+ manifest_dirs.append(cray_manifest.default_path)
+ else:
+ tty.debug("Default Cray manifest directory {0} does not exist."
+ .format(cray_manifest.default_path))
+
+ for directory in manifest_dirs:
+ for fname in os.listdir(directory):
+ manifest_files.append(os.path.join(directory, fname))
+
+ if not manifest_files:
+ raise NoManifestFileError(
+ "--file/--directory not specified, and no manifest found at {0}"
+ .format(cray_manifest.default_path))
+
+ for path in manifest_files:
+ try:
+ cray_manifest.read(path, not dry_run)
+ except (spack.compilers.UnknownCompilerError, spack.error.SpackError) as e:
+ if fail_on_error:
+ raise
+ else:
+ tty.warn("Failure reading manifest file: {0}"
+ "\n\t{1}".format(path, str(e)))
+
+
def external_list(args):
# Trigger a read of all packages, might take a long time.
list(spack.repo.path.all_packages())
@@ -117,5 +201,10 @@ def external_list(args):
def external(parser, args):
- action = {'find': external_find, 'list': external_list}
+ action = {'find': external_find, 'list': external_list,
+ 'read-cray-manifest': external_read_cray_manifest}
action[args.external_command](args)
+
+
+class NoManifestFileError(spack.error.SpackError):
+ pass
diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py
index d82cb7eb3f..ce3d6f7dab 100644
--- a/lib/spack/spack/cmd/uninstall.py
+++ b/lib/spack/spack/cmd/uninstall.py
@@ -62,9 +62,14 @@ def setup_parser(subparser):
'-a', '--all', action='store_true', dest='all',
help="remove ALL installed packages that match each supplied spec"
)
+ subparser.add_argument(
+ '--origin', dest='origin',
+ help="only remove DB records with the specified origin"
+ )
-def find_matching_specs(env, specs, allow_multiple_matches=False, force=False):
+def find_matching_specs(env, specs, allow_multiple_matches=False, force=False,
+ origin=None):
"""Returns a list of specs matching the not necessarily
concretized specs given from cli
@@ -85,8 +90,8 @@ def find_matching_specs(env, specs, allow_multiple_matches=False, force=False):
has_errors = False
for spec in specs:
install_query = [InstallStatuses.INSTALLED, InstallStatuses.DEPRECATED]
- matching = spack.store.db.query_local(spec, hashes=hashes,
- installed=install_query)
+ matching = spack.store.db.query_local(
+ spec, hashes=hashes, installed=install_query, origin=origin)
# For each spec provided, make sure it refers to only one package.
# Fail and ask user to be unambiguous if it doesn't
if not allow_multiple_matches and len(matching) > 1:
@@ -240,7 +245,8 @@ def do_uninstall(env, specs, force):
def get_uninstall_list(args, specs, env):
# Gets the list of installed specs that match the ones give via cli
# args.all takes care of the case where '-a' is given in the cli
- uninstall_list = find_matching_specs(env, specs, args.all, args.force)
+ uninstall_list = find_matching_specs(env, specs, args.all, args.force,
+ args.origin)
# Takes care of '-R'
active_dpts, inactive_dpts = installed_dependents(uninstall_list, env)
diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py
index 5aad6b2b98..73ba07237d 100644
--- a/lib/spack/spack/compilers/__init__.py
+++ b/lib/spack/spack/compilers/__init__.py
@@ -495,7 +495,8 @@ def get_compiler_duplicates(compiler_spec, arch_spec):
@llnl.util.lang.memoized
def class_for_compiler_name(compiler_name):
"""Given a compiler module name, get the corresponding Compiler class."""
- assert supported(compiler_name)
+ if not supported(compiler_name):
+ raise UnknownCompilerError(compiler_name)
# Hack to be able to call the compiler `apple-clang` while still
# using a valid python name for the module
@@ -788,6 +789,13 @@ class NoCompilersError(spack.error.SpackError):
"Spack could not find any compilers!")
+class UnknownCompilerError(spack.error.SpackError):
+ def __init__(self, compiler_name):
+ super(UnknownCompilerError, self).__init__(
+ "Spack doesn't support the requested compiler: {0}"
+ .format(compiler_name))
+
+
class NoCompilerForSpecError(spack.error.SpackError):
def __init__(self, compiler_spec, target):
super(NoCompilerForSpecError, self).__init__(
diff --git a/lib/spack/spack/cray_manifest.py b/lib/spack/spack/cray_manifest.py
new file mode 100644
index 0000000000..6b9af13a90
--- /dev/null
+++ b/lib/spack/spack/cray_manifest.py
@@ -0,0 +1,165 @@
+# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import json
+
+import jsonschema
+import six
+
+import llnl.util.tty as tty
+
+import spack.cmd
+import spack.hash_types as hash_types
+from spack.schema.cray_manifest import schema as manifest_schema
+
+#: Cray systems can store a Spack-compatible description of system
+#: packages here.
+default_path = '/opt/cray/pe/cpe-descriptive-manifest/'
+
+
+def compiler_from_entry(entry):
+ compiler_name = entry['name']
+ paths = entry['executables']
+ version = entry['version']
+ arch = entry['arch']
+ operating_system = arch['os']
+ target = arch['target']
+
+ compiler_cls = spack.compilers.class_for_compiler_name(compiler_name)
+ spec = spack.spec.CompilerSpec(compiler_cls.name, version)
+ paths = [paths.get(x, None) for x in ('cc', 'cxx', 'f77', 'fc')]
+ return compiler_cls(
+ spec, operating_system, target, paths
+ )
+
+
+def spec_from_entry(entry):
+ arch_str = ""
+ if 'arch' in entry:
+ arch_format = "arch={platform}-{os}-{target}"
+ arch_str = arch_format.format(
+ platform=entry['arch']['platform'],
+ os=entry['arch']['platform_os'],
+ target=entry['arch']['target']['name']
+ )
+
+ compiler_str = ""
+ if 'compiler' in entry:
+ compiler_format = "%{name}@{version}"
+ compiler_str = compiler_format.format(
+ name=entry['compiler']['name'],
+ version=entry['compiler']['version']
+ )
+
+ spec_format = "{name}@{version} {compiler} {arch}"
+ spec_str = spec_format.format(
+ name=entry['name'],
+ version=entry['version'],
+ compiler=compiler_str,
+ arch=arch_str
+ )
+
+ package = spack.repo.get(entry['name'])
+
+ if 'parameters' in entry:
+ variant_strs = list()
+ for name, value in entry['parameters'].items():
+ # TODO: also ensure that the variant value is valid?
+ if not (name in package.variants):
+ tty.debug("Omitting variant {0} for entry {1}/{2}"
+ .format(name, entry['name'], entry['hash'][:7]))
+ continue
+
+ # Value could be a list (of strings), boolean, or string
+ if isinstance(value, six.string_types):
+ variant_strs.append('{0}={1}'.format(name, value))
+ else:
+ try:
+ iter(value)
+ variant_strs.append(
+ '{0}={1}'.format(name, ','.join(value)))
+ continue
+ except TypeError:
+ # Not an iterable
+ pass
+ # At this point not a string or collection, check for boolean
+ if value in [True, False]:
+ bool_symbol = '+' if value else '~'
+ variant_strs.append('{0}{1}'.format(bool_symbol, name))
+ else:
+ raise ValueError(
+ "Unexpected value for {0} ({1}): {2}".format(
+ name, str(type(value)), str(value)
+ )
+ )
+ spec_str += ' ' + ' '.join(variant_strs)
+
+ spec, = spack.cmd.parse_specs(spec_str.split())
+
+ for ht in [hash_types.dag_hash, hash_types.build_hash,
+ hash_types.full_hash]:
+ setattr(spec, ht.attr, entry['hash'])
+
+ spec._concrete = True
+ spec._hashes_final = True
+ spec.external_path = entry['prefix']
+ spec.origin = 'external-db'
+ spack.spec.Spec.ensure_valid_variants(spec)
+
+ return spec
+
+
+def entries_to_specs(entries):
+ spec_dict = {}
+ for entry in entries:
+ try:
+ spec = spec_from_entry(entry)
+ spec_dict[spec._hash] = spec
+ except spack.repo.UnknownPackageError:
+ tty.debug("Omitting package {0}: no corresponding repo package"
+ .format(entry['name']))
+ except spack.error.SpackError:
+ raise
+ except Exception:
+ tty.warn("Could not parse entry: " + str(entry))
+
+ for entry in filter(lambda x: 'dependencies' in x, entries):
+ dependencies = entry['dependencies']
+ for name, properties in dependencies.items():
+ dep_hash = properties['hash']
+ deptypes = properties['type']
+ if dep_hash in spec_dict:
+ if entry['hash'] not in spec_dict:
+ continue
+ parent_spec = spec_dict[entry['hash']]
+ dep_spec = spec_dict[dep_hash]
+ parent_spec._add_dependency(dep_spec, deptypes)
+
+ return spec_dict
+
+
+def read(path, apply_updates):
+ with open(path, 'r') as json_file:
+ json_data = json.load(json_file)
+
+ jsonschema.validate(json_data, manifest_schema)
+
+ specs = entries_to_specs(json_data['specs'])
+ tty.debug("{0}: {1} specs read from manifest".format(
+ path,
+ str(len(specs))))
+ compilers = list()
+ if 'compilers' in json_data:
+ compilers.extend(compiler_from_entry(x)
+ for x in json_data['compilers'])
+ tty.debug("{0}: {1} compilers read from manifest".format(
+ path,
+ str(len(compilers))))
+ if apply_updates and compilers:
+ spack.compilers.add_compilers_to_config(
+ compilers, init_config=False)
+ if apply_updates:
+ for spec in specs.values():
+ spack.store.db.add(spec, directory_layout=None)
diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py
index 3c7258728f..6a16a83e1c 100644
--- a/lib/spack/spack/database.py
+++ b/lib/spack/spack/database.py
@@ -187,6 +187,7 @@ class InstallRecord(object):
installation_time=None,
deprecated_for=None,
in_buildcache=False,
+ origin=None
):
self.spec = spec
self.path = str(path) if path else None
@@ -196,6 +197,7 @@ class InstallRecord(object):
self.installation_time = installation_time or _now()
self.deprecated_for = deprecated_for
self.in_buildcache = in_buildcache
+ self.origin = origin
def install_type_matches(self, installed):
installed = InstallStatuses.canonicalize(installed)
@@ -217,6 +219,9 @@ class InstallRecord(object):
else:
rec_dict.update({field_name: getattr(self, field_name)})
+ if self.origin:
+ rec_dict['origin'] = self.origin
+
return rec_dict
@classmethod
@@ -1131,6 +1136,10 @@ class Database(object):
'explicit': explicit,
'installation_time': installation_time
}
+            # Commands other than 'spack install' may add specs to the DB;
+            # we record the source of an installed spec with 'origin'
+ if hasattr(spec, 'origin'):
+ extra_args['origin'] = spec.origin
self._data[key] = InstallRecord(
new_spec, path, installed, ref_count=0, **extra_args
)
@@ -1462,6 +1471,7 @@ class Database(object):
end_date=None,
hashes=None,
in_buildcache=any,
+ origin=None
):
"""Run a query on the database."""
@@ -1490,6 +1500,9 @@ class Database(object):
if hashes is not None and rec.spec.dag_hash() not in hashes:
continue
+ if origin and not (origin == rec.origin):
+ continue
+
if not rec.install_type_matches(installed):
continue
diff --git a/lib/spack/spack/rewiring.py b/lib/spack/spack/rewiring.py
index feeb8d1a14..c91485219b 100644
--- a/lib/spack/spack/rewiring.py
+++ b/lib/spack/spack/rewiring.py
@@ -95,7 +95,8 @@ def rewire_node(spec, explicit):
spec.prefix)
relocate.relocate_text_bin(binaries=bins_to_relocate,
prefixes=prefix_to_prefix)
- # copy package into place (shutil.copytree)
+ # Copy package into place, except for spec.json (because spec.json
+ # describes the old spec and not the new spliced spec).
shutil.copytree(os.path.join(tempdir, spec.dag_hash()), spec.prefix,
ignore=shutil.ignore_patterns('spec.json',
'install_manifest.json'))
@@ -104,7 +105,10 @@ def rewire_node(spec, explicit):
spec.build_spec.prefix,
spec.prefix)
shutil.rmtree(tempdir)
- # handle all metadata changes; don't copy over spec.json file in .spack/
+ # Above, we did not copy spec.json: instead, here we write the new
+ # (spliced) spec into spec.json, without this, Database.add would fail on
+ # the next line (because it checks the spec.json in the prefix against the
+ # spec being added to look for mismatches)
spack.store.layout.write_spec(spec, spack.store.layout.spec_file_path(spec))
# add to database, not sure about explicit
spack.store.db.add(spec, spack.store.layout, explicit=explicit)
diff --git a/lib/spack/spack/schema/cray_manifest.py b/lib/spack/spack/schema/cray_manifest.py
new file mode 100644
index 0000000000..9613bbf1dc
--- /dev/null
+++ b/lib/spack/spack/schema/cray_manifest.py
@@ -0,0 +1,131 @@
+# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+"""Schema for Cray descriptive manifest: this describes a set of
+ installed packages on the system and also specifies dependency
+ relationships between them (so this provides more information than
+ external entries in packages configuration).
+
+ This does not specify a configuration - it is an input format
+ that is consumed and transformed into Spack DB records.
+"""
+
+schema = {
+ "$schema": "http://json-schema.org/schema#",
+ "title": "CPE manifest schema",
+ "type": "object",
+ "additionalProperties": False,
+ "properties": {
+ "_meta": {
+ "type": "object",
+ "additionalProperties": False,
+ "properties": {
+ "file-type": {"type": "string", "minLength": 1},
+ "cpe-version": {"type": "string", "minLength": 1},
+ "system-type": {"type": "string", "minLength": 1},
+ "schema-version": {"type": "string", "minLength": 1},
+ }
+ },
+ "compilers": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "additionalProperties": False,
+ "properties": {
+ "name": {"type": "string", "minLength": 1},
+ "version": {"type": "string", "minLength": 1},
+ "prefix": {"type": "string", "minLength": 1},
+ "executables": {
+ "type": "object",
+ "additionalProperties": False,
+ "properties": {
+ "cc": {"type": "string", "minLength": 1},
+ "cxx": {"type": "string", "minLength": 1},
+ "fc": {"type": "string", "minLength": 1}
+ }
+ },
+ "arch": {
+ "type": "object",
+ "required": ["os", "target"],
+ "additionalProperties": False,
+ "properties": {
+ "os": {"type": "string", "minLength": 1},
+ "target": {"type": "string", "minLength": 1}
+ }
+ }
+ }
+ }
+ },
+ "specs": {
+ "type": "array",
+ "items": {
+ "type": "object",
+ "required": [
+ "name",
+ "version",
+ "arch",
+ "compiler",
+ "prefix",
+ "hash"],
+ "additionalProperties": False,
+ "properties": {
+ "name": {"type": "string", "minLength": 1},
+ "version": {"type": "string", "minLength": 1},
+ "arch": {
+ "type": "object",
+ "required": ["platform", "platform_os", "target"],
+                "additionalProperties": False,
+ "properties": {
+ "platform": {"type": "string", "minLength": 1},
+ "platform_os": {"type": "string", "minLength": 1},
+ "target": {
+ "type": "object",
+ "additionalProperties": False,
+ "required": ["name"],
+ "properties": {
+ "name": {"type": "string", "minLength": 1}
+ }
+ }
+ }
+ },
+ "compiler": {
+ "type": "object",
+ "required": ["name", "version"],
+ "additionalProperties": False,
+ "properties": {
+ "name": {"type": "string", "minLength": 1},
+ "version": {"type": "string", "minLength": 1}
+ }
+ },
+ "dependencies": {
+ "type": "object",
+ "patternProperties": {
+ "\\w[\\w-]*": {
+ "type": "object",
+ "required": ["hash"],
+ "additionalProperties": False,
+ "properties": {
+ "hash": {"type": "string", "minLength": 1},
+ "type": {
+ "type": "array",
+ "items": {
+ "type": "string", "minLength": 1}
+ }
+ }
+ }
+ }
+ },
+ "prefix": {
+ "type": "string", "minLength": 1},
+ "rpm": {"type": "string", "minLength": 1},
+ "hash": {"type": "string", "minLength": 1},
+ "parameters": {
+ "type": "object",
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/lib/spack/spack/solver/asp.py b/lib/spack/spack/solver/asp.py
index d3a29fe250..52f1b8f67a 100644
--- a/lib/spack/spack/solver/asp.py
+++ b/lib/spack/spack/solver/asp.py
@@ -1526,9 +1526,12 @@ class SpackSolverSetup(object):
continue
if strict and s.compiler not in cspecs:
- raise spack.concretize.UnavailableCompilerVersionError(
- s.compiler
- )
+ if not s.concrete:
+ raise spack.concretize.UnavailableCompilerVersionError(
+ s.compiler
+ )
+ # Allow unknown compilers to exist if the associated spec
+ # is already built
else:
cspecs.add(s.compiler)
self.gen.fact(fn.allow_compiler(
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index c37d3e754e..5189844b63 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -2943,7 +2943,7 @@ class Spec(object):
if a list of names activate them for the packages in the list,
if True activate 'test' dependencies for all packages.
"""
- clone = self.copy(caches=True)
+ clone = self.copy()
clone.concretize(tests=tests)
return clone
@@ -3242,8 +3242,8 @@ class Spec(object):
"Attempting to normalize anonymous spec")
# Set _normal and _concrete to False when forced
- if force:
- self._mark_concrete(False)
+ if force and not self._concrete:
+ self._normal = False
if self._normal:
return False
@@ -3712,7 +3712,7 @@ class Spec(object):
return patches
- def _dup(self, other, deps=True, cleardeps=True, caches=None):
+ def _dup(self, other, deps=True, cleardeps=True):
"""Copy the spec other into self. This is an overwriting
copy. It does not copy any dependents (parents), but by default
copies dependencies.
@@ -3727,10 +3727,6 @@ class Spec(object):
cleardeps (bool): if True clears the dependencies of ``self``,
before possibly copying the dependencies of ``other`` onto
``self``
- caches (bool or None): preserve cached fields such as
- ``_normal``, ``_hash``, and ``_dunder_hash``. By
- default this is ``False`` if DAG structure would be
- changed by the copy, ``True`` if it's an exact copy.
Returns:
True if ``self`` changed because of the copy operation,
@@ -3781,12 +3777,6 @@ class Spec(object):
self.extra_attributes = other.extra_attributes
self.namespace = other.namespace
- # Cached fields are results of expensive operations.
- # If we preserved the original structure, we can copy them
- # safely. If not, they need to be recomputed.
- if caches is None:
- caches = (deps is True or deps == dp.all_deptypes)
-
# If we copy dependencies, preserve DAG structure in the new spec
if deps:
# If caller restricted deptypes to be copied, adjust that here.
@@ -3794,29 +3784,31 @@ class Spec(object):
deptypes = dp.all_deptypes
if isinstance(deps, (tuple, list)):
deptypes = deps
- self._dup_deps(other, deptypes, caches)
+ self._dup_deps(other, deptypes)
self._concrete = other._concrete
self._hashes_final = other._hashes_final
- if caches:
+ if self._concrete:
self._hash = other._hash
self._build_hash = other._build_hash
self._dunder_hash = other._dunder_hash
- self._normal = other._normal
+ self._normal = True
self._full_hash = other._full_hash
self._package_hash = other._package_hash
else:
self._hash = None
self._build_hash = None
self._dunder_hash = None
+ # Note, we could use other._normal if we are copying all deps, but
+ # always set it False here to avoid the complexity of checking
self._normal = False
self._full_hash = None
self._package_hash = None
return changed
- def _dup_deps(self, other, deptypes, caches):
+ def _dup_deps(self, other, deptypes):
def spid(spec):
return id(spec)
@@ -3827,11 +3819,11 @@ class Spec(object):
if spid(edge.parent) not in new_specs:
new_specs[spid(edge.parent)] = edge.parent.copy(
- deps=False, caches=caches
+ deps=False
)
if spid(edge.spec) not in new_specs:
- new_specs[spid(edge.spec)] = edge.spec.copy(deps=False, caches=caches)
+ new_specs[spid(edge.spec)] = edge.spec.copy(deps=False)
new_specs[spid(edge.parent)].add_dependency_edge(
new_specs[spid(edge.spec)], edge.deptypes
@@ -4709,16 +4701,16 @@ class Spec(object):
return False
return True
- self_nodes = dict((s.name, s.copy(deps=False, caches=True))
+ self_nodes = dict((s.name, s.copy(deps=False))
for s in self.traverse(root=True)
if from_self(s.name, transitive))
if transitive:
- other_nodes = dict((s.name, s.copy(deps=False, caches=True))
+ other_nodes = dict((s.name, s.copy(deps=False))
for s in other.traverse(root=True))
else:
# NOTE: Does not fully validate providers; loader races possible
- other_nodes = dict((s.name, s.copy(deps=False, caches=True))
+ other_nodes = dict((s.name, s.copy(deps=False))
for s in other.traverse(root=True)
if s is other or s.name not in self)
@@ -4751,23 +4743,12 @@ class Spec(object):
for dep in ret.traverse(root=True, order='post'):
opposite = other_nodes if dep.name in self_nodes else self_nodes
if any(name in dep for name in opposite.keys()):
- # Record whether hashes are already cached
- # So we don't try to compute a hash from insufficient
- # provenance later
- has_build_hash = getattr(dep, ht.build_hash.name, None)
- has_full_hash = getattr(dep, ht.full_hash.name, None)
-
# package hash cannot be affected by splice
dep.clear_cached_hashes(ignore=['package_hash'])
- # Since this is a concrete spec, we want to make sure hashes
- # are cached writing specs only writes cached hashes in case
- # the spec is too old to have full provenance for these hashes,
- # so we can't rely on doing it at write time.
- if has_build_hash:
- _ = dep.build_hash()
- if has_full_hash:
- _ = dep.full_hash()
+ dep.build_hash()
+ dep.full_hash()
+ dep.dag_hash()
return nodes[self.name]
@@ -4779,6 +4760,7 @@ class Spec(object):
if h.attr not in ignore:
if hasattr(self, h.attr):
setattr(self, h.attr, None)
+ self._dunder_hash = None
def __hash__(self):
# If the spec is concrete, we leverage the DAG hash and just use
diff --git a/lib/spack/spack/test/cmd/external.py b/lib/spack/spack/test/cmd/external.py
index 3a3c4face3..bfdcb0b4eb 100644
--- a/lib/spack/spack/test/cmd/external.py
+++ b/lib/spack/spack/test/cmd/external.py
@@ -167,6 +167,48 @@ def test_find_external_cmd_full_repo(
assert {'spec': 'find-externals1@1.foo', 'prefix': prefix} in pkg_externals
+def test_find_external_no_manifest(
+ mutable_config, working_env, mock_executable, mutable_mock_repo,
+ _platform_executables, monkeypatch):
+ """The user runs 'spack external find'; the default path for storing
+ manifest files does not exist. Ensure that the command does not
+ fail.
+ """
+ monkeypatch.setenv('PATH', '')
+ monkeypatch.setattr(spack.cray_manifest, 'default_path',
+ os.path.join('a', 'path', 'that', 'doesnt', 'exist'))
+ external('find')
+
+
+def test_find_external_empty_default_manifest_dir(
+ mutable_config, working_env, mock_executable, mutable_mock_repo,
+ _platform_executables, tmpdir, monkeypatch):
+ """The user runs 'spack external find'; the default path for storing
+ manifest files exists but is empty. Ensure that the command does not
+ fail.
+ """
+ empty_manifest_dir = str(tmpdir.mkdir('manifest_dir'))
+ monkeypatch.setenv('PATH', '')
+ monkeypatch.setattr(spack.cray_manifest, 'default_path',
+ empty_manifest_dir)
+ external('find')
+
+
+def test_find_external_nonempty_default_manifest_dir(
+ mutable_database, mutable_mock_repo,
+ _platform_executables, tmpdir, monkeypatch,
+ directory_with_manifest):
+ """The user runs 'spack external find'; the default manifest directory
+ contains a manifest file. Ensure that the specs are read.
+ """
+ monkeypatch.setenv('PATH', '')
+ monkeypatch.setattr(spack.cray_manifest, 'default_path',
+ str(directory_with_manifest))
+ external('find')
+ specs = spack.store.db.query('hwloc')
+ assert any(x.dag_hash() == 'hwlocfakehashaaa' for x in specs)
+
+
def test_find_external_merge(mutable_config, mutable_mock_repo):
"""Check that 'spack find external' doesn't overwrite an existing spec
entry in packages.yaml.
diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py
index 1a1b15f469..11a24ba044 100644
--- a/lib/spack/spack/test/conftest.py
+++ b/lib/spack/spack/test/conftest.py
@@ -43,6 +43,7 @@ import spack.repo
import spack.stage
import spack.store
import spack.subprocess_context
+import spack.test.cray_manifest
import spack.util.executable
import spack.util.gpg
import spack.util.spack_yaml as syaml
@@ -809,7 +810,16 @@ def database(mock_store, mock_packages, config):
@pytest.fixture(scope='function')
-def mutable_database(database, _store_dir_and_cache):
+def database_mutable_config(mock_store, mock_packages, mutable_config,
+ monkeypatch):
+ """This activates the mock store, packages, AND config."""
+ with spack.store.use_store(str(mock_store)) as store:
+ yield store.db
+ store.db.last_seen_verifier = ''
+
+
+@pytest.fixture(scope='function')
+def mutable_database(database_mutable_config, _store_dir_and_cache):
"""Writeable version of the fixture, restored to its initial state
after each test.
"""
@@ -817,7 +827,7 @@ def mutable_database(database, _store_dir_and_cache):
store_path, store_cache = _store_dir_and_cache
store_path.join('.spack-db').chmod(mode=0o755, rec=1)
- yield database
+ yield database_mutable_config
# Restore the initial state by copying the content of the cache back into
# the store and making the database read-only
@@ -1635,6 +1645,19 @@ def brand_new_binary_cache():
spack.binary_distribution._binary_index)
+@pytest.fixture
+def directory_with_manifest(tmpdir):
+ """Create a manifest file in a directory. Used by 'spack external'.
+ """
+ with tmpdir.as_cwd():
+ test_db_fname = 'external-db.json'
+ with open(test_db_fname, 'w') as db_file:
+ json.dump(spack.test.cray_manifest.create_manifest_content(),
+ db_file)
+
+ yield str(tmpdir)
+
+
@pytest.fixture()
def noncyclical_dir_structure(tmpdir):
"""
diff --git a/lib/spack/spack/test/cray_manifest.py b/lib/spack/spack/test/cray_manifest.py
new file mode 100644
index 0000000000..d93990c677
--- /dev/null
+++ b/lib/spack/spack/test/cray_manifest.py
@@ -0,0 +1,247 @@
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+import json
+
+import pytest
+
+import spack
+import spack.cray_manifest as cray_manifest
+from spack.cray_manifest import compiler_from_entry, entries_to_specs
+
# Example manifest entry for a single spec ("packagex"), exactly as it
# would appear in a Cray manifest file.  test_compatibility checks that
# JsonSpecEntry.to_dict() produces this same structure.
example_x_json_str = """\
{
  "name": "packagex",
  "hash": "hash-of-x",
  "prefix": "/path/to/packagex-install/",
  "version": "1.0",
  "arch": {
    "platform": "linux",
    "platform_os": "centos8",
    "target": {
      "name": "haswell"
    }
  },
  "compiler": {
    "name": "gcc",
    "version": "10.2.0"
  },
  "dependencies": {
    "packagey": {
      "hash": "hash-of-y",
      "type": ["link"]
    }
  },
  "parameters": {
    "precision": ["double", "float"]
  }
}
"""


# Example manifest entry describing a compiler installation; parsed by
# compiler_from_entry in test_compiler_from_entry.
example_compiler_entry = """\
{
  "name": "gcc",
  "prefix": "/path/to/compiler/",
  "version": "7.5.0",
  "arch": {
    "os": "centos8",
    "target": "x86_64"
  },
  "executables": {
    "cc": "/path/to/compiler/cc",
    "cxx": "/path/to/compiler/cxx",
    "fc": "/path/to/compiler/fc"
  }
}
"""
+
+
class JsonSpecEntry(object):
    """Test helper modeling one spec record in the manifest JSON format."""

    # Field order matches the manifest schema; to_dict renders these.
    _FIELDS = ('name', 'hash', 'prefix', 'version', 'arch', 'compiler',
               'dependencies', 'parameters')

    def __init__(self, name, hash, prefix, version, arch, compiler,
                 dependencies, parameters):
        self.name = name
        self.hash = hash
        self.prefix = prefix
        self.version = version
        self.arch = arch
        self.compiler = compiler
        self.dependencies = dependencies
        self.parameters = parameters

    def to_dict(self):
        """Render this entry as the dict structure used in manifest files."""
        return {field: getattr(self, field) for field in self._FIELDS}

    def as_dependency(self, deptypes):
        """Return a ``(name, info)`` pair suitable for inclusion in another
        entry's 'dependencies' mapping."""
        dep_info = {'hash': self.hash, 'type': list(deptypes)}
        return (self.name, dep_info)
+
+
class JsonArchEntry(object):
    """Test helper modeling the 'arch' sub-object of a manifest entry."""

    def __init__(self, platform, os, target):
        self.platform = platform
        self.os = os
        self.target = target

    def to_dict(self):
        """Render as the nested dict shape used by the manifest schema
        (the target name is wrapped in its own sub-dict)."""
        arch = {'platform': self.platform, 'platform_os': self.os}
        arch['target'] = {'name': self.target}
        return arch
+
+
class JsonCompilerEntry(object):
    """Test helper modeling the 'compiler' sub-object of a manifest entry."""

    def __init__(self, name, version):
        self.name = name
        self.version = version

    def to_dict(self):
        """Render as the flat name/version dict used by the manifest."""
        return dict(name=self.name, version=self.version)
+
+
# Arch and compiler dicts shared by every example spec entry generated in
# this module: all of them report linux/centos8/haswell built with
# gcc 10.2.0.
_common_arch = JsonArchEntry(
    platform='linux',
    os='centos8',
    target='haswell'
).to_dict()


_common_compiler = JsonCompilerEntry(
    name='gcc',
    version='10.2.0'
).to_dict()
+
+
def test_compatibility():
    """Make sure that JsonSpecEntry outputs the expected JSON structure
    by comparing it with JSON parsed from an example string. This
    ensures that the testing objects like JsonSpecEntry produce the
    same JSON structure as the expected file format.
    """
    packagey = JsonSpecEntry(
        name='packagey',
        hash='hash-of-y',
        prefix='/path/to/packagey-install/',
        version='1.0',
        arch=_common_arch,
        compiler=_common_compiler,
        dependencies={},
        parameters={},
    )

    # packagex link-depends on packagey, mirroring the example string.
    dependencies = dict([packagey.as_dependency(deptypes=['link'])])
    packagex = JsonSpecEntry(
        name='packagex',
        hash='hash-of-x',
        prefix='/path/to/packagex-install/',
        version='1.0',
        arch=_common_arch,
        compiler=_common_compiler,
        dependencies=dependencies,
        parameters={'precision': ['double', 'float']},
    )

    assert packagex.to_dict() == json.loads(example_x_json_str)
+
+
def test_compiler_from_entry():
    """Smoke test: an example compiler entry is accepted without raising.

    (No assertions are made on the result; this only checks that
    compiler_from_entry can parse the documented format.)
    """
    entry = json.loads(example_compiler_entry)
    compiler_from_entry(entry)
+
+
def generate_openmpi_entries():
    """Generate two example JSON entries that refer to an OpenMPI
    installation and a hwloc dependency.

    Returns a list ``[openmpi, hwloc]`` of JsonSpecEntry objects.
    """
    # The hashes are padded with 'a' at the end so their length aligns
    # with 8-byte boundaries (required for base-32 decoding).
    shared = {'arch': _common_arch, 'compiler': _common_compiler}

    hwloc = JsonSpecEntry(
        name='hwloc',
        hash='hwlocfakehashaaa',
        prefix='/path/to/hwloc-install/',
        version='2.0.3',
        dependencies={},
        parameters={},
        **shared
    )

    # 'missing_variant' is guaranteed not to appear in the OpenMPI
    # package: package descriptions carrying unknown variants must still
    # be usable.
    openmpi_parameters = {
        'internal-hwloc': False,
        'fabrics': ['psm'],
        'missing_variant': True,
    }
    openmpi = JsonSpecEntry(
        name='openmpi',
        hash='openmpifakehasha',
        prefix='/path/to/openmpi-install/',
        version='4.1.0',
        dependencies=dict([hwloc.as_dependency(deptypes=['link'])]),
        parameters=openmpi_parameters,
        **shared
    )

    return [openmpi, hwloc]
+
+
def test_spec_conversion():
    """Given JSON entries, check that we can form a set of Specs
    including dependency references.
    """
    entry_dicts = [entry.to_dict() for entry in generate_openmpi_entries()]
    specs = entries_to_specs(entry_dicts)

    # Exactly one of the resulting specs is openmpi, and its hwloc
    # dependency is reachable from it.
    openmpi_specs = [s for s in specs.values() if s.name == 'openmpi']
    assert len(openmpi_specs) == 1
    assert openmpi_specs[0]['hwloc']
+
+
def create_manifest_content():
    """Return a dict with the top-level structure of a manifest file:
    the example openmpi/hwloc spec entries and an empty compiler list."""
    spec_dicts = [entry.to_dict() for entry in generate_openmpi_entries()]
    return {'specs': spec_dicts, 'compilers': []}
+
+
def test_read_cray_manifest(
        tmpdir, mutable_config, mock_packages, mutable_database):
    """Check that (a) we can read the cray manifest and add it to the Spack
    Database and (b) we can concretize specs based on that.
    """
    if spack.config.get('config:concretizer') == 'clingo':
        # Fix: the original message fragments joined as "about  OS"
        # (double space) because both fragments carried a space.
        pytest.skip("The ASP-based concretizer is currently picky about "
                    "OS matching and will fail.")

    with tmpdir.as_cwd():
        # Write an example manifest, then ingest it.
        # NOTE(review): confirm the meaning of the positional boolean
        # against cray_manifest.read's signature.
        test_db_fname = 'external-db.json'
        with open(test_db_fname, 'w') as db_file:
            json.dump(create_manifest_content(), db_file)
        cray_manifest.read(test_db_fname, True)

        # (a) the externally-described installs are now queryable in the
        # Spack database by name and by hash.
        query_specs = spack.store.db.query('openmpi')
        assert any(x.dag_hash() == 'openmpifakehasha' for x in query_specs)

        # (b) a dependent spec can be concretized against the ingested
        # package by referring to it via hash (^/...).
        concretized_specs = spack.cmd.parse_specs(
            'depends-on-openmpi %gcc@4.5.0 arch=test-redhat6-x86_64'
            ' ^/openmpifakehasha'.split(),
            concretize=True)
        assert concretized_specs[0]['hwloc'].dag_hash() == 'hwlocfakehashaaa'
diff --git a/lib/spack/spack/test/rewiring.py b/lib/spack/spack/test/rewiring.py
index e15be34fdc..f883c222a5 100644
--- a/lib/spack/spack/test/rewiring.py
+++ b/lib/spack/spack/test/rewiring.py
@@ -23,7 +23,7 @@ else:
@pytest.mark.requires_executables(*args)
@pytest.mark.parametrize('transitive', [True, False])
-def test_rewire(mock_fetch, install_mockery, transitive):
+def test_rewire_db(mock_fetch, install_mockery, transitive):
"""Tests basic rewiring without binary executables."""
spec = Spec('splice-t^splice-h~foo').concretized()
dep = Spec('splice-h+foo').concretized()
diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py
index dcf830fd78..78555340a7 100644
--- a/lib/spack/spack/test/spec_dag.py
+++ b/lib/spack/spack/test/spec_dag.py
@@ -602,8 +602,6 @@ class TestSpecDag(object):
assert orig == copy
assert orig.eq_dag(copy)
- assert orig._normal == copy._normal
- assert orig._concrete == copy._concrete
# ensure no shared nodes bt/w orig and copy.
orig_ids = set(id(s) for s in orig.traverse())