author     Scott Wittenburg <scott.wittenburg@kitware.com>  2022-01-28 09:49:15 -0700
committer  Todd Gamblin <tgamblin@llnl.gov>                 2022-05-13 10:45:12 -0700
commit     f6e7c0b740035a4d4eb5e3cbb356384dd295b8bb (patch)
tree       e28f11e44d961b66653fa3501625d92e53060ecf
parent     512645ff2efbf522aefc55e9384a9f5025c98d91 (diff)
hashes: remove full_hash and build_hash from spack
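Before this change a concrete spec carried three hashes: the old dag_hash (link/run deps only), build_hash (adds build deps), and full_hash (adds the package hash). After it, dag_hash covers build/link/run deps plus the package hash and is the single permanent identifier. A minimal sketch of the resulting Spec API, assuming a Spack checkout on PYTHONPATH (illustrative, not part of the patch):

    from spack.spec import Spec

    s = Spec('zlib').concretized()
    print(s.dag_hash())      # permanent id: build/link/run deps + package hash
    print(s.runtime_hash())  # the old dag hash (no build deps), kept so data
                             # written by older spack versions can be read
    # s.full_hash() and s.build_hash() no longer exist after this commit.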
-rw-r--r--  lib/spack/spack/binary_distribution.py      | 274
-rw-r--r--  lib/spack/spack/ci.py                       |  14
-rw-r--r--  lib/spack/spack/cmd/buildcache.py           |  11
-rw-r--r--  lib/spack/spack/cmd/ci.py                   |  57
-rw-r--r--  lib/spack/spack/cmd/install.py              |   8
-rw-r--r--  lib/spack/spack/cmd/solve.py                |   4
-rw-r--r--  lib/spack/spack/cmd/spec.py                 |   4
-rw-r--r--  lib/spack/spack/cmd/test.py                 |   5
-rw-r--r--  lib/spack/spack/database.py                 |  15
-rw-r--r--  lib/spack/spack/directory_layout.py         |  10
-rw-r--r--  lib/spack/spack/environment/environment.py  |  14
-rw-r--r--  lib/spack/spack/hash_types.py               |  23
-rw-r--r--  lib/spack/spack/installer.py                |  16
-rw-r--r--  lib/spack/spack/monitor.py                  |  26
-rw-r--r--  lib/spack/spack/schema/spec.py              |   3
-rw-r--r--  lib/spack/spack/spec.py                     | 110
-rw-r--r--  lib/spack/spack/test/bindist.py             |  82
-rw-r--r--  lib/spack/spack/test/cmd/ci.py              |  81
-rw-r--r--  lib/spack/spack/test/cmd/env.py             |  10
-rw-r--r--  lib/spack/spack/test/cmd/install.py         |  91
-rw-r--r--  lib/spack/spack/test/cmd/test.py            |   8
-rw-r--r--  lib/spack/spack/test/concretize.py          |   3
-rw-r--r--  lib/spack/spack/test/directory_layout.py    |   8
-rw-r--r--  lib/spack/spack/test/monitor.py             |   8
-rw-r--r--  lib/spack/spack/test/spec_semantics.py      | 102
-rw-r--r--  lib/spack/spack/test/spec_syntax.py         |  14
-rw-r--r--  lib/spack/spack/test/spec_yaml.py           |  40
-rwxr-xr-x  share/spack/spack-completion.bash           |   4
28 files changed, 333 insertions, 712 deletions
diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py
index a3e1f77f84..381a6af39f 100644
--- a/lib/spack/spack/binary_distribution.py
+++ b/lib/spack/spack/binary_distribution.py
@@ -182,7 +182,6 @@ class BinaryCacheIndex(object):
for indexed_spec in spec_list:
dag_hash = indexed_spec.dag_hash()
- full_hash = indexed_spec._full_hash
if dag_hash not in self._mirrors_for_spec:
self._mirrors_for_spec[dag_hash] = []
@@ -190,11 +189,8 @@ class BinaryCacheIndex(object):
for entry in self._mirrors_for_spec[dag_hash]:
# A binary mirror can only have one spec per DAG hash, so
# if we already have an entry under this DAG hash for this
- # mirror url, we may need to replace the spec associated
- # with it (but only if it has a different full_hash).
+ # mirror url, we're done.
if entry['mirror_url'] == mirror_url:
- if full_hash and full_hash != entry['spec']._full_hash:
- entry['spec'] = indexed_spec
break
else:
self._mirrors_for_spec[dag_hash].append({
@@ -403,6 +399,11 @@ class BinaryCacheIndex(object):
hash_fetch_url = url_util.join(
mirror_url, _build_cache_relative_path, 'index.json.hash')
+ if not web_util.url_exists(index_fetch_url):
+ # A binary mirror is not required to have an index, so avoid
+ # raising FetchCacheError in that case.
+ return False
+
old_cache_key = None
fetched_hash = None
@@ -790,35 +791,6 @@ def generate_package_index(cache_prefix):
tty.debug('Retrieving spec descriptor files from {0} to build index'.format(
cache_prefix))
- all_mirror_specs = {}
-
- for file_path in file_list:
- try:
- spec_url = url_util.join(cache_prefix, file_path)
- tty.debug('fetching {0}'.format(spec_url))
- _, _, spec_file = web_util.read_from_url(spec_url)
- spec_file_contents = codecs.getreader('utf-8')(spec_file).read()
- # Need full spec.json name or this gets confused with index.json.
- if spec_url.endswith('.json'):
- spec_dict = sjson.load(spec_file_contents)
- s = Spec.from_json(spec_file_contents)
- elif spec_url.endswith('.yaml'):
- spec_dict = syaml.load(spec_file_contents)
- s = Spec.from_yaml(spec_file_contents)
- all_mirror_specs[s.dag_hash()] = {
- 'spec_url': spec_url,
- 'spec': s,
- 'num_deps': len(list(s.traverse(root=False))),
- 'binary_cache_checksum': spec_dict['binary_cache_checksum'],
- 'buildinfo': spec_dict['buildinfo'],
- }
- except (URLError, web_util.SpackWebError) as url_err:
- tty.error('Error reading specfile: {0}'.format(file_path))
- tty.error(url_err)
-
- sorted_specs = sorted(all_mirror_specs.keys(),
- key=lambda k: all_mirror_specs[k]['num_deps'])
-
tmpdir = tempfile.mkdtemp()
db_root_dir = os.path.join(tmpdir, 'db_root')
db = spack_db.Database(None, db_dir=db_root_dir,
@@ -826,63 +798,33 @@ def generate_package_index(cache_prefix):
record_fields=['spec', 'ref_count', 'in_buildcache'])
try:
- tty.debug('Specs sorted by number of dependencies:')
- for dag_hash in sorted_specs:
- spec_record = all_mirror_specs[dag_hash]
- s = spec_record['spec']
- num_deps = spec_record['num_deps']
- tty.debug(' {0}/{1} -> {2}'.format(
- s.name, dag_hash[:7], num_deps))
- if num_deps > 0:
- # Check each of this spec's dependencies (which we have already
- # processed), as they are the source of truth for their own
- # full hash. If the full hash we have for any deps does not
- # match what those deps have themselves, then we need to splice
- # this spec with those deps, and push this spliced spec
- # (spec.json file) back to the mirror, as well as update the
- # all_mirror_specs dictionary with this spliced spec.
- to_splice = []
- for dep in s.dependencies():
- dep_dag_hash = dep.dag_hash()
- if dep_dag_hash in all_mirror_specs:
- true_dep = all_mirror_specs[dep_dag_hash]['spec']
- if true_dep.full_hash() != dep.full_hash():
- to_splice.append(true_dep)
-
- if to_splice:
- tty.debug(' needs the following deps spliced:')
- for true_dep in to_splice:
- tty.debug(' {0}/{1}'.format(
- true_dep.name, true_dep.dag_hash()[:7]))
- s = s.splice(true_dep, True)
-
- # Push this spliced spec back to the mirror
- spliced_spec_dict = s.to_dict(hash=ht.full_hash)
- for key in ['binary_cache_checksum', 'buildinfo']:
- spliced_spec_dict[key] = spec_record[key]
-
- temp_json_path = os.path.join(tmpdir, 'spliced.spec.json')
- with open(temp_json_path, 'w') as fd:
- fd.write(sjson.dump(spliced_spec_dict))
-
- spliced_spec_url = spec_record['spec_url']
- web_util.push_to_url(
- temp_json_path, spliced_spec_url, keep_original=False)
- tty.debug(' spliced and wrote {0}'.format(
- spliced_spec_url))
- spec_record['spec'] = s
-
- db.add(s, None)
- db.mark(s, 'in_buildcache', True)
-
- # Now that we have fixed any old specfiles that might have had the wrong
- # full hash for their dependencies, we can generate the index, compute
- # the hash, and push those files to the mirror.
+ for file_path in file_list:
+ try:
+ spec_url = url_util.join(cache_prefix, file_path)
+ tty.debug('fetching {0}'.format(spec_url))
+ _, _, spec_file = web_util.read_from_url(spec_url)
+ spec_file_contents = codecs.getreader('utf-8')(spec_file).read()
+ # Need full spec.json name or this gets confused with index.json.
+ if spec_url.endswith('.json'):
+ spec_dict = sjson.load(spec_file_contents)
+ s = Spec.from_json(spec_file_contents)
+ elif spec_url.endswith('.yaml'):
+ spec_dict = syaml.load(spec_file_contents)
+ s = Spec.from_yaml(spec_file_contents)
+ if s:
+ db.add(s, None)
+ db.mark(s, 'in_buildcache', True)
+ except (URLError, web_util.SpackWebError) as url_err:
+ tty.error('Error reading specfile: {0}'.format(file_path))
+ tty.error(url_err)
+
+ # Now generate the index, compute its hash, and push the two files to
+ # the mirror.
index_json_path = os.path.join(db_root_dir, 'index.json')
with open(index_json_path, 'w') as f:
db._write_to_file(f)
- # Read the index back in and compute it's hash
+ # Read the index back in and compute its hash
with open(index_json_path) as f:
index_string = f.read()
index_hash = compute_hash(index_string)
@@ -1610,16 +1552,14 @@ def install_single_spec(spec, allow_root=False, unsigned=False, force=False):
install_root_node(node, allow_root=allow_root, unsigned=unsigned, force=force)
-def try_direct_fetch(spec, full_hash_match=False, mirrors=None):
+def try_direct_fetch(spec, mirrors=None):
"""
Try to find the spec directly on the configured mirrors
"""
deprecated_specfile_name = tarball_name(spec, '.spec.yaml')
specfile_name = tarball_name(spec, '.spec.json')
specfile_is_json = True
- lenient = not full_hash_match
found_specs = []
- spec_full_hash = spec.full_hash()
for mirror in spack.mirror.MirrorCollection(mirrors=mirrors).values():
buildcache_fetch_url_yaml = url_util.join(
@@ -1649,29 +1589,21 @@ def try_direct_fetch(spec, full_hash_match=False, mirrors=None):
fetched_spec = Spec.from_yaml(specfile_contents)
fetched_spec._mark_concrete()
- # Do not recompute the full hash for the fetched spec, instead just
- # read the property.
- if lenient or fetched_spec._full_hash == spec_full_hash:
- found_specs.append({
- 'mirror_url': mirror.fetch_url,
- 'spec': fetched_spec,
- })
+ found_specs.append({
+ 'mirror_url': mirror.fetch_url,
+ 'spec': fetched_spec,
+ })
return found_specs
-def get_mirrors_for_spec(spec=None, full_hash_match=False,
- mirrors_to_check=None, index_only=False):
+def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
"""
Check if concrete spec exists on mirrors and return a list
indicating the mirrors on which it can be found
Args:
spec (spack.spec.Spec): The spec to look for in binary mirrors
- full_hash_match (bool): If True, only includes mirrors where the spec
- full hash matches the locally computed full hash of the ``spec``
- argument. If False, any mirror which has a matching DAG hash
- is included in the results.
mirrors_to_check (dict): Optionally override the configured mirrors
with the mirrors in this dictionary.
index_only (bool): Do not attempt direct fetching of ``spec.json``
@@ -1688,29 +1620,14 @@ def get_mirrors_for_spec(spec=None, full_hash_match=False,
tty.debug("No Spack mirrors are currently configured")
return {}
- results = []
- lenient = not full_hash_match
- spec_full_hash = spec.full_hash()
-
- def filter_candidates(candidate_list):
- filtered_candidates = []
- for candidate in candidate_list:
- candidate_full_hash = candidate['spec']._full_hash
- if lenient or spec_full_hash == candidate_full_hash:
- filtered_candidates.append(candidate)
- return filtered_candidates
-
- candidates = binary_index.find_built_spec(spec)
- if candidates:
- results = filter_candidates(candidates)
+ results = binary_index.find_built_spec(spec)
# Maybe we just didn't have the latest information from the mirror, so
# try to fetch directly, unless we are only considering the indices.
if not results and not index_only:
- results = try_direct_fetch(spec,
- full_hash_match=full_hash_match,
- mirrors=mirrors_to_check)
-
+ results = try_direct_fetch(spec, mirrors=mirrors_to_check)
+ # We found a spec by the direct fetch approach, so we might as
+ # well add it to our mapping.
if results:
binary_index.update_spec(spec, results)
@@ -1860,124 +1777,35 @@ def push_keys(*mirrors, **kwargs):
shutil.rmtree(tmpdir)
-def needs_rebuild(spec, mirror_url, rebuild_on_errors=False):
+def needs_rebuild(spec, mirror_url):
if not spec.concrete:
raise ValueError('spec must be concrete to check against mirror')
pkg_name = spec.name
pkg_version = spec.version
-
pkg_hash = spec.dag_hash()
- pkg_full_hash = spec.full_hash()
- tty.debug('Checking {0}-{1}, dag_hash = {2}, full_hash = {3}'.format(
- pkg_name, pkg_version, pkg_hash, pkg_full_hash))
+ tty.debug('Checking {0}-{1}, dag_hash = {2}'.format(
+ pkg_name, pkg_version, pkg_hash))
tty.debug(spec.tree())
# Try to retrieve the specfile directly, based on the known
# format of the name, in order to determine if the package
# needs to be rebuilt.
cache_prefix = build_cache_prefix(mirror_url)
- specfile_is_json = True
specfile_name = tarball_name(spec, '.spec.json')
- deprecated_specfile_name = tarball_name(spec, '.spec.yaml')
specfile_path = os.path.join(cache_prefix, specfile_name)
- deprecated_specfile_path = os.path.join(cache_prefix,
- deprecated_specfile_name)
-
- result_of_error = 'Package ({0}) will {1}be rebuilt'.format(
- spec.short_spec, '' if rebuild_on_errors else 'not ')
-
- try:
- _, _, spec_file = web_util.read_from_url(specfile_path)
- except (URLError, web_util.SpackWebError) as url_err:
- try:
- _, _, spec_file = web_util.read_from_url(deprecated_specfile_path)
- specfile_is_json = False
- except (URLError, web_util.SpackWebError) as url_err_y:
- err_msg = [
- 'Unable to determine whether {0} needs rebuilding,',
- ' caught exception attempting to read from {1} or {2}.',
- ]
- tty.error(''.join(err_msg).format(
- spec.short_spec,
- specfile_path,
- deprecated_specfile_path))
- tty.debug(url_err)
- tty.debug(url_err_y)
- tty.warn(result_of_error)
- return rebuild_on_errors
-
- spec_file_contents = codecs.getreader('utf-8')(spec_file).read()
- if not spec_file_contents:
- tty.error('Reading {0} returned nothing'.format(
- specfile_path if specfile_is_json else deprecated_specfile_path))
- tty.warn(result_of_error)
- return rebuild_on_errors
-
- spec_dict = (sjson.load(spec_file_contents)
- if specfile_is_json else syaml.load(spec_file_contents))
-
- try:
- nodes = spec_dict['spec']['nodes']
- except KeyError:
- # Prior node dict format omitted 'nodes' key
- nodes = spec_dict['spec']
- name = spec.name
-
- # In the old format:
- # The "spec" key represents a list of objects, each with a single
- # key that is the package name. While the list usually just contains
- # a single object, we iterate over the list looking for the object
- # with the name of this concrete spec as a key, out of an abundance
- # of caution.
- # In format version 2:
- # ['spec']['nodes'] is still a list of objects, but with a
- # multitude of keys. The list will commonly contain many objects, and in the
- # case of build specs, it is highly likely that the same name will occur
- # once as the actual package, and then again as the build provenance of that
- # same package. Hence format version 2 matches on the dag hash, not name.
- if nodes and 'name' not in nodes[0]:
- # old style
- cached_pkg_specs = [item[name] for item in nodes if name in item]
- elif nodes and spec_dict['spec']['_meta']['version'] == 2:
- cached_pkg_specs = [item for item in nodes
- if item[ht.dag_hash.name] == spec.dag_hash()]
- cached_target = cached_pkg_specs[0] if cached_pkg_specs else None
-
- # If either the full_hash didn't exist in the specfile, or it
- # did, but didn't match the one we computed locally, then we should
- # just rebuild. This can be simplified once the dag_hash and the
- # full_hash become the same thing.
- rebuild = False
-
- if not cached_target:
- reason = 'did not find spec in specfile contents'
- rebuild = True
- elif ht.full_hash.name not in cached_target:
- reason = 'full_hash was missing from remote specfile'
- rebuild = True
- else:
- full_hash = cached_target[ht.full_hash.name]
- if full_hash != pkg_full_hash:
- reason = 'hash mismatch, remote = {0}, local = {1}'.format(
- full_hash, pkg_full_hash)
- rebuild = True
-
- if rebuild:
- tty.msg('Rebuilding {0}, reason: {1}'.format(
- spec.short_spec, reason))
- tty.msg(spec.tree())
- return rebuild
+ # Only check for the presence of the json version of the spec. If the
+ # mirror only has the yaml version, or doesn't have the spec at all, we
+ # need to rebuild.
+ return not web_util.url_exists(specfile_path)
-def check_specs_against_mirrors(mirrors, specs, output_file=None,
- rebuild_on_errors=False):
+def check_specs_against_mirrors(mirrors, specs, output_file=None):
"""Check all the given specs against buildcaches on the given mirrors and
- determine if any of the specs need to be rebuilt. Reasons for needing to
- rebuild include binary cache for spec isn't present on a mirror, or it is
- present but the full_hash has changed since last time spec was built.
+ determine if any of the specs need to be rebuilt. Specs need to be rebuilt
+ when no spec file with their hash exists on the mirror.
Arguments:
mirrors (dict): Mirrors to check against
@@ -1985,8 +1813,6 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None,
output_file (str): Path to output file to be written. If provided,
mirrors with missing or out-of-date specs will be formatted as a
JSON object and written to this file.
- rebuild_on_errors (bool): Treat any errors encountered while
- checking specs as a signal to rebuild package.
Returns: 1 if any spec was out-of-date on any mirror, 0 otherwise.
@@ -1998,7 +1824,7 @@ def check_specs_against_mirrors(mirrors, specs, output_file=None,
rebuild_list = []
for spec in specs:
- if needs_rebuild(spec, mirror.fetch_url, rebuild_on_errors):
+ if needs_rebuild(spec, mirror.fetch_url):
rebuild_list.append({
'short_spec': spec.short_spec,
'hash': spec.dag_hash()
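With full_hash gone, the mirror-lookup entry points above lose their full_hash_match knobs. A short usage sketch of the simplified API, using the signatures from this diff (the mirror URL is hypothetical):

    import spack.binary_distribution as bindist
    from spack.spec import Spec

    s = Spec('zlib').concretized()
    # index_only=True consults only cached buildcache indices, no direct fetch
    for match in bindist.get_mirrors_for_spec(s, index_only=True):
        print(match['mirror_url'], match['spec'])
    # needs_rebuild() is now just a presence check for the spec.json file:
    rebuild = bindist.needs_rebuild(s, 'file:///tmp/mirror')  # hypothetical URL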
diff --git a/lib/spack/spack/ci.py b/lib/spack/spack/ci.py
index 9419869232..447b14c584 100644
--- a/lib/spack/spack/ci.py
+++ b/lib/spack/spack/ci.py
@@ -399,7 +399,7 @@ def compute_spec_deps(spec_list, check_index_only=False):
continue
up_to_date_mirrors = bindist.get_mirrors_for_spec(
- spec=s, full_hash_match=True, index_only=check_index_only)
+ spec=s, index_only=check_index_only)
skey = spec_deps_key(s)
spec_labels[skey] = {
@@ -801,7 +801,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
max_needs_job = ''
# If this is configured, spack will fail "spack ci generate" if it
- # generates any full hash which exists under the broken specs url.
+ # generates any hash which exists under the broken specs url.
broken_spec_urls = None
if broken_specs_url:
if broken_specs_url.startswith('http'):
@@ -829,9 +829,8 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
root_spec = spec_record['rootSpec']
pkg_name = pkg_name_from_spec_label(spec_label)
release_spec = root_spec[pkg_name]
- release_spec_full_hash = release_spec.full_hash()
release_spec_dag_hash = release_spec.dag_hash()
- release_spec_build_hash = release_spec.build_hash()
+ release_spec_runtime_hash = release_spec.runtime_hash()
if prune_untouched_packages:
if release_spec not in affected_specs:
@@ -901,8 +900,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
'SPACK_ROOT_SPEC': format_root_spec(
root_spec, main_phase, strip_compilers),
'SPACK_JOB_SPEC_DAG_HASH': release_spec_dag_hash,
- 'SPACK_JOB_SPEC_BUILD_HASH': release_spec_build_hash,
- 'SPACK_JOB_SPEC_FULL_HASH': release_spec_full_hash,
+ 'SPACK_JOB_SPEC_RUNTIME_HASH': release_spec_runtime_hash,
'SPACK_JOB_SPEC_PKG_NAME': release_spec.name,
'SPACK_COMPILER_ACTION': compiler_action
}
@@ -1006,9 +1004,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
continue
if (broken_spec_urls is not None and
- release_spec_full_hash in broken_spec_urls):
+ release_spec_dag_hash in broken_spec_urls):
known_broken_specs_encountered.append('{0} ({1})'.format(
- release_spec, release_spec_full_hash))
+ release_spec, release_spec_dag_hash))
if artifacts_root:
job_dependencies.append({
diff --git a/lib/spack/spack/cmd/buildcache.py b/lib/spack/spack/cmd/buildcache.py
index 39a20fcaf2..e82cd0b564 100644
--- a/lib/spack/spack/cmd/buildcache.py
+++ b/lib/spack/spack/cmd/buildcache.py
@@ -161,11 +161,6 @@ def setup_parser(subparser):
help=('Check single spec from json or yaml file instead of release ' +
'specs file'))
- check.add_argument(
- '--rebuild-on-error', default=False, action='store_true',
- help="Default to rebuilding packages if errors are encountered " +
- "during the process of checking whether rebuilding is needed")
-
check.set_defaults(func=check_fn)
# Download tarball and specfile
@@ -361,7 +356,7 @@ def list_fn(args):
try:
specs = bindist.update_cache_and_get_specs()
except bindist.FetchCacheError as e:
- tty.error(e)
+ tty.die(e)
if not args.allarch:
arch = spack.spec.Spec.default_arch()
@@ -430,7 +425,7 @@ def check_fn(args):
sys.exit(0)
sys.exit(bindist.check_specs_against_mirrors(
- configured_mirrors, specs, args.output_file, args.rebuild_on_error))
+ configured_mirrors, specs, args.output_file))
def download_fn(args):
@@ -486,7 +481,7 @@ def save_specfile_fn(args):
else:
root_spec = Spec(args.root_spec)
root_spec.concretize()
- root_spec_as_json = root_spec.to_json(hash=ht.build_hash)
+ root_spec_as_json = root_spec.to_json(hash=ht.dag_hash)
spec_format = 'yaml' if args.root_specfile.endswith('yaml') else 'json'
save_dependency_specfiles(
root_spec_as_json, args.specfile_dir, args.specs.split(), spec_format)
diff --git a/lib/spack/spack/cmd/ci.py b/lib/spack/spack/cmd/ci.py
index e68adf823d..9687aceac8 100644
--- a/lib/spack/spack/cmd/ci.py
+++ b/lib/spack/spack/cmd/ci.py
@@ -167,8 +167,7 @@ def ci_reindex(args):
def ci_rebuild(args):
"""Check a single spec against the remote mirror, and rebuild it from
- source if the mirror does not contain the full hash match of the spec
- as computed locally. """
+ source if the mirror does not contain a binary for its hash. """
env = spack.cmd.require_active_env(cmd_name='ci rebuild')
# Make sure the environment is "gitlab-enabled", or else there's nothing
@@ -280,8 +279,8 @@ def ci_rebuild(args):
env, root_spec, job_spec_pkg_name, compiler_action)
job_spec = spec_map[job_spec_pkg_name]
- job_spec_yaml_file = '{0}.yaml'.format(job_spec_pkg_name)
- job_spec_yaml_path = os.path.join(repro_dir, job_spec_yaml_file)
+ job_spec_json_file = '{0}.json'.format(job_spec_pkg_name)
+ job_spec_json_path = os.path.join(repro_dir, job_spec_json_file)
# To provide logs, cdash reports, etc for developer download/perusal,
# these things have to be put into artifacts. This means downstream
@@ -335,23 +334,23 @@ def ci_rebuild(args):
# using a compiler already installed on the target system).
spack_ci.configure_compilers(compiler_action)
- # Write this job's spec yaml into the reproduction directory, and it will
+ # Write this job's spec json into the reproduction directory, and it will
# also be used in the generated "spack install" command to install the spec
- tty.debug('job concrete spec path: {0}'.format(job_spec_yaml_path))
- with open(job_spec_yaml_path, 'w') as fd:
- fd.write(job_spec.to_yaml(hash=ht.build_hash))
+ tty.debug('job concrete spec path: {0}'.format(job_spec_json_path))
+ with open(job_spec_json_path, 'w') as fd:
+ fd.write(job_spec.to_json(hash=ht.dag_hash))
- # Write the concrete root spec yaml into the reproduction directory
- root_spec_yaml_path = os.path.join(repro_dir, 'root.yaml')
- with open(root_spec_yaml_path, 'w') as fd:
- fd.write(spec_map['root'].to_yaml(hash=ht.build_hash))
+ # Write the concrete root spec json into the reproduction directory
+ root_spec_json_path = os.path.join(repro_dir, 'root.json')
+ with open(root_spec_json_path, 'w') as fd:
+ fd.write(spec_map['root'].to_json(hash=ht.dag_hash))
# Write some other details to aid in reproduction into an artifact
repro_file = os.path.join(repro_dir, 'repro.json')
repro_details = {
'job_name': ci_job_name,
- 'job_spec_yaml': job_spec_yaml_file,
- 'root_spec_yaml': 'root.yaml',
+ 'job_spec_json': job_spec_json_file,
+ 'root_spec_json': 'root.json',
'ci_project_dir': ci_project_dir
}
with open(repro_file, 'w') as fd:
@@ -366,25 +365,24 @@ def ci_rebuild(args):
fd.write(b'\n')
# If we decided there should be a temporary storage mechanism, add that
- # mirror now so it's used when we check for a full hash match already
+ # mirror now so it's used when we check for a hash match already
# built for this spec.
if pipeline_mirror_url:
spack.mirror.add(spack_ci.TEMP_STORAGE_MIRROR_NAME,
pipeline_mirror_url,
cfg.default_modify_scope())
- # Check configured mirrors for a built spec with a matching full hash
- matches = bindist.get_mirrors_for_spec(
- job_spec, full_hash_match=True, index_only=False)
+ # Check configured mirrors for a built spec with a matching hash
+ matches = bindist.get_mirrors_for_spec(job_spec, index_only=False)
if matches:
- # Got a full hash match on at least one configured mirror. All
+ # Got a hash match on at least one configured mirror. All
# matches represent the fully up-to-date spec, so should all be
# equivalent. If artifacts mirror is enabled, we just pick one
# of the matches and download the buildcache files from there to
# the artifacts, so they're available to be used by dependent
# jobs in subsequent stages.
- tty.msg('No need to rebuild {0}, found full hash match at: '.format(
+ tty.msg('No need to rebuild {0}, found hash match at: '.format(
job_spec_pkg_name))
for match in matches:
tty.msg(' {0}'.format(match['mirror_url']))
@@ -403,7 +401,7 @@ def ci_rebuild(args):
# Now we are done and successful
sys.exit(0)
- # No full hash match anywhere means we need to rebuild spec
+ # No hash match anywhere means we need to rebuild spec
# Start with spack arguments
install_args = [base_arg for base_arg in CI_REBUILD_INSTALL_BASE_ARGS]
@@ -415,7 +413,6 @@ def ci_rebuild(args):
install_args.extend([
'install',
'--keep-stage',
- '--require-full-hash-match',
])
can_verify = spack_ci.can_verify_binaries()
@@ -443,8 +440,8 @@ def ci_rebuild(args):
# TODO: once we have the concrete spec registry, use the DAG hash
# to identify the spec to install, rather than the concrete spec
- # yaml file.
- install_args.extend(['-f', job_spec_yaml_path])
+ # json file.
+ install_args.extend(['-f', job_spec_json_path])
tty.debug('Installing {0} from source'.format(job_spec.name))
tty.debug('spack install arguments: {0}'.format(
@@ -477,13 +474,13 @@ def ci_rebuild(args):
tty.debug('spack install exited {0}'.format(install_exit_code))
# If a spec fails to build in a spack develop pipeline, we add it to a
- # list of known broken full hashes. This allows spack PR pipelines to
+ # list of known broken hashes. This allows spack PR pipelines to
# avoid wasting compute cycles attempting to build those hashes.
if install_exit_code == INSTALL_FAIL_CODE and spack_is_develop_pipeline:
tty.debug('Install failed on develop')
if 'broken-specs-url' in gitlab_ci:
broken_specs_url = gitlab_ci['broken-specs-url']
- dev_fail_hash = job_spec.full_hash()
+ dev_fail_hash = job_spec.dag_hash()
broken_spec_path = url_util.join(broken_specs_url, dev_fail_hash)
tty.msg('Reporting broken develop build as: {0}'.format(
broken_spec_path))
@@ -494,7 +491,7 @@ def ci_rebuild(args):
'broken-spec': {
'job-url': get_env_var('CI_JOB_URL'),
'pipeline-url': get_env_var('CI_PIPELINE_URL'),
- 'concrete-spec-yaml': job_spec.to_dict(hash=ht.full_hash)
+ 'concrete-spec-dict': job_spec.to_dict(hash=ht.dag_hash)
}
}
@@ -539,7 +536,7 @@ def ci_rebuild(args):
# per-PR mirror, if this is a PR pipeline
if buildcache_mirror_url:
spack_ci.push_mirror_contents(
- env, job_spec_yaml_path, buildcache_mirror_url, sign_binaries
+ env, job_spec_json_path, buildcache_mirror_url, sign_binaries
)
# Create another copy of that buildcache in the per-pipeline
@@ -548,14 +545,14 @@ def ci_rebuild(args):
# prefix is set)
if pipeline_mirror_url:
spack_ci.push_mirror_contents(
- env, job_spec_yaml_path, pipeline_mirror_url, sign_binaries
+ env, job_spec_json_path, pipeline_mirror_url, sign_binaries
)
# If this is a develop pipeline, check if the spec that we just built is
# on the broken-specs list. If so, remove it.
if spack_is_develop_pipeline and 'broken-specs-url' in gitlab_ci:
broken_specs_url = gitlab_ci['broken-specs-url']
- just_built_hash = job_spec.full_hash()
+ just_built_hash = job_spec.dag_hash()
broken_spec_path = url_util.join(broken_specs_url, just_built_hash)
if web_util.url_exists(broken_spec_path):
tty.msg('Removing {0} from the list of broken specs'.format(
diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py
index 974f10f047..bab245f736 100644
--- a/lib/spack/spack/cmd/install.py
+++ b/lib/spack/spack/cmd/install.py
@@ -47,7 +47,6 @@ def update_kwargs_from_args(args, kwargs):
'explicit': True, # Always true for install command
'stop_at': args.until,
'unsigned': args.unsigned,
- 'full_hash_match': args.full_hash_match,
})
kwargs.update({
@@ -118,11 +117,6 @@ which is useful for CI pipeline troubleshooting""")
dest='unsigned', default=False,
help="do not check signatures of binary packages")
subparser.add_argument(
- '--require-full-hash-match', action='store_true',
- dest='full_hash_match', default=False, help="""when installing from
-binary mirrors, do not install binary package unless the full hash of the
-remote spec matches that of the local spec""")
- subparser.add_argument(
'--show-log-on-error', action='store_true',
help="print full build log to stderr if build fails")
subparser.add_argument(
@@ -470,7 +464,7 @@ environment variables:
})
# If we are using the monitor, we send configs. and create build
- # The full_hash is the main package id, the build_hash for others
+ # The dag_hash is the main package id
if args.use_monitor and specs:
monitor.new_configuration(specs)
install_specs(args, kwargs, zip(abstract_specs, specs))
diff --git a/lib/spack/spack/cmd/solve.py b/lib/spack/spack/cmd/solve.py
index b3b9519ea5..d4dc6a37b7 100644
--- a/lib/spack/spack/cmd/solve.py
+++ b/lib/spack/spack/cmd/solve.py
@@ -151,9 +151,9 @@ def solve(parser, args):
# With -y, just print YAML to output.
if args.format == 'yaml':
# use write because to_yaml already has a newline.
- sys.stdout.write(spec.to_yaml(hash=ht.build_hash))
+ sys.stdout.write(spec.to_yaml(hash=ht.dag_hash))
elif args.format == 'json':
- sys.stdout.write(spec.to_json(hash=ht.build_hash))
+ sys.stdout.write(spec.to_json(hash=ht.dag_hash))
else:
sys.stdout.write(
spec.tree(color=sys.stdout.isatty(), **kwargs))
diff --git a/lib/spack/spack/cmd/spec.py b/lib/spack/spack/cmd/spec.py
index 8ad5f777a5..3a0398ca23 100644
--- a/lib/spack/spack/cmd/spec.py
+++ b/lib/spack/spack/cmd/spec.py
@@ -52,8 +52,8 @@ for further documentation regarding the spec syntax, see:
'-N', '--namespaces', action='store_true', default=False,
help='show fully qualified package names')
subparser.add_argument(
- '--hash-type', default="build_hash",
- choices=['build_hash', 'full_hash', 'dag_hash'],
+ '--hash-type', default="dag_hash",
+ choices=['runtime_hash', 'dag_hash'],
help='generate spec with a particular hash type.')
subparser.add_argument(
'-t', '--types', action='store_true', default=False,
diff --git a/lib/spack/spack/cmd/test.py b/lib/spack/spack/cmd/test.py
index 30679d438a..f15fd39655 100644
--- a/lib/spack/spack/cmd/test.py
+++ b/lib/spack/spack/cmd/test.py
@@ -332,13 +332,18 @@ def _report_suite_results(test_suite, args, constraints):
.format(results_desc, test_suite.name, matching))
results = {}
+ tty.msg('test results')
with open(test_suite.results_file, 'r') as f:
for line in f:
pkg_id, status = line.split()
results[pkg_id] = status
+ tty.msg(' {0}'.format(pkg_id))
+
+ tty.msg('test specs:')
failed, skipped, untested = 0, 0, 0
for pkg_id in test_specs:
+ tty.msg(' {0}'.format(pkg_id))
if pkg_id in results:
status = results[pkg_id]
if status == 'FAILED':
diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py
index 6a16a83e1c..8537d345fd 100644
--- a/lib/spack/spack/database.py
+++ b/lib/spack/spack/database.py
@@ -433,7 +433,7 @@ class Database(object):
.format(spec.name))
return os.path.join(self._failure_dir,
- '{0}-{1}'.format(spec.name, spec.full_hash()))
+ '{0}-{1}'.format(spec.name, spec.dag_hash()))
def clear_all_failures(self):
"""Force remove install failure tracking files."""
@@ -645,8 +645,12 @@ class Database(object):
# TODO: fix this before we support multiple install locations.
database = {
'database': {
+ # TODO: move this to a top-level _meta section if we ever
+ # TODO: bump the DB version to 7
+ 'version': str(_db_version),
+
+ # dictionary of installation records, keyed by DAG hash
'installs': installs,
- 'version': str(_db_version)
}
}
@@ -1092,6 +1096,8 @@ class Database(object):
"Specs added to DB must be concrete.")
key = spec.dag_hash()
+ spec_run_hash = spec._runtime_hash
+ spec_pkg_hash = spec._package_hash
upstream, record = self.query_by_spec_hash(key)
if upstream:
return
@@ -1153,10 +1159,11 @@ class Database(object):
record.ref_count += 1
# Mark concrete once everything is built, and preserve
- # the original hash of concrete specs.
+ # the original hashes of concrete specs.
new_spec._mark_concrete()
new_spec._hash = key
- new_spec._full_hash = spec._full_hash
+ new_spec._runtime_hash = spec_run_hash
+ new_spec._package_hash = spec_pkg_hash
else:
# It is already in the database
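For reference, the serialized shape the database hunk above describes, with installation records keyed by DAG hash (a sketch; record fields abbreviated to those named in this diff):

    database = {
        'database': {
            'version': '6',  # assumed current version; see the TODO about 7
            'installs': {
                '<dag-hash>': {'spec': {}, 'ref_count': 1, 'in_buildcache': True},
            },
        }
    }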
diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py
index 33d3832a4c..56ba01c7b4 100644
--- a/lib/spack/spack/directory_layout.py
+++ b/lib/spack/spack/directory_layout.py
@@ -110,13 +110,9 @@ class DirectoryLayout(object):
"""Write a spec out to a file."""
_check_concrete(spec)
with open(path, 'w') as f:
- # The hash the the projection is the DAG hash but we write out the
- # full provenance by full hash so it's availabe if we want it later
- # extension = os.path.splitext(path)[-1].lower()
- # if 'json' in extension:
- spec.to_json(f, hash=ht.full_hash)
- # elif 'yaml' in extension:
- # spec.to_yaml(f, hash=ht.full_hash)
+ # The hash of the projection is the DAG hash which contains
+ # the full provenance, so it's available if we want it later
+ spec.to_json(f, hash=ht.dag_hash)
def write_host_environment(self, spec):
"""The host environment is a json file with os, kernel, and spack
diff --git a/lib/spack/spack/environment/environment.py b/lib/spack/spack/environment/environment.py
index 7359bdd591..90e7962b0f 100644
--- a/lib/spack/spack/environment/environment.py
+++ b/lib/spack/spack/environment/environment.py
@@ -1819,6 +1819,6 @@
json_specs_by_hash = d['concrete_specs']
root_hashes = set(self.concretized_order)
specs_by_hash = {}
for dag_hash, node_dict in json_specs_by_hash.items():
spec = Spec.from_node_dict(node_dict)
@@ -1856,12 +1859,20 @@ class Environment(object):
# trees and binary mirrors, and as such, must be considered the
# permanent id of the spec.
dag_hash = spec.dag_hash() # == full_hash()
- build_hash = spec.build_hash()
runtime_hash = spec.runtime_hash() # == old dag_hash()
- if runtime_hash in root_hashes:
+ if dag_hash in root_hashes:
+ # This spec's dag hash (now computed with build deps and pkg
+ # hash) is in the keys found in the file, so we're looking at
+ # the current format
+ pass
+ elif runtime_hash in root_hashes:
+ # This spec's runtime hash (the old dag hash w/out build deps,
+ # etc) is a key in this lockfile, so this is the oldest format
old_hash_to_new[runtime_hash] = dag_hash
- elif build_hash in root_hashes:
+ else:
+ # Neither of this spec's hashes appeared as a key in the lock
+ # file, so assume it is keyed by the deprecated build hash
old_hash_to_new[build_hash] = dag_hash
if (runtime_hash in root_hashes or
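Condensed, the lockfile migration in the hunk above dispatches on which hash appears as a key (a hypothetical standalone helper; the final branch assumes the deprecated build-hash format):

    def map_old_lockfile_key(spec, root_hashes, old_hash_to_new):
        dag_hash = spec.dag_hash()          # new key: build deps + package hash
        if dag_hash in root_hashes:
            return                          # already the current lockfile format
        runtime_hash = spec.runtime_hash()  # old dag hash, without build deps
        if runtime_hash in root_hashes:
            old_hash_to_new[runtime_hash] = dag_hash  # oldest lockfile format
        # otherwise, assume the key is the deprecated build hash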
diff --git a/lib/spack/spack/hash_types.py b/lib/spack/spack/hash_types.py
index b6e8f12a74..0f8d20fb5b 100644
--- a/lib/spack/spack/hash_types.py
+++ b/lib/spack/spack/hash_types.py
@@ -39,16 +39,6 @@ dag_hash = SpecHashDescriptor(
deptype=('build', 'link', 'run'), package_hash=True, name='hash')
-#: Same as dag_hash; old name.
-full_hash = SpecHashDescriptor(
- deptype=('build', 'link', 'run'), package_hash=True, name='full_hash')
-
-
-#: Hash descriptor that includes build dependencies.
-build_hash = SpecHashDescriptor(
- deptype=('build', 'link', 'run'), package_hash=False, name='build_hash')
-
-
#: Hash descriptor used only to transfer a DAG, as is, across processes
process_hash = SpecHashDescriptor(
deptype=('build', 'link', 'run', 'test'),
@@ -57,7 +47,18 @@ process_hash = SpecHashDescriptor(
)
-#: Package hash used as part of full hash
+#: Package hash used as part of dag hash
package_hash = SpecHashDescriptor(
deptype=(), package_hash=True, name='package_hash',
override=lambda s: s.package.content_hash())
+
+
+# Deprecated hash types, no longer used, but needed to understand old serialized
+# spec formats
+
+full_hash = SpecHashDescriptor(
+ deptype=('build', 'link', 'run'), package_hash=True, name='full_hash')
+
+
+build_hash = SpecHashDescriptor(
+ deptype=('build', 'link', 'run'), package_hash=False, name='build_hash')
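Each SpecHashDescriptor pairs a dependency-type tuple with a package_hash flag and the key name used in serialized node dicts. A hypothetical descriptor following the same pattern, not one this patch defines:

    # Hypothetical: what a descriptor for the old no-build-deps hash looks like.
    runtime_hash_like = SpecHashDescriptor(
        deptype=('link', 'run'), package_hash=False, name='runtime_hash')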
diff --git a/lib/spack/spack/installer.py b/lib/spack/spack/installer.py
index f99d6baed7..fd1ed15fe7 100644
--- a/lib/spack/spack/installer.py
+++ b/lib/spack/spack/installer.py
@@ -260,8 +260,7 @@ def _hms(seconds):
return ' '.join(parts)
-def _install_from_cache(pkg, cache_only, explicit, unsigned=False,
- full_hash_match=False):
+def _install_from_cache(pkg, cache_only, explicit, unsigned=False):
"""
Extract the package from binary cache
@@ -278,7 +277,7 @@ def _install_from_cache(pkg, cache_only, explicit, unsigned=False,
``False`` otherwise
"""
installed_from_cache = _try_install_from_binary_cache(
- pkg, explicit, unsigned=unsigned, full_hash_match=full_hash_match)
+ pkg, explicit, unsigned=unsigned)
pkg_id = package_id(pkg)
if not installed_from_cache:
pre = 'No binary for {0} found'.format(pkg_id)
@@ -390,8 +389,7 @@ def _process_binary_cache_tarball(pkg, binary_spec, explicit, unsigned,
return True
-def _try_install_from_binary_cache(pkg, explicit, unsigned=False,
- full_hash_match=False):
+def _try_install_from_binary_cache(pkg, explicit, unsigned=False):
"""
Try to extract the package from binary cache.
@@ -403,8 +401,7 @@ def _try_install_from_binary_cache(pkg, explicit, unsigned=False,
"""
pkg_id = package_id(pkg)
tty.debug('Searching for binary cache of {0}'.format(pkg_id))
- matches = binary_distribution.get_mirrors_for_spec(
- pkg.spec, full_hash_match=full_hash_match)
+ matches = binary_distribution.get_mirrors_for_spec(pkg.spec)
if not matches:
return False
@@ -1204,7 +1201,6 @@ class PackageInstaller(object):
install_args = task.request.install_args
cache_only = install_args.get('cache_only')
explicit = task.explicit
- full_hash_match = install_args.get('full_hash_match')
tests = install_args.get('tests')
unsigned = install_args.get('unsigned')
use_cache = install_args.get('use_cache')
@@ -1217,8 +1213,7 @@ class PackageInstaller(object):
# Use the binary cache if requested
if use_cache and \
- _install_from_cache(pkg, cache_only, explicit, unsigned,
- full_hash_match):
+ _install_from_cache(pkg, cache_only, explicit, unsigned):
self._update_installed(task)
if task.compiler:
spack.compilers.add_compilers_to_config(
@@ -2306,7 +2301,6 @@ class BuildRequest(object):
('dirty', False),
('fail_fast', False),
('fake', False),
- ('full_hash_match', False),
('install_deps', True),
('install_package', True),
('install_source', False),
diff --git a/lib/spack/spack/monitor.py b/lib/spack/spack/monitor.py
index 9634f96833..85ffc3aa2e 100644
--- a/lib/spack/spack/monitor.py
+++ b/lib/spack/spack/monitor.py
@@ -132,7 +132,7 @@ class SpackMonitorClient:
self.tags = tags
self.save_local = save_local
- # We keey lookup of build_id by full_hash
+ # We keep a lookup of build_id by dag_hash
self.build_ids = {}
self.setup_save()
@@ -412,7 +412,7 @@ class SpackMonitorClient:
spec.concretize()
# Remove extra level of nesting
- as_dict = {"spec": spec.to_dict(hash=ht.full_hash)['spec'],
+ as_dict = {"spec": spec.to_dict(hash=ht.dag_hash)['spec'],
"spack_version": self.spack_version}
if self.save_local:
@@ -437,7 +437,7 @@ class SpackMonitorClient:
meta = spec.to_dict()['spec']
nodes = []
for node in meta.get("nodes", []):
- for hashtype in ["build_hash", "full_hash"]:
+ for hashtype in ["hash", "runtime_hash"]:
node[hashtype] = "FAILED_CONCRETIZATION"
nodes.append(node)
meta['nodes'] = nodes
@@ -470,13 +470,13 @@ class SpackMonitorClient:
"""
Retrieve a build id, either in the local cache, or query the server.
"""
- full_hash = spec.full_hash()
- if full_hash in self.build_ids:
- return self.build_ids[full_hash]
+ dag_hash = spec.dag_hash()
+ if dag_hash in self.build_ids:
+ return self.build_ids[dag_hash]
# Prepare build environment data (including spack version)
data = self.build_environment.copy()
- data['full_hash'] = full_hash
+ data['hash'] = dag_hash
# If the build should be tagged, add it
if self.tags:
@@ -494,10 +494,10 @@ class SpackMonitorClient:
data['spec'] = syaml.load(read_file(spec_file))
if self.save_local:
- return self.get_local_build_id(data, full_hash, return_response)
- return self.get_server_build_id(data, full_hash, return_response)
+ return self.get_local_build_id(data, dag_hash, return_response)
+ return self.get_server_build_id(data, dag_hash, return_response)
- def get_local_build_id(self, data, full_hash, return_response):
+ def get_local_build_id(self, data, dag_hash, return_response):
"""
Generate a local build id based on hashing the expected data
"""
@@ -510,15 +510,15 @@ class SpackMonitorClient:
return response
return bid
- def get_server_build_id(self, data, full_hash, return_response=False):
+ def get_server_build_id(self, data, dag_hash, return_response=False):
"""
Retrieve a build id from the spack monitor server
"""
response = self.do_request("builds/new/", data=sjson.dump(data))
# Add the build id to the lookup
- bid = self.build_ids[full_hash] = response['data']['build']['build_id']
- self.build_ids[full_hash] = bid
+ bid = self.build_ids[dag_hash] = response['data']['build']['build_id']
+ self.build_ids[dag_hash] = bid
# If the function is called directly, the user might want output
if return_response:
diff --git a/lib/spack/spack/schema/spec.py b/lib/spack/spack/schema/spec.py
index 44fd6dffb9..a1665092aa 100644
--- a/lib/spack/spack/schema/spec.py
+++ b/lib/spack/spack/schema/spec.py
@@ -110,8 +110,7 @@ properties = {
'properties': {
'name': {'type': 'string'},
'hash': {'type': 'string'},
- 'full_hash': {'type': 'string'},
- 'build_hash': {'type': 'string'},
+ 'runtime_hash': {'type': 'string'},
'package_hash': {'type': 'string'},
'version': {
'oneOf': [
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index 46ee04ae90..422227675e 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -1192,8 +1192,7 @@ class Spec(object):
self.namespace = None
self._hash = None
- self._build_hash = None
- self._full_hash = None
+ self._runtime_hash = None
self._package_hash = None
self._dunder_hash = None
self._package = None
@@ -1210,12 +1209,12 @@ class Spec(object):
self.external_path = external_path
self.external_modules = Spec._format_module_list(external_modules)
- # Older spack versions did not compute full_hash or build_hash,
- # and we may not have the necessary information to recompute them
- # if we read in old specs. Old concrete specs are marked "final"
- # when read in to indicate that we shouldn't recompute full_hash
- # or build_hash. New specs are not final; we can lazily compute
- # their hashes.
+ # Older spack versions may have either computed different hashes or
+ # computed them differently, and we may not have the necessary
+ # information to recompute them if we read in old specs.
+ # Old concrete specs are marked "final" when read in to indicate
+ # that we shouldn't recompute the current dag_hash. New specs are
+ # not final; we can lazily compute their hashes.
self._hashes_final = False
# This attribute is used to store custom information for
@@ -1804,26 +1803,12 @@ class Spec(object):
"""This is Spack's default hash, used to identify installations.
Same as the full hash (includes package hash and build/link/run deps).
+ Tells us when package files and any dependencies have changed.
NOTE: Versions of Spack prior to 0.18 only included link and run deps.
- """
- return self._cached_hash(ht.dag_hash, length)
-
- def full_hash(self, length=None):
- """Hash that includes all build and run inputs for a spec.
-
- Inputs are: the package hash (to identify the package.py),
- build, link, and run dependencies.
- """
- return self._cached_hash(ht.full_hash, length)
-
- def build_hash(self, length=None):
- """Hash used to store specs in environments.
- This hash includes build dependencies, and we need to preserve
- them to be able to rebuild an entire environment for a user.
"""
- return self._cached_hash(ht.build_hash, length)
+ return self._cached_hash(ht.dag_hash, length)
def process_hash(self, length=None):
"""Hash used to transfer specs among processes.
@@ -1833,6 +1818,7 @@ class Spec(object):
"""
return self._cached_hash(ht.process_hash, length)
+
def dag_hash_bit_prefix(self, bits):
"""Get the first <bits> bits of the DAG hash as an integer type."""
return spack.util.hash.base32_prefix_bits(self.dag_hash(), bits)
@@ -2002,7 +1988,7 @@ class Spec(object):
"dependencies": [
{
"name": "readline",
- "build_hash": "4f47cggum7p4qmp3xna4hi547o66unva",
+ "hash": "4f47cggum7p4qmp3xna4hi547o66unva",
"type": [
"build",
"link"
@@ -2010,16 +1996,15 @@ class Spec(object):
},
{
"name": "zlib",
- "build_hash": "uvgh6p7rhll4kexqnr47bvqxb3t33jtq",
+ "hash": "uvgh6p7rhll4kexqnr47bvqxb3t33jtq",
"type": [
"build",
"link"
]
}
],
- "hash": "d2yzqp2highd7sn4nr5ndkw3ydcrlhtk",
- "full_hash": "tve45xfqkfgmzwcyfetze2z6syrg7eaf",
- "build_hash": "tsjnz7lgob7bu2wd4sqzzjenxewc2zha"
+ "runtime_hash": "d2yzqp2highd7sn4nr5ndkw3ydcrlhtk",
+ "hash": "tve45xfqkfgmzwcyfetze2z6syrg7eaf",
},
# ... more node dicts for readline and its dependencies ...
]
@@ -2071,37 +2056,29 @@ class Spec(object):
node = self.to_node_dict(hash)
node[ht.dag_hash.name] = self.dag_hash()
- # full_hash and build_hash are lazily computed -- but if we write
- # a spec out, we want them to be included. This is effectively
- # the last chance we get to compute them accurately.
+ # dag_hash is lazily computed -- but if we write a spec out, we want it
+ # to be included. This is effectively the last chance we get to compute
+ # it accurately.
if self.concrete:
- # build and full hashes can be written out if:
- # 1. they're precomputed (i.e. we read them from somewhere
- # and they were already on the spec
- # 2. we can still compute them lazily (i.e. we just made them and
+ # dag_hash can be written out if:
+ # 1. it's precomputed (i.e. we read it from somewhere
+ # and it was already on the spec)
+ # 2. we can still compute it lazily (i.e. we just made the spec and
# have the full dependency graph on-hand)
#
- # we want to avoid recomputing either hash for specs we read
+ # we want to avoid recomputing the dag_hash for specs we read
# in from the DB or elsewhere, as we may not have the info
# (like patches, package versions, etc.) that we need to
- # compute them. Unknown hashes are better than wrong hashes.
- write_full_hash = (
- self._hashes_final and self._full_hash or # cached and final
+ # compute it. Unknown hashes are better than wrong hashes.
+ write_dag_hash = (
+ self._hashes_final and self._hash or # cached and final
not self._hashes_final) # lazily compute
- if write_full_hash:
- node[ht.full_hash.name] = self.full_hash()
-
- write_build_hash = 'build' in hash.deptype and (
- self._hashes_final and self._build_hash or # cached and final
- not self._hashes_final) # lazily compute
- if write_build_hash:
- node[ht.build_hash.name] = self.build_hash()
+ if write_dag_hash:
+ node[ht.dag_hash.name] = self.dag_hash()
else:
node['concrete'] = False
- if hash.name == 'build_hash':
- node[hash.name] = self.build_hash()
- elif hash.name == 'process_hash':
+ if hash.name == 'process_hash':
node[hash.name] = self.process_hash()
elif hash.name == 'runtime_hash':
node[hash.name] = self.runtime_hash()
@@ -2187,7 +2164,7 @@ class Spec(object):
# this spec may have been built with older packages than we have
# on-hand, and we may not have the build dependencies, so mark it
- # so we don't recompute full_hash and build_hash.
+ # so we don't recompute dag_hash.
spec._hashes_final = spec._concrete
if 'patches' in node:
@@ -2200,7 +2177,7 @@ class Spec(object):
# FIXME: Monkey patches mvar to store patches order
mvar._patches_in_order_of_appearance = patches
- # Don't read dependencies here; from_node_dict() is used by
+ # Don't read dependencies here; from_dict() is used by
# from_yaml() and from_json() to read the root *and* each dependency
# spec.
@@ -2227,7 +2204,6 @@ class Spec(object):
@staticmethod
def read_yaml_dep_specs(deps, hash_type=ht.dag_hash.name):
"""Read the DependencySpec portion of a YAML-formatted Spec.
-
This needs to be backward-compatible with older spack spec
formats so that reindex will work on old specs/databases.
"""
@@ -2247,16 +2223,11 @@ class Spec(object):
elif isinstance(elt, dict):
# new format: elements of dependency spec are keyed.
for key in (ht.dag_hash.name,
- ht.full_hash.name,
ht.build_hash.name,
+ ht.full_hash.name,
ht.runtime_hash.name,
ht.process_hash.name):
if key in elt:
- # FIXME: if the key is 'hash' it could mean the old
- # dag hash without build deps, or the new dag hash which
- # is equivalent to the full hash. If this was the old
- # dag hash, we need to keep the hash value but set the
- # key hash type to "runtime_hash".
dep_hash, deptypes = elt[key], elt['type']
hash_type = key
break
@@ -3797,20 +3768,17 @@ class Spec(object):
if self._concrete:
self._hash = other._hash
- self._build_hash = other._build_hash
self._dunder_hash = other._dunder_hash
- self._normal = True
+ self._normal = other._normal
self._full_hash = other._full_hash
self._runtime_hash = other._runtime_hash
self._package_hash = other._package_hash
else:
self._hash = None
- self._build_hash = None
self._dunder_hash = None
# Note, we could use other._normal if we are copying all deps, but
# always set it False here to avoid the complexity of checking
self._normal = False
- self._full_hash = None
self._runtime_hash = None
self._package_hash = None
@@ -4751,12 +4719,9 @@ class Spec(object):
for dep in ret.traverse(root=True, order='post'):
opposite = other_nodes if dep.name in self_nodes else self_nodes
if any(name in dep for name in opposite.keys()):
-
# package hash cannot be affected by splice
dep.clear_cached_hashes(ignore=['package_hash'])
- dep.build_hash()
- dep.full_hash()
dep.dag_hash()
return nodes[self.name]
@@ -4844,7 +4809,7 @@ def _spec_from_old_dict(data):
if 'dependencies' not in node[name]:
continue
- for dname, dhash, dtypes, _ in Spec.dependencies_from_node_dict(node):
+ for dname, _, dtypes, _ in Spec.dependencies_from_node_dict(node):
deps[name]._add_dependency(deps[dname], dtypes)
return spec
@@ -4878,7 +4843,7 @@ def _spec_from_dict(data):
break
if not any_deps: # If we never see a dependency...
- hash_type = ht.dag_hash.name # use the full_hash provenance
+ hash_type = ht.dag_hash.name
elif not hash_type: # Seen a dependency, still don't know hash_type
raise spack.error.SpecError("Spec dictionary contains malformed "
"dependencies. Old format?")
@@ -4888,10 +4853,7 @@ def _spec_from_dict(data):
# Pass 1: Create a single lookup dictionary by hash
for i, node in enumerate(nodes):
- if 'build_spec' in node.keys():
- node_hash = node[hash_type]
- else:
- node_hash = node[hash_type]
+ node_hash = node[hash_type]
node_spec = Spec.from_node_dict(node)
hash_dict[node_hash] = node
hash_dict[node_hash]['node_spec'] = node_spec
@@ -5350,7 +5312,7 @@ def save_dependency_specfiles(
json_path = os.path.join(output_directory, '{0}.json'.format(dep_name))
with open(json_path, 'w') as fd:
- fd.write(dep_spec.to_json(hash=ht.build_hash))
+ fd.write(dep_spec.to_json(hash=ht.dag_hash))
class SpecParseError(spack.error.SpecError):
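A round-trip sketch tying the spec.py changes together: serializing with the unified descriptor and reading the result back preserves the hash (illustrative; assumes a Spack checkout):

    import spack.hash_types as ht
    from spack.spec import Spec

    s = Spec('zlib').concretized()
    text = s.to_json(hash=ht.dag_hash)  # nodes carry the single 'hash' key
    t = Spec.from_json(text)
    assert t.dag_hash() == s.dag_hash()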
diff --git a/lib/spack/spack/test/bindist.py b/lib/spack/spack/test/bindist.py
index b7d970c02a..864493c1e4 100644
--- a/lib/spack/spack/test/bindist.py
+++ b/lib/spack/spack/test/bindist.py
@@ -394,31 +394,12 @@ def test_built_spec_cache(mirror_dir):
gspec, cspec = Spec('garply').concretized(), Spec('corge').concretized()
- full_hash_map = {
- 'garply': gspec.full_hash(),
- 'corge': cspec.full_hash(),
- }
+ for s in [gspec, cspec]:
+ results = bindist.get_mirrors_for_spec(s)
+ assert(any([r['spec'] == s for r in results]))
- gspec_results = bindist.get_mirrors_for_spec(gspec)
- gspec_mirrors = {}
- for result in gspec_results:
- s = result['spec']
- assert(s._full_hash == full_hash_map[s.name])
- assert(result['mirror_url'] not in gspec_mirrors)
- gspec_mirrors[result['mirror_url']] = True
-
- cspec_results = bindist.get_mirrors_for_spec(cspec, full_hash_match=True)
-
- cspec_mirrors = {}
- for result in cspec_results:
- s = result['spec']
- assert(s._full_hash == full_hash_map[s.name])
- assert(result['mirror_url'] not in cspec_mirrors)
- cspec_mirrors[result['mirror_url']] = True
-
-
-def fake_full_hash(spec):
+def fake_dag_hash(spec):
# Generate an arbitrary hash that is intended to be different than
# whatever a Spec reported before (to test actions that trigger when
# the hash changes)
@@ -430,7 +411,7 @@ def fake_full_hash(spec):
'test_mirror'
)
def test_spec_needs_rebuild(monkeypatch, tmpdir):
- """Make sure needs_rebuild properly compares remote full_hash
+ """Make sure needs_rebuild properly compares remote hash
against locally computed one, avoiding unnecessary rebuilds"""
# Create a temp mirror directory for buildcache usage
@@ -450,7 +431,7 @@ def test_spec_needs_rebuild(monkeypatch, tmpdir):
assert not rebuild
# Now monkey patch Spec to change the full hash on the package
- monkeypatch.setattr(spack.spec.Spec, 'full_hash', fake_full_hash)
+ monkeypatch.setattr(spack.spec.Spec, 'dag_hash', fake_dag_hash)
rebuild = bindist.needs_rebuild(s, mirror_url, rebuild_on_errors=True)
@@ -624,57 +605,6 @@ def test_install_legacy_yaml(test_legacy_mirror, install_mockery_mutable_config,
uninstall_cmd('-y', '/t5mczux3tfqpxwmg7egp7axy2jvyulqk')
-@pytest.mark.usefixtures(
- 'install_mockery_mutable_config', 'mock_packages', 'mock_fetch',
-)
-def test_update_index_fix_deps(monkeypatch, tmpdir, mutable_config):
- """Ensure spack buildcache update-index properly fixes up spec descriptor
- files on the mirror when updating the buildcache index."""
-
- # Create a temp mirror directory for buildcache usage
- mirror_dir = tmpdir.join('mirror_dir')
- mirror_url = 'file://{0}'.format(mirror_dir.strpath)
- spack.config.set('mirrors', {'test': mirror_url})
-
- a = Spec('a').concretized()
- b = Spec('b').concretized()
- new_b_full_hash = 'abcdef'
-
- # Install package a with dep b
- install_cmd('--no-cache', a.name)
-
- # Create a buildcache for a and its dep b, and update index
- buildcache_cmd('create', '-uad', mirror_dir.strpath, a.name)
- buildcache_cmd('update-index', '-d', mirror_dir.strpath)
-
- # Simulate an update to b that only affects full hash by simply overwriting
- # the full hash in the spec.json file on the mirror
- b_spec_json_name = bindist.tarball_name(b, '.spec.json')
- b_spec_json_path = os.path.join(mirror_dir.strpath,
- bindist.build_cache_relative_path(),
- b_spec_json_name)
- fs.filter_file(r'"full_hash":\s"\S+"',
- '"full_hash": "{0}"'.format(new_b_full_hash),
- b_spec_json_path)
- # When we update the index, spack should notice that a's notion of the
- # full hash of b doesn't match b's notion of it's own full hash, and as
- # a result, spack should fix the spec.json for a
- buildcache_cmd('update-index', '-d', mirror_dir.strpath)
-
- # Read in the concrete spec json of a
- a_spec_json_name = bindist.tarball_name(a, '.spec.json')
- a_spec_json_path = os.path.join(mirror_dir.strpath,
- bindist.build_cache_relative_path(),
- a_spec_json_name)
-
- # Turn concrete spec json into a concrete spec (a)
- with open(a_spec_json_path) as fd:
- a_prime = spec.Spec.from_json(fd.read())
-
- # Make sure the full hash of b in a's spec json matches the new value
- assert(a_prime[b.name].full_hash() == new_b_full_hash)
-
-
def test_FetchCacheError_only_accepts_lists_of_errors():
with pytest.raises(TypeError, match="list"):
bindist.FetchCacheError("error")
diff --git a/lib/spack/spack/test/cmd/ci.py b/lib/spack/spack/test/cmd/ci.py
index c0dd2c7a9e..6d71e47b85 100644
--- a/lib/spack/spack/test/cmd/ci.py
+++ b/lib/spack/spack/test/cmd/ci.py
@@ -767,19 +767,13 @@ spack:
shutil.copyfile(env.lock_path,
os.path.join(env_dir.strpath, 'spack.lock'))
- root_spec_build_hash = None
- job_spec_dag_hash = None
- job_spec_full_hash = None
+ root_spec_dag_hash = None
for h, s in env.specs_by_hash.items():
if s.name == 'archive-files':
- root_spec_build_hash = h
- job_spec_dag_hash = s.dag_hash()
- job_spec_full_hash = s.full_hash()
+ root_spec_dag_hash = h
- assert root_spec_build_hash
- assert job_spec_dag_hash
- assert job_spec_full_hash
+ assert root_spec_dag_hash
def fake_cdash_register(build_name, base_url, project, site, track):
return ('fakebuildid', 'fakestamp')
@@ -801,8 +795,8 @@ spack:
'SPACK_CONCRETE_ENV_DIR': env_dir.strpath,
'CI_PIPELINE_ID': '7192',
'SPACK_SIGNING_KEY': signing_key,
- 'SPACK_ROOT_SPEC': root_spec_build_hash,
- 'SPACK_JOB_SPEC_DAG_HASH': job_spec_dag_hash,
+ 'SPACK_ROOT_SPEC': root_spec_dag_hash,
+ 'SPACK_JOB_SPEC_DAG_HASH': root_spec_dag_hash,
'SPACK_JOB_SPEC_PKG_NAME': 'archive-files',
'SPACK_COMPILER_ACTION': 'NONE',
'SPACK_CDASH_BUILD_NAME': '(specs) archive-files',
@@ -816,8 +810,8 @@ spack:
expected_repro_files = [
'install.sh',
- 'root.yaml',
- 'archive-files.yaml',
+ 'root.json',
+ 'archive-files.json',
'spack.yaml',
'spack.lock'
]
@@ -839,14 +833,13 @@ spack:
install_parts = [mystrip(s) for s in install_line.split(' ')]
assert('--keep-stage' in install_parts)
- assert('--require-full-hash-match' in install_parts)
assert('--no-check-signature' not in install_parts)
assert('--no-add' in install_parts)
assert('-f' in install_parts)
flag_index = install_parts.index('-f')
- assert('archive-files.yaml' in install_parts[flag_index + 1])
+ assert('archive-files.json' in install_parts[flag_index + 1])
- broken_spec_file = os.path.join(broken_specs_path, job_spec_full_hash)
+ broken_spec_file = os.path.join(broken_specs_path, root_spec_dag_hash)
with open(broken_spec_file) as fd:
broken_spec_content = fd.read()
assert(ci_job_url in broken_spec_content)
@@ -894,13 +887,11 @@ spack:
env_cmd('create', 'test', './spack.yaml')
with ev.read('test') as env:
env.concretize()
- root_spec_build_hash = None
- job_spec_dag_hash = None
+ root_spec_dag_hash = None
for h, s in env.specs_by_hash.items():
if s.name == 'archive-files':
- root_spec_build_hash = h
- job_spec_dag_hash = s.dag_hash()
+ root_spec_dag_hash = h
# Create environment variables as gitlab would do it
os.environ.update({
@@ -909,8 +900,8 @@ spack:
'SPACK_JOB_REPRO_DIR': 'repro_dir',
'SPACK_LOCAL_MIRROR_DIR': mirror_dir.strpath,
'SPACK_CONCRETE_ENV_DIR': tmpdir.strpath,
- 'SPACK_ROOT_SPEC': root_spec_build_hash,
- 'SPACK_JOB_SPEC_DAG_HASH': job_spec_dag_hash,
+ 'SPACK_ROOT_SPEC': root_spec_dag_hash,
+ 'SPACK_JOB_SPEC_DAG_HASH': root_spec_dag_hash,
'SPACK_JOB_SPEC_PKG_NAME': 'archive-files',
'SPACK_COMPILER_ACTION': 'NONE',
'SPACK_REMOTE_MIRROR_URL': mirror_url,
@@ -980,7 +971,7 @@ spack:
spec_map = ci.get_concrete_specs(
env, 'patchelf', 'patchelf', 'FIND_ANY')
concrete_spec = spec_map['patchelf']
- spec_json = concrete_spec.to_json(hash=ht.build_hash)
+ spec_json = concrete_spec.to_json(hash=ht.dag_hash)
json_path = str(tmpdir.join('spec.json'))
with open(json_path, 'w') as ypfd:
ypfd.write(spec_json)
@@ -1323,12 +1314,12 @@ spack:
spec_map = ci.get_concrete_specs(
env, 'callpath', 'callpath', 'FIND_ANY')
concrete_spec = spec_map['callpath']
- spec_yaml = concrete_spec.to_yaml(hash=ht.build_hash)
- yaml_path = str(tmpdir.join('spec.yaml'))
- with open(yaml_path, 'w') as ypfd:
- ypfd.write(spec_yaml)
+ spec_json = concrete_spec.to_json(hash=ht.dag_hash)
+ json_path = str(tmpdir.join('spec.json'))
+    with open(json_path, 'w') as fd:
+        fd.write(spec_json)
- install_cmd('--keep-stage', '-f', yaml_path)
+ install_cmd('--keep-stage', '-f', json_path)
buildcache_cmd('create', '-u', '-a', '-f', '--mirror-url',
mirror_url, 'callpath')
ci_cmd('rebuild-index')
@@ -1412,8 +1403,8 @@ spack:
# nothing in the environment needs rebuilding. With the monkeypatch, the
# process sees the compiler as needing a rebuild, which should then result
# in the specs built with that compiler needing a rebuild too.
- def fake_get_mirrors_for_spec(spec=None, full_hash_match=False,
- mirrors_to_check=None, index_only=False):
+ def fake_get_mirrors_for_spec(spec=None, mirrors_to_check=None,
+ index_only=False):
if spec.name == 'gcc':
return []
else:
@@ -1674,14 +1665,14 @@ def test_ci_generate_read_broken_specs_url(tmpdir, mutable_mock_env_path,
"""Verify that `broken-specs-url` works as intended"""
spec_a = Spec('a')
spec_a.concretize()
- a_full_hash = spec_a.full_hash()
+ a_dag_hash = spec_a.dag_hash()
spec_flattendeps = Spec('flatten-deps')
spec_flattendeps.concretize()
- flattendeps_full_hash = spec_flattendeps.full_hash()
+ flattendeps_dag_hash = spec_flattendeps.dag_hash()
# Mark 'a' as broken (but not 'flatten-deps')
- broken_spec_a_path = str(tmpdir.join(a_full_hash))
+ broken_spec_a_path = str(tmpdir.join(a_dag_hash))
with open(broken_spec_a_path, 'w') as bsf:
bsf.write('')
@@ -1718,10 +1709,10 @@ spack:
output = ci_cmd('generate', output=str, fail_on_error=False)
assert('known to be broken' in output)
- ex = '({0})'.format(a_full_hash)
+ ex = '({0})'.format(a_dag_hash)
assert(ex in output)
- ex = '({0})'.format(flattendeps_full_hash)
+ ex = '({0})'.format(flattendeps_dag_hash)
assert(ex not in output)
@@ -1776,15 +1767,15 @@ spack:
root_spec = s
job_spec = s
- job_spec_yaml_path = os.path.join(
- working_dir.strpath, 'archivefiles.yaml')
- with open(job_spec_yaml_path, 'w') as fd:
- fd.write(job_spec.to_yaml(hash=ht.build_hash))
+ job_spec_json_path = os.path.join(
+ working_dir.strpath, 'archivefiles.json')
+ with open(job_spec_json_path, 'w') as fd:
+ fd.write(job_spec.to_json(hash=ht.dag_hash))
- root_spec_yaml_path = os.path.join(
- working_dir.strpath, 'root.yaml')
- with open(root_spec_yaml_path, 'w') as fd:
- fd.write(root_spec.to_yaml(hash=ht.build_hash))
+ root_spec_json_path = os.path.join(
+ working_dir.strpath, 'root.json')
+ with open(root_spec_json_path, 'w') as fd:
+ fd.write(root_spec.to_json(hash=ht.dag_hash))
artifacts_root = os.path.join(working_dir.strpath, 'scratch_dir')
pipeline_path = os.path.join(artifacts_root, 'pipeline.yml')
@@ -1798,8 +1789,8 @@ spack:
repro_file = os.path.join(working_dir.strpath, 'repro.json')
repro_details = {
'job_name': job_name,
- 'job_spec_yaml': 'archivefiles.yaml',
- 'root_spec_yaml': 'root.yaml',
+ 'job_spec_json': 'archivefiles.json',
+ 'root_spec_json': 'root.json',
'ci_project_dir': working_dir.strpath
}
with open(repro_file, 'w') as fd:
diff --git a/lib/spack/spack/test/cmd/env.py b/lib/spack/spack/test/cmd/env.py
index f698cd2715..ae9dbd195a 100644
--- a/lib/spack/spack/test/cmd/env.py
+++ b/lib/spack/spack/test/cmd/env.py
@@ -982,7 +982,7 @@ def create_v1_lockfile_dict(roots, all_specs):
# Version one lockfiles use the dag hash without build deps as keys,
# but they write out the full node dict (including build deps)
"concrete_specs": dict(
- (s.runtime_hash(), s.to_node_dict(hash=ht.build_hash))
+ (s.runtime_hash(), s.to_node_dict(hash=ht.dag_hash))
for s in all_specs
)
}
@@ -2469,19 +2469,19 @@ spack:
abspath = tmpdir.join('spack.yaml')
abspath.write(spack_yaml)
- def extract_build_hash(environment):
+ def extract_dag_hash(environment):
_, dyninst = next(iter(environment.specs_by_hash.items()))
- return dyninst['libelf'].build_hash()
+ return dyninst['libelf'].dag_hash()
# Concretize a first time and create a lockfile
with ev.Environment(str(tmpdir)) as e:
concretize()
- libelf_first_hash = extract_build_hash(e)
+ libelf_first_hash = extract_dag_hash(e)
# Check that a second run won't error
with ev.Environment(str(tmpdir)) as e:
concretize()
- libelf_second_hash = extract_build_hash(e)
+ libelf_second_hash = extract_dag_hash(e)
assert libelf_first_hash == libelf_second_hash
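
For reference, the v1 lockfile shape the helper above reproduces: each node is
keyed by its runtime hash (which omits build deps), while the node dict itself
is serialized with the full dag_hash deptypes. A minimal sketch, not part of
this patch, assuming a list of concrete specs:

    import spack.hash_types as ht

    def v1_lockfile_entries(all_specs):
        # v1 keys omit build deps; the serialized node dicts keep them
        return dict(
            (s.runtime_hash(), s.to_node_dict(hash=ht.dag_hash))
            for s in all_specs
        )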
diff --git a/lib/spack/spack/test/cmd/install.py b/lib/spack/spack/test/cmd/install.py
index fea3431a73..40c665dba5 100644
--- a/lib/spack/spack/test/cmd/install.py
+++ b/lib/spack/spack/test/cmd/install.py
@@ -7,7 +7,6 @@ import argparse
import filecmp
import os
import re
-import shutil
import sys
import time
@@ -621,20 +620,20 @@ def test_cdash_buildstamp_param(tmpdir, mock_fetch, install_mockery, capfd):
@pytest.mark.disable_clean_stage_check
-def test_cdash_install_from_spec_yaml(tmpdir, mock_fetch, install_mockery,
+def test_cdash_install_from_spec_json(tmpdir, mock_fetch, install_mockery,
capfd, mock_packages, mock_archive,
config):
# capfd interferes with Spack's capturing
with capfd.disabled():
with tmpdir.as_cwd():
- spec_yaml_path = str(tmpdir.join('spec.yaml'))
+ spec_json_path = str(tmpdir.join('spec.json'))
pkg_spec = Spec('a')
pkg_spec.concretize()
- with open(spec_yaml_path, 'w') as fd:
- fd.write(pkg_spec.to_yaml(hash=ht.build_hash))
+ with open(spec_json_path, 'w') as fd:
+ fd.write(pkg_spec.to_json(hash=ht.dag_hash))
install(
'--log-format=cdash',
@@ -642,7 +641,7 @@ def test_cdash_install_from_spec_yaml(tmpdir, mock_fetch, install_mockery,
'--cdash-build=my_custom_build',
'--cdash-site=my_custom_site',
'--cdash-track=my_custom_track',
- '-f', spec_yaml_path)
+ '-f', spec_json_path)
report_dir = tmpdir.join('cdash_reports')
assert report_dir in tmpdir.listdir()
@@ -846,14 +845,14 @@ def test_install_no_add_in_env(tmpdir, mock_fetch, install_mockery,
post_install_specs = e.all_specs()
assert all([s in env_specs for s in post_install_specs])
- # Make sure we can install a concrete dependency spec from a spec.yaml
+ # Make sure we can install a concrete dependency spec from a spec.json
# file on disk, using the ``--no-add` option, and the spec is installed
# but not added as a root
- mpi_spec_yaml_path = tmpdir.join('{0}.yaml'.format(mpi_spec.name))
- with open(mpi_spec_yaml_path.strpath, 'w') as fd:
- fd.write(mpi_spec.to_yaml(hash=ht.build_hash))
+ mpi_spec_json_path = tmpdir.join('{0}.json'.format(mpi_spec.name))
+ with open(mpi_spec_json_path.strpath, 'w') as fd:
+ fd.write(mpi_spec.to_json(hash=ht.dag_hash))
- install('--no-add', '-f', mpi_spec_yaml_path.strpath)
+ install('--no-add', '-f', mpi_spec_json_path.strpath)
assert(mpi_spec not in e.roots())
find_output = find('-l', output=str)
@@ -1016,76 +1015,6 @@ def test_install_fails_no_args_suggests_env_activation(tmpdir):
assert 'using the `spack.yaml` in this directory' in output
-default_full_hash = spack.spec.Spec.full_hash
-
-
-def fake_full_hash(spec):
- # Generate an arbitrary hash that is intended to be different than
- # whatever a Spec reported before (to test actions that trigger when
- # the hash changes)
- if spec.name == 'libdwarf':
- return 'tal4c7h4z0gqmixb1eqa92mjoybxn5l6'
- return default_full_hash(spec)
-
-
-def test_cache_install_full_hash_match(
- install_mockery_mutable_config, mock_packages, mock_fetch,
- mock_archive, mutable_config, monkeypatch, tmpdir):
- """Make sure installing from cache respects full hash argument"""
-
- # Create a temp mirror directory for buildcache usage
- mirror_dir = tmpdir.join('mirror_dir')
- mirror_url = 'file://{0}'.format(mirror_dir.strpath)
-
- s = Spec('libdwarf').concretized()
- package_id = spack.installer.package_id(s.package)
-
- # Install a package
- install(s.name)
-
- # Put installed package in the buildcache
- buildcache('create', '-u', '-a', '-f', '-d', mirror_dir.strpath, s.name)
-
- # Now uninstall the package
- uninstall('-y', s.name)
-
- # Configure the mirror with the binary package in it
- mirror('add', 'test-mirror', mirror_url)
-
- # Make sure we get the binary version by default
- install_output = install('--no-check-signature', s.name, output=str)
- expect_extract_msg = 'Extracting {0} from binary cache'.format(package_id)
-
- assert expect_extract_msg in install_output
-
- uninstall('-y', s.name)
-
- # Now monkey patch Spec to change the full hash on the package
- monkeypatch.setattr(spack.spec.Spec, 'full_hash', fake_full_hash)
-
- # Check that even if the full hash changes, we install from binary when
- # we don't explicitly require the full hash to match
- install_output = install('--no-check-signature', s.name, output=str)
- assert expect_extract_msg in install_output
-
- uninstall('-y', s.name)
-
- # Finally, make sure that if we insist on the full hash match, spack
- # installs from source.
- install_output = install('--require-full-hash-match', s.name, output=str)
- expect_msg = 'No binary for {0} found: installing from source'.format(
- package_id)
-
- assert expect_msg in install_output
-
- uninstall('-y', s.name)
- mirror('rm', 'test-mirror')
-
- # Get rid of that libdwarf binary in the mirror so other tests don't try to
- # use it and fail because of NoVerifyException
- shutil.rmtree(mirror_dir.strpath)
-
-
def test_install_env_with_tests_all(tmpdir, mock_packages, mock_fetch,
install_mockery, mutable_mock_env_path):
env('create', 'test')
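
The install tests above all lean on the same round trip: serialize a concrete
spec as JSON under the single dag_hash, then install from that file. A minimal
sketch, not part of this patch, assuming Spack's command fixtures and a pytest
tmpdir:

    import spack.hash_types as ht
    from spack.main import SpackCommand
    from spack.spec import Spec

    install = SpackCommand('install')

    pkg_spec = Spec('a').concretized()           # 'a': mock package (assumed)
    spec_json_path = str(tmpdir.join('a.json'))  # tmpdir: pytest fixture (assumed)
    with open(spec_json_path, 'w') as fd:
        fd.write(pkg_spec.to_json(hash=ht.dag_hash))

    # install the concrete spec from its descriptor without adding a root
    install('--no-add', '-f', spec_json_path)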
diff --git a/lib/spack/spack/test/cmd/test.py b/lib/spack/spack/test/cmd/test.py
index a696424c17..f24f40c830 100644
--- a/lib/spack/spack/test/cmd/test.py
+++ b/lib/spack/spack/test/cmd/test.py
@@ -256,11 +256,11 @@ def test_hash_change(mock_test_stage, mock_packages, mock_archive, mock_fetch,
outfile = os.path.join(testdir, 'test_suite.lock')
with open(outfile, 'r') as f:
output = f.read()
- val_replace = '"full_hash": "{0}"'.format(
- spack.store.db.query('printing-package')[0].full_hash())
+ val_replace = '"hash": "{0}"'.format(
+ spack.store.db.query('printing-package')[0].dag_hash())
changed_hash = output.replace(
val_replace,
- '"full_hash": "fakehash492ucwhwvzhxfbmcc45x49ha"')
+ '"hash": "fakehash492ucwhwvzhxfbmcc45x49ha"')
with open(outfile, 'w') as f:
f.write(changed_hash)
@@ -271,6 +271,6 @@ def test_hash_change(mock_test_stage, mock_packages, mock_archive, mock_fetch,
     results_output = spack_test('results')
     assert 'PASSED' in results_output
def test_test_results_none(mock_packages, mock_test_stage):
name = 'trivial'
diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py
index 4b573bbeda..f4b11056e8 100644
--- a/lib/spack/spack/test/concretize.py
+++ b/lib/spack/spack/test/concretize.py
@@ -1080,8 +1080,7 @@ class TestConcretize(object):
s._old_concretize(), t._new_concretize()
assert s.dag_hash() == t.dag_hash()
- assert s.build_hash() == t.build_hash()
- assert s.full_hash() == t.full_hash()
+ assert s.runtime_hash() == t.runtime_hash()
def test_external_that_would_require_a_virtual_dependency(self):
s = Spec('requires-virtual').concretized()
diff --git a/lib/spack/spack/test/directory_layout.py b/lib/spack/spack/test/directory_layout.py
index 7aa6a565fc..3bacd97ffa 100644
--- a/lib/spack/spack/test/directory_layout.py
+++ b/lib/spack/spack/test/directory_layout.py
@@ -118,13 +118,7 @@ def test_read_and_write_spec(temporary_store, config, mock_packages):
# Make sure spec file can be read back in to get the original spec
spec_from_file = layout.read_spec(spec_path)
- # currently we don't store build dependency information when
- # we write out specs to the filesystem.
-
- # TODO: fix this when we can concretize more loosely based on
- # TODO: what is installed. We currently omit these to
- # TODO: increase reuse of build dependencies.
- stored_deptypes = spack.hash_types.full_hash
+ stored_deptypes = spack.hash_types.dag_hash
expected = spec.copy(deps=stored_deptypes)
expected._mark_concrete()
diff --git a/lib/spack/spack/test/monitor.py b/lib/spack/spack/test/monitor.py
index 7ea7a0ddac..74a57ec898 100644
--- a/lib/spack/spack/test/monitor.py
+++ b/lib/spack/spack/test/monitor.py
@@ -49,7 +49,7 @@ def mock_monitor_request(monkeypatch):
def mock_do_request(self, endpoint, *args, **kwargs):
build = {"build_id": 1,
- "spec_full_hash": "bpfvysmqndtmods4rmy6d6cfquwblngp",
+ "spec_hash": "bpfvysmqndtmods4rmy6d6cfquwblngp",
"spec_name": "dttop"}
# Service Info
@@ -111,7 +111,7 @@ def mock_monitor_request(monkeypatch):
elif endpoint == "specs/new/":
return {"message": "success",
"data": {
- "full_hash": "bpfvysmqndtmods4rmy6d6cfquwblngp",
+ "hash": "bpfvysmqndtmods4rmy6d6cfquwblngp",
"name": "dttop",
"version": "1.0",
"spack_version": "0.16.0-1379-7a5351d495",
@@ -264,12 +264,12 @@ def test_install_monitor_save_local(install_mockery_mutable_config,
# Get the spec name
spec = spack.spec.Spec("dttop")
spec.concretize()
- full_hash = spec.full_hash()
+ dag_hash = spec.dag_hash()
# Ensure we have monitor results saved
for dirname in os.listdir(str(reports_dir)):
dated_dir = os.path.join(str(reports_dir), dirname)
- build_metadata = "build-metadata-%s.json" % full_hash
+ build_metadata = "build-metadata-%s.json" % dag_hash
assert build_metadata in os.listdir(dated_dir)
spec_file = "spec-dttop-%s-config.json" % spec.version
assert spec_file in os.listdir(dated_dir)
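
Monitor artifacts follow the same single-hash naming. A minimal sketch of the
filenames the test asserts, not part of this patch, assuming a concretized
spec:

    import spack.spec

    spec = spack.spec.Spec('dttop')
    spec.concretize()
    build_metadata = 'build-metadata-%s.json' % spec.dag_hash()
    spec_file = 'spec-dttop-%s-config.json' % spec.version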
diff --git a/lib/spack/spack/test/spec_semantics.py b/lib/spack/spack/test/spec_semantics.py
index 22a39d8f1f..df73f989c9 100644
--- a/lib/spack/spack/test/spec_semantics.py
+++ b/lib/spack/spack/test/spec_semantics.py
@@ -1001,7 +1001,7 @@ class TestSpecSematics(object):
dep.concretize()
# Sanity checking that these are not the same thing.
assert dep.dag_hash() != spec['splice-h'].dag_hash()
- assert dep.build_hash() != spec['splice-h'].build_hash()
+ assert dep.runtime_hash() != spec['splice-h'].runtime_hash()
# Do the splice.
out = spec.splice(dep, transitive)
# Returned spec should still be concrete.
@@ -1009,18 +1009,18 @@ class TestSpecSematics(object):
# Traverse the spec and assert that all dependencies are accounted for.
for node in spec.traverse():
assert node.name in out
- # If the splice worked, then the full hash of the spliced dep should
- # now match the full hash of the build spec of the dependency from the
+ # If the splice worked, then the dag hash of the spliced dep should
+ # now match the dag hash of the build spec of the dependency from the
# returned spec.
out_h_build = out['splice-h'].build_spec
- assert out_h_build.full_hash() == dep.full_hash()
+ assert out_h_build.dag_hash() == dep.dag_hash()
# Transitivity should determine whether the transitive dependency was
# changed.
expected_z = dep['splice-z'] if transitive else spec['splice-z']
- assert out['splice-z'].full_hash() == expected_z.full_hash()
+ assert out['splice-z'].dag_hash() == expected_z.dag_hash()
# Sanity check build spec of out should be the original spec.
- assert (out['splice-t'].build_spec.full_hash() ==
- spec['splice-t'].full_hash())
+ assert (out['splice-t'].build_spec.dag_hash() ==
+ spec['splice-t'].dag_hash())
# Finally, the spec should know it's been spliced:
assert out.spliced
@@ -1032,39 +1032,39 @@ class TestSpecSematics(object):
dep.concretize()
# monkeypatch hashes so we can test that they are cached
- spec._full_hash = 'aaaaaa'
- spec._build_hash = 'aaaaaa'
- dep._full_hash = 'bbbbbb'
- dep._build_hash = 'bbbbbb'
- spec['splice-h']._full_hash = 'cccccc'
- spec['splice-h']._build_hash = 'cccccc'
- spec['splice-z']._full_hash = 'dddddd'
- spec['splice-z']._build_hash = 'dddddd'
- dep['splice-z']._full_hash = 'eeeeee'
- dep['splice-z']._build_hash = 'eeeeee'
+ spec._hash = 'aaaaaa'
+ spec._runtime_hash = 'aaaaaa'
+ dep._hash = 'bbbbbb'
+ dep._runtime_hash = 'bbbbbb'
+ spec['splice-h']._hash = 'cccccc'
+ spec['splice-h']._runtime_hash = 'cccccc'
+ spec['splice-z']._hash = 'dddddd'
+ spec['splice-z']._runtime_hash = 'dddddd'
+ dep['splice-z']._hash = 'eeeeee'
+ dep['splice-z']._runtime_hash = 'eeeeee'
out = spec.splice(dep, transitive=transitive)
out_z_expected = (dep if transitive else spec)['splice-z']
- assert out.full_hash() != spec.full_hash()
- assert (out['splice-h'].full_hash() == dep.full_hash()) == transitive
- assert out['splice-z'].full_hash() == out_z_expected.full_hash()
+ assert out.dag_hash() != spec.dag_hash()
+ assert (out['splice-h'].dag_hash() == dep.dag_hash()) == transitive
+ assert out['splice-z'].dag_hash() == out_z_expected.dag_hash()
- assert out.build_hash() != spec.build_hash()
- assert (out['splice-h'].build_hash() == dep.build_hash()) == transitive
- assert out['splice-z'].build_hash() == out_z_expected.build_hash()
+ assert out.runtime_hash() != spec.runtime_hash()
+ assert (out['splice-h'].runtime_hash() == dep.runtime_hash()) == transitive
+ assert out['splice-z'].runtime_hash() == out_z_expected.runtime_hash()
@pytest.mark.parametrize('transitive', [True, False])
def test_splice_input_unchanged(self, transitive):
spec = Spec('splice-t').concretized()
dep = Spec('splice-h+foo').concretized()
- orig_spec_hash = spec.full_hash()
- orig_dep_hash = dep.full_hash()
+ orig_spec_hash = spec.dag_hash()
+ orig_dep_hash = dep.dag_hash()
spec.splice(dep, transitive)
# Post-splice, dag hash should still be different; no changes should be
# made to these specs.
- assert spec.full_hash() == orig_spec_hash
- assert dep.full_hash() == orig_dep_hash
+ assert spec.dag_hash() == orig_spec_hash
+ assert dep.dag_hash() == orig_dep_hash
@pytest.mark.parametrize('transitive', [True, False])
def test_splice_subsequent(self, transitive):
@@ -1079,12 +1079,12 @@ class TestSpecSematics(object):
# Transitivity shouldn't matter since Splice Z has no dependencies.
out2 = out.splice(dep, transitive)
assert out2.concrete
- assert out2['splice-z'].build_hash() != spec['splice-z'].build_hash()
- assert out2['splice-z'].build_hash() != out['splice-z'].build_hash()
- assert out2['splice-z'].full_hash() != spec['splice-z'].full_hash()
- assert out2['splice-z'].full_hash() != out['splice-z'].full_hash()
- assert (out2['splice-t'].build_spec.full_hash() ==
- spec['splice-t'].full_hash())
+ assert out2['splice-z'].runtime_hash() != spec['splice-z'].runtime_hash()
+ assert out2['splice-z'].runtime_hash() != out['splice-z'].runtime_hash()
+ assert out2['splice-z'].dag_hash() != spec['splice-z'].dag_hash()
+ assert out2['splice-z'].dag_hash() != out['splice-z'].dag_hash()
+ assert (out2['splice-t'].build_spec.dag_hash() ==
+ spec['splice-t'].dag_hash())
assert out2.spliced
@pytest.mark.parametrize('transitive', [True, False])
@@ -1096,13 +1096,13 @@ class TestSpecSematics(object):
out = spec.splice(dep, transitive)
# Sanity check all hashes are unique...
- assert spec.full_hash() != dep.full_hash()
- assert out.full_hash() != dep.full_hash()
- assert out.full_hash() != spec.full_hash()
+ assert spec.dag_hash() != dep.dag_hash()
+ assert out.dag_hash() != dep.dag_hash()
+ assert out.dag_hash() != spec.dag_hash()
node_list = out.to_dict()['spec']['nodes']
- root_nodes = [n for n in node_list if n['full_hash'] == out.full_hash()]
- build_spec_nodes = [n for n in node_list if n['full_hash'] == spec.full_hash()]
- assert spec.full_hash() == out.build_spec.full_hash()
+ root_nodes = [n for n in node_list if n['hash'] == out.dag_hash()]
+ build_spec_nodes = [n for n in node_list if n['hash'] == spec.dag_hash()]
+ assert spec.dag_hash() == out.build_spec.dag_hash()
assert len(root_nodes) == 1
assert len(build_spec_nodes) == 1
@@ -1115,28 +1115,28 @@ class TestSpecSematics(object):
out = spec.splice(dep, transitive)
# Sanity check all hashes are unique...
- assert spec.full_hash() != dep.full_hash()
- assert out.full_hash() != dep.full_hash()
- assert out.full_hash() != spec.full_hash()
+ assert spec.dag_hash() != dep.dag_hash()
+ assert out.dag_hash() != dep.dag_hash()
+ assert out.dag_hash() != spec.dag_hash()
out_rt_spec = Spec.from_dict(out.to_dict()) # rt is "round trip"
- assert out_rt_spec.full_hash() == out.full_hash()
- out_rt_spec_bld_hash = out_rt_spec.build_spec.full_hash()
- out_rt_spec_h_bld_hash = out_rt_spec['splice-h'].build_spec.full_hash()
- out_rt_spec_z_bld_hash = out_rt_spec['splice-z'].build_spec.full_hash()
+ assert out_rt_spec.dag_hash() == out.dag_hash()
+ out_rt_spec_bld_hash = out_rt_spec.build_spec.dag_hash()
+ out_rt_spec_h_bld_hash = out_rt_spec['splice-h'].build_spec.dag_hash()
+ out_rt_spec_z_bld_hash = out_rt_spec['splice-z'].build_spec.dag_hash()
# In any case, the build spec for splice-t (root) should point to the
# original spec, preserving build provenance.
- assert spec.full_hash() == out_rt_spec_bld_hash
- assert out_rt_spec.full_hash() != out_rt_spec_bld_hash
+ assert spec.dag_hash() == out_rt_spec_bld_hash
+ assert out_rt_spec.dag_hash() != out_rt_spec_bld_hash
# The build spec for splice-h should always point to the introduced
# spec, since that is the spec spliced in.
- assert dep['splice-h'].full_hash() == out_rt_spec_h_bld_hash
+ assert dep['splice-h'].dag_hash() == out_rt_spec_h_bld_hash
# The build spec for splice-z will depend on whether or not the splice
# was transitive.
- expected_z_bld_hash = (dep['splice-z'].full_hash() if transitive else
- spec['splice-z'].full_hash())
+ expected_z_bld_hash = (dep['splice-z'].dag_hash() if transitive else
+ spec['splice-z'].dag_hash())
assert expected_z_bld_hash == out_rt_spec_z_bld_hash
@pytest.mark.parametrize('spec,constraint,expected_result', [
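
Taken together, the splice tests now check a single-hash contract: identity is
the dag_hash, and build provenance hangs off build_spec. A minimal sketch, not
part of this patch, assuming the splice-t and splice-h mock packages:

    from spack.spec import Spec

    spec = Spec('splice-t').concretized()
    dep = Spec('splice-h+foo').concretized()

    out = spec.splice(dep, transitive=True)
    assert out.spliced
    assert out.dag_hash() != spec.dag_hash()
    # the root's build spec still points at the original, unspliced spec
    assert out.build_spec.dag_hash() == spec.dag_hash()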
diff --git a/lib/spack/spack/test/spec_syntax.py b/lib/spack/spack/test/spec_syntax.py
index 48e55254f4..3971e02d48 100644
--- a/lib/spack/spack/test/spec_syntax.py
+++ b/lib/spack/spack/test/spec_syntax.py
@@ -499,7 +499,7 @@ class TestSpecSyntax(object):
specfile = tmpdir.join('libdwarf.yaml')
with specfile.open('w') as f:
- f.write(s.to_yaml(hash=ht.build_hash))
+ f.write(s.to_yaml(hash=ht.dag_hash))
# Check an absolute path to spec.yaml by itself:
# "spack spec /path/to/libdwarf.yaml"
@@ -522,7 +522,7 @@ class TestSpecSyntax(object):
# write the file to the current directory to make sure it exists,
# and that we still do not parse the spec as a file.
with specfile.open('w') as f:
- f.write(s.to_yaml(hash=ht.build_hash))
+ f.write(s.to_yaml(hash=ht.dag_hash))
# Check the spec `libelf.yaml` in the working directory, which
# should evaluate to a spec called `yaml` in the `libelf`
@@ -562,7 +562,7 @@ class TestSpecSyntax(object):
specfile = tmpdir.join('libelf.yaml')
with specfile.open('w') as f:
- f.write(s['libelf'].to_yaml(hash=ht.build_hash))
+ f.write(s['libelf'].to_yaml(hash=ht.dag_hash))
# Make sure we can use yaml path as dependency, e.g.:
# "spack spec libdwarf ^ /path/to/libelf.yaml"
@@ -577,7 +577,7 @@ class TestSpecSyntax(object):
specfile = tmpdir.join('libdwarf.yaml')
with specfile.open('w') as f:
- f.write(s.to_yaml(hash=ht.build_hash))
+ f.write(s.to_yaml(hash=ht.dag_hash))
file_name = specfile.basename
parent_dir = os.path.basename(specfile.dirname)
@@ -612,7 +612,7 @@ class TestSpecSyntax(object):
specfile = tmpdir.mkdir('subdir').join('libdwarf.yaml')
with specfile.open('w') as f:
- f.write(s.to_yaml(hash=ht.build_hash))
+ f.write(s.to_yaml(hash=ht.dag_hash))
file_name = specfile.basename
@@ -632,7 +632,7 @@ class TestSpecSyntax(object):
specfile = tmpdir.join('libelf.yaml')
with specfile.open('w') as f:
- f.write(s['libelf'].to_yaml(hash=ht.build_hash))
+ f.write(s['libelf'].to_yaml(hash=ht.dag_hash))
file_name = specfile.basename
parent_dir = os.path.basename(specfile.dirname)
@@ -691,7 +691,7 @@ class TestSpecSyntax(object):
specfile = tmpdir.join('a.yaml')
with specfile.open('w') as f:
- f.write(s.to_yaml(hash=ht.build_hash))
+ f.write(s.to_yaml(hash=ht.dag_hash))
with pytest.raises(RedundantSpecError):
# Trying to change a variant on a concrete spec is an error
diff --git a/lib/spack/spack/test/spec_yaml.py b/lib/spack/spack/test/spec_yaml.py
index e3a7308605..3cfed7efa5 100644
--- a/lib/spack/spack/test/spec_yaml.py
+++ b/lib/spack/spack/test/spec_yaml.py
@@ -152,13 +152,8 @@ def test_using_ordered_dict(mock_packages):
assert level >= 5
-@pytest.mark.parametrize("hash_type", [
- ht.dag_hash,
- ht.build_hash,
- ht.full_hash
-])
def test_ordered_read_not_required_for_consistent_dag_hash(
- hash_type, config, mock_packages
+ config, mock_packages
):
"""Make sure ordered serialization isn't required to preserve hashes.
@@ -175,15 +170,15 @@ def test_ordered_read_not_required_for_consistent_dag_hash(
#
# Dict & corresponding YAML & JSON from the original spec.
#
- spec_dict = spec.to_dict(hash=hash_type)
- spec_yaml = spec.to_yaml(hash=hash_type)
- spec_json = spec.to_json(hash=hash_type)
+ spec_dict = spec.to_dict()
+ spec_yaml = spec.to_yaml()
+ spec_json = spec.to_json()
#
# Make a spec with reversed OrderedDicts for every
# OrderedDict in the original.
#
- reversed_spec_dict = reverse_all_dicts(spec.to_dict(hash=hash_type))
+ reversed_spec_dict = reverse_all_dicts(spec.to_dict())
#
# Dump to YAML and JSON
@@ -218,7 +213,7 @@ def test_ordered_read_not_required_for_consistent_dag_hash(
)
# Strip spec if we stripped the yaml
- spec = spec.copy(deps=hash_type.deptype)
+ spec = spec.copy(deps=ht.dag_hash.deptype)
# specs are equal to the original
assert spec == round_trip_yaml_spec
@@ -234,17 +229,16 @@ def test_ordered_read_not_required_for_consistent_dag_hash(
assert spec.dag_hash() == round_trip_reversed_yaml_spec.dag_hash()
assert spec.dag_hash() == round_trip_reversed_json_spec.dag_hash()
- # full_hashes are equal if we round-tripped by build_hash or full_hash
- if hash_type in (ht.build_hash, ht.full_hash):
- spec.concretize()
- round_trip_yaml_spec.concretize()
- round_trip_json_spec.concretize()
- round_trip_reversed_yaml_spec.concretize()
- round_trip_reversed_json_spec.concretize()
- assert spec.full_hash() == round_trip_yaml_spec.full_hash()
- assert spec.full_hash() == round_trip_json_spec.full_hash()
- assert spec.full_hash() == round_trip_reversed_yaml_spec.full_hash()
- assert spec.full_hash() == round_trip_reversed_json_spec.full_hash()
+    # dag hashes are equal after a round trip by dag_hash
+ spec.concretize()
+ round_trip_yaml_spec.concretize()
+ round_trip_json_spec.concretize()
+ round_trip_reversed_yaml_spec.concretize()
+ round_trip_reversed_json_spec.concretize()
+ assert spec.dag_hash() == round_trip_yaml_spec.dag_hash()
+ assert spec.dag_hash() == round_trip_json_spec.dag_hash()
+ assert spec.dag_hash() == round_trip_reversed_yaml_spec.dag_hash()
+ assert spec.dag_hash() == round_trip_reversed_json_spec.dag_hash()
@pytest.mark.parametrize("module", [
@@ -350,7 +344,7 @@ def test_save_dependency_spec_jsons_subset(tmpdir, config):
spec_a.concretize()
b_spec = spec_a['b']
c_spec = spec_a['c']
- spec_a_json = spec_a.to_json(hash=ht.build_hash)
+ spec_a_json = spec_a.to_json()
save_dependency_specfiles(spec_a_json, output_path, ['b', 'c'])
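
The round-trip property the rewritten spec_yaml tests reduce to, as a minimal
sketch, not part of this patch, assuming any concretized spec:

    from spack.spec import Spec

    spec = Spec('a').concretized()   # 'a': mock package (assumed)
    round_tripped = Spec.from_json(spec.to_json())
    assert round_tripped.dag_hash() == spec.dag_hash()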
diff --git a/share/spack/spack-completion.bash b/share/spack/spack-completion.bash
index 551608c751..5e40746276 100755
--- a/share/spack/spack-completion.bash
+++ b/share/spack/spack-completion.bash
@@ -544,7 +544,7 @@ _spack_buildcache_preview() {
}
_spack_buildcache_check() {
- SPACK_COMPREPLY="-h --help -m --mirror-url -o --output-file --scope -s --spec --spec-file --rebuild-on-error"
+ SPACK_COMPREPLY="-h --help -m --mirror-url -o --output-file --scope -s --spec --spec-file"
}
_spack_buildcache_download() {
@@ -1174,7 +1174,7 @@ _spack_info() {
_spack_install() {
if $list_options
then
- SPACK_COMPREPLY="-h --help --only -u --until -j --jobs --overwrite --fail-fast --keep-prefix --keep-stage --dont-restage --use-cache --no-cache --cache-only --monitor --monitor-save-local --monitor-tags --monitor-keep-going --monitor-host --monitor-prefix --include-build-deps --no-check-signature --require-full-hash-match --show-log-on-error --source -n --no-checksum --deprecated -v --verbose --fake --only-concrete --no-add -f --file --clean --dirty --test --log-format --log-file --help-cdash --cdash-upload-url --cdash-build --cdash-site --cdash-track --cdash-buildstamp -y --yes-to-all -U --fresh --reuse"
+ SPACK_COMPREPLY="-h --help --only -u --until -j --jobs --overwrite --fail-fast --keep-prefix --keep-stage --dont-restage --use-cache --no-cache --cache-only --monitor --monitor-save-local --monitor-tags --monitor-keep-going --monitor-host --monitor-prefix --include-build-deps --no-check-signature --show-log-on-error --source -n --no-checksum --deprecated -v --verbose --fake --only-concrete --no-add -f --file --clean --dirty --test --log-format --log-file --help-cdash --cdash-upload-url --cdash-build --cdash-site --cdash-track --cdash-buildstamp -y --yes-to-all -U --fresh --reuse"
else
_all_packages
fi