author    Harmen Stoppels <harmenstoppels@gmail.com>  2021-10-12 22:11:07 +0200
committer GitHub <noreply@github.com>                 2021-10-12 22:11:07 +0200
commit    1ed695dca790c5e5e1a262ed974fb0fa043e71df (patch)
tree      4bf5ba2f09e5d089f093d47300d4415aac830c52 /lib
parent    b6ad9848d295d65a4afbe8b2b26285ed0e4039ae (diff)
Improve error messages for bootstrap download failures (#26599)
Diffstat (limited to 'lib')
-rw-r--r--  lib/spack/spack/binary_distribution.py  81
-rw-r--r--  lib/spack/spack/bootstrap.py              2
-rw-r--r--  lib/spack/spack/ci.py                     5
-rw-r--r--  lib/spack/spack/cmd/buildcache.py        12
-rw-r--r--  lib/spack/spack/test/bindist.py          23
5 files changed, 100 insertions, 23 deletions
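The gist of the change: failures to fetch a mirror's buildcache index are now collected and raised as a single composite FetchCacheError (added in binary_distribution.py below), so callers can report the underlying causes instead of a bare debug line. A rough, hypothetical sketch of what a caller now sees; the mirror URLs and wrapped error texts are invented for illustration:

    import llnl.util.tty as tty
    import spack.binary_distribution as bindist

    try:
        bindist.binary_index.update()
    except bindist.FetchCacheError as e:
        # With two unreachable mirrors the aggregated message reads roughly:
        #   Multiple errors during fetching:
        #    Error 1: RuntimeError: Unable to read index hash https://mirror-a.example/... due to URLError: ...
        #    Error 2: RuntimeError: Unable to read index https://mirror-b.example/... due to SpackWebError: ...
        tty.error(e)

This is the same try/except pattern the commit applies in ci.py and cmd/buildcache.py below.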
diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py
index c8e866ff14..c976dc80fb 100644
--- a/lib/spack/spack/binary_distribution.py
+++ b/lib/spack/spack/binary_distribution.py
@@ -47,6 +47,25 @@ _build_cache_relative_path = 'build_cache'
 _build_cache_keys_relative_path = '_pgp'
 
 
+class FetchCacheError(Exception):
+    """Error thrown when fetching the cache failed, usually a composite error list."""
+    def __init__(self, errors):
+        if not isinstance(errors, list):
+            raise TypeError("Expected a list of errors")
+        self.errors = errors
+        if len(errors) > 1:
+            msg = " Error {0}: {1}: {2}"
+            self.message = "Multiple errors during fetching:\n"
+            self.message += "\n".join((
+                msg.format(i + 1, err.__class__.__name__, str(err))
+                for (i, err) in enumerate(errors)
+            ))
+        else:
+            err = errors[0]
+            self.message = "{0}: {1}".format(err.__class__.__name__, str(err))
+        super(FetchCacheError, self).__init__(self.message)
+
+
 class BinaryCacheIndex(object):
     """
     The BinaryCacheIndex tracks what specs are available on (usually remote)
@@ -293,14 +312,22 @@ class BinaryCacheIndex(object):
         # Otherwise the concrete spec cache should not need to be updated at
         # all.
 
+        fetch_errors = []
+        all_methods_failed = True
+
         for cached_mirror_url in self._local_index_cache:
             cache_entry = self._local_index_cache[cached_mirror_url]
             cached_index_hash = cache_entry['index_hash']
             cached_index_path = cache_entry['index_path']
             if cached_mirror_url in configured_mirror_urls:
                 # May need to fetch the index and update the local caches
-                needs_regen = self._fetch_and_cache_index(
-                    cached_mirror_url, expect_hash=cached_index_hash)
+                try:
+                    needs_regen = self._fetch_and_cache_index(
+                        cached_mirror_url, expect_hash=cached_index_hash)
+                    all_methods_failed = False
+                except FetchCacheError as fetch_error:
+                    needs_regen = False
+                    fetch_errors.extend(fetch_error.errors)
                 # The need to regenerate implies a need to clear as well.
                 spec_cache_clear_needed |= needs_regen
                 spec_cache_regenerate_needed |= needs_regen
@@ -327,7 +354,12 @@ class BinaryCacheIndex(object):
         for mirror_url in configured_mirror_urls:
             if mirror_url not in self._local_index_cache:
                 # Need to fetch the index and update the local caches
-                needs_regen = self._fetch_and_cache_index(mirror_url)
+                try:
+                    needs_regen = self._fetch_and_cache_index(mirror_url)
+                    all_methods_failed = False
+                except FetchCacheError as fetch_error:
+                    fetch_errors.extend(fetch_error.errors)
+                    needs_regen = False
                 # Generally speaking, a new mirror wouldn't imply the need to
                 # clear the spec cache, so leave it as is.
                 if needs_regen:
@@ -335,7 +367,9 @@ class BinaryCacheIndex(object):
 
         self._write_local_index_cache()
 
-        if spec_cache_regenerate_needed:
+        if all_methods_failed:
+            raise FetchCacheError(fetch_errors)
+        elif spec_cache_regenerate_needed:
             self.regenerate_spec_cache(clear_existing=spec_cache_clear_needed)
 
     def _fetch_and_cache_index(self, mirror_url, expect_hash=None):
@@ -354,6 +388,8 @@ class BinaryCacheIndex(object):
             True if this function thinks the concrete spec cache,
             ``_mirrors_for_spec``, should be regenerated. Returns False
             otherwise.
+        Throws:
+            FetchCacheError: a composite exception.
         """
         index_fetch_url = url_util.join(
             mirror_url, _build_cache_relative_path, 'index.json')
@@ -363,14 +399,19 @@ class BinaryCacheIndex(object):
         old_cache_key = None
         fetched_hash = None
 
+        errors = []
+
         # Fetch the hash first so we can check if we actually need to fetch
         # the index itself.
         try:
             _, _, fs = web_util.read_from_url(hash_fetch_url)
             fetched_hash = codecs.getreader('utf-8')(fs).read()
         except (URLError, web_util.SpackWebError) as url_err:
-            tty.debug('Unable to read index hash {0}'.format(
-                hash_fetch_url), url_err, 1)
+            errors.append(
+                RuntimeError("Unable to read index hash {0} due to {1}: {2}".format(
+                    hash_fetch_url, url_err.__class__.__name__, str(url_err)
+                ))
+            )
 
         # The only case where we'll skip attempting to fetch the buildcache
         # index from the mirror is when we already have a hash for this
@@ -397,24 +438,23 @@ class BinaryCacheIndex(object):
             _, _, fs = web_util.read_from_url(index_fetch_url)
             index_object_str = codecs.getreader('utf-8')(fs).read()
         except (URLError, web_util.SpackWebError) as url_err:
-            tty.debug('Unable to read index {0}'.format(index_fetch_url),
-                      url_err, 1)
-            # We failed to fetch the index, even though we decided it was
-            # necessary. However, regenerating the spec cache won't produce
-            # anything different than what it has already, so return False.
-            return False
+            errors.append(
+                RuntimeError("Unable to read index {0} due to {1}: {2}".format(
+                    index_fetch_url, url_err.__class__.__name__, str(url_err)
+                ))
+            )
+            raise FetchCacheError(errors)
 
         locally_computed_hash = compute_hash(index_object_str)
 
         if fetched_hash is not None and locally_computed_hash != fetched_hash:
-            msg_tmpl = ('Computed hash ({0}) did not match remote ({1}), '
-                        'indicating error in index transmission')
-            tty.error(msg_tmpl.format(locally_computed_hash, expect_hash))
+            msg = ('Computed hash ({0}) did not match remote ({1}), '
+                   'indicating error in index transmission').format(
+                       locally_computed_hash, expect_hash)
+            errors.append(RuntimeError(msg))
             # We somehow got an index that doesn't match the remote one, maybe
-            # the next time we try we'll be successful. Regardless, we're not
-            # updating our index cache with this, so don't regenerate the spec
-            # cache either.
-            return False
+            # the next time we try we'll be successful.
+            raise FetchCacheError(errors)
 
         url_hash = compute_hash(mirror_url)
@@ -1559,6 +1599,9 @@ def update_cache_and_get_specs():
     possible, so this method will also attempt to initialize and update the
     local index cache (essentially a no-op if it has been done already and
     nothing has changed on the configured mirrors.)
+
+    Throws:
+        FetchCacheError
     """
     binary_index.update()
     return binary_index.get_all_built_specs()
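The control flow added to BinaryCacheIndex.update() boils down to: try every configured mirror, remember each failure, and only raise a composite FetchCacheError when no mirror could be fetched at all. A simplified standalone sketch of that pattern (update_all and fetch_one are illustrative stand-ins, not Spack API):

    from spack.binary_distribution import FetchCacheError

    def update_all(mirror_urls, fetch_one):
        """Fetch from every mirror; raise only if all of them failed."""
        errors = []
        all_failed = True
        for url in mirror_urls:
            try:
                fetch_one(url)              # may raise FetchCacheError
                all_failed = False          # at least one mirror worked
            except FetchCacheError as e:
                errors.extend(e.errors)     # keep the underlying causes
        if all_failed:
            raise FetchCacheError(errors)   # composite error, as in update()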
diff --git a/lib/spack/spack/bootstrap.py b/lib/spack/spack/bootstrap.py
index 3fad57b5ae..0bdf49c5e8 100644
--- a/lib/spack/spack/bootstrap.py
+++ b/lib/spack/spack/bootstrap.py
@@ -218,7 +218,7 @@ class _BuildcacheBootstrapper(object):
             index = spack.binary_distribution.update_cache_and_get_specs()
 
             if not index:
-                raise RuntimeError("could not populate the binary index")
+                raise RuntimeError("The binary index is empty")
 
             for item in data['verified']:
                 candidate_spec = item['spec']
diff --git a/lib/spack/spack/ci.py b/lib/spack/spack/ci.py
index 32b5da451a..733b5f3384 100644
--- a/lib/spack/spack/ci.py
+++ b/lib/spack/spack/ci.py
@@ -668,7 +668,10 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
     # Speed up staging by first fetching binary indices from all mirrors
     # (including the per-PR mirror we may have just added above).
-    bindist.binary_index.update()
+    try:
+        bindist.binary_index.update()
+    except bindist.FetchCacheError as e:
+        tty.error(e)
 
     staged_phases = {}
     try:
diff --git a/lib/spack/spack/cmd/buildcache.py b/lib/spack/spack/cmd/buildcache.py
index 889c93d690..13b0158ff4 100644
--- a/lib/spack/spack/cmd/buildcache.py
+++ b/lib/spack/spack/cmd/buildcache.py
@@ -334,7 +334,11 @@ def match_downloaded_specs(pkgs, allow_multiple_matches=False, force=False,
     specs_from_cli = []
     has_errors = False
-    specs = bindist.update_cache_and_get_specs()
+    try:
+        specs = bindist.update_cache_and_get_specs()
+    except bindist.FetchCacheError as e:
+        tty.error(e)
+
     if not other_arch:
         arch = spack.spec.Spec.default_arch()
         specs = [s for s in specs if s.satisfies(arch)]
@@ -560,7 +564,11 @@ def install_tarball(spec, args):
 def listspecs(args):
     """list binary packages available from mirrors"""
-    specs = bindist.update_cache_and_get_specs()
+    try:
+        specs = bindist.update_cache_and_get_specs()
+    except bindist.FetchCacheError as e:
+        tty.error(e)
+
     if not args.allarch:
         arch = spack.spec.Spec.default_arch()
         specs = [s for s in specs if s.satisfies(arch)]
diff --git a/lib/spack/spack/test/bindist.py b/lib/spack/spack/test/bindist.py
index abaa0905d0..dbcf0d8ea6 100644
--- a/lib/spack/spack/test/bindist.py
+++ b/lib/spack/spack/test/bindist.py
@@ -666,3 +666,26 @@ def test_update_index_fix_deps(monkeypatch, tmpdir, mutable_config):
 
     # Make sure the full hash of b in a's spec json matches the new value
     assert(a_prime[b.name].full_hash() == new_b_full_hash)
+
+
+def test_FetchCacheError_only_accepts_lists_of_errors():
+    with pytest.raises(TypeError, match="list"):
+        bindist.FetchCacheError("error")
+
+
+def test_FetchCacheError_pretty_printing_multiple():
+    e = bindist.FetchCacheError([RuntimeError("Oops!"), TypeError("Trouble!")])
+    str_e = str(e)
+    print("'" + str_e + "'")
+    assert "Multiple errors" in str_e
+    assert "Error 1: RuntimeError: Oops!" in str_e
+    assert "Error 2: TypeError: Trouble!" in str_e
+    assert str_e.rstrip() == str_e
+
+
+def test_FetchCacheError_pretty_printing_single():
+    e = bindist.FetchCacheError([RuntimeError("Oops!")])
+    str_e = str(e)
+    assert "Multiple errors" not in str_e
+    assert "RuntimeError: Oops!" in str_e
+    assert str_e.rstrip() == str_e
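Because FetchCacheError keeps the original exceptions in its errors attribute (see the class added in binary_distribution.py above), callers are not limited to the formatted message. A small illustrative example, assuming Spack's lib directory is importable; the error texts are made up:

    from spack.binary_distribution import FetchCacheError

    e = FetchCacheError([RuntimeError("mirror A unreachable"),
                         RuntimeError("mirror B returned 404")])
    print(e)                      # the aggregated, numbered message the tests assert on
    for err in e.errors:          # the underlying exceptions, in the order they occurred
        print(type(err).__name__, err)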