author     Harmen Stoppels <me@harmenstoppels.nl>    2024-08-12 13:06:13 +0200
committer  GitHub <noreply@github.com>               2024-08-12 13:06:13 +0200
commit     3b59817ea7be35f4222dc88313734f7a30733ad7 (patch)
tree       491f86ae21a4aa39ac269bd3e9c2d4b84173da39 /lib
parent     06eacdf9d83e02812f06a405bf6a5a678e04be94 (diff)
deal with TimeoutError from ssl.py (#45683)
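Background for this change (a minimal sketch, not part of the commit): on Python 3.10+, socket.timeout is an alias of the builtin TimeoutError, which is an OSError subclass but not a subclass of urllib.error.URLError. A timeout raised from ssl.py while reading a response therefore escapes an `except URLError` clause, which is why the handlers in this diff catch `(TimeoutError, URLError)` together. The fetch() helper below is hypothetical and only illustrates that pattern, assuming Python 3.10+:

    import urllib.error
    import urllib.request

    # On Python 3.10+, socket.timeout is an alias of the builtin TimeoutError.
    # It is NOT wrapped in urllib.error.URLError when raised from ssl.py mid-read,
    # so catching URLError alone lets ssl-level timeouts propagate.

    def fetch(url: str, timeout: float = 10.0) -> bytes:
        # Hypothetical helper showing the catch-both pattern used in this commit.
        try:
            with urllib.request.urlopen(url, timeout=timeout) as response:
                return response.read()
        except (TimeoutError, urllib.error.URLError) as e:
            raise RuntimeError(f"could not fetch {url}") from e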
Diffstat (limited to 'lib')
-rw-r--r--  lib/spack/spack/binary_distribution.py  | 60
-rw-r--r--  lib/spack/spack/ci.py                    |  4
-rw-r--r--  lib/spack/spack/fetch_strategy.py        |  2
-rw-r--r--  lib/spack/spack/util/web.py              | 12
4 files changed, 30 insertions(+), 48 deletions(-)
diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py
index 8fe272db7d..6108e69548 100644
--- a/lib/spack/spack/binary_distribution.py
+++ b/lib/spack/spack/binary_distribution.py
@@ -23,7 +23,6 @@ import urllib.request
import warnings
from contextlib import closing
from typing import Dict, Iterable, List, NamedTuple, Optional, Set, Tuple
-from urllib.error import HTTPError, URLError
import llnl.util.filesystem as fsys
import llnl.util.lang
@@ -899,9 +898,8 @@ def _specs_from_cache_fallback(cache_prefix):
try:
_, _, spec_file = web_util.read_from_url(url)
contents = codecs.getreader("utf-8")(spec_file).read()
- except (URLError, web_util.SpackWebError) as url_err:
- tty.error("Error reading specfile: {0}".format(url))
- tty.error(url_err)
+ except web_util.SpackWebError as e:
+ tty.error(f"Error reading specfile: {url}: {e}")
return contents
try:
@@ -2041,21 +2039,17 @@ def try_direct_fetch(spec, mirrors=None):
try:
_, _, fs = web_util.read_from_url(buildcache_fetch_url_signed_json)
specfile_is_signed = True
- except (URLError, web_util.SpackWebError, HTTPError) as url_err:
+ except web_util.SpackWebError as e1:
try:
_, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
- except (URLError, web_util.SpackWebError, HTTPError) as url_err_x:
+ except web_util.SpackWebError as e2:
tty.debug(
- "Did not find {0} on {1}".format(
- specfile_name, buildcache_fetch_url_signed_json
- ),
- url_err,
+ f"Did not find {specfile_name} on {buildcache_fetch_url_signed_json}",
+ e1,
level=2,
)
tty.debug(
- "Did not find {0} on {1}".format(specfile_name, buildcache_fetch_url_json),
- url_err_x,
- level=2,
+ f"Did not find {specfile_name} on {buildcache_fetch_url_json}", e2, level=2
)
continue
specfile_contents = codecs.getreader("utf-8")(fs).read()
@@ -2150,19 +2144,12 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
try:
_, _, json_file = web_util.read_from_url(keys_index)
json_index = sjson.load(codecs.getreader("utf-8")(json_file))
- except (URLError, web_util.SpackWebError) as url_err:
+ except web_util.SpackWebError as url_err:
if web_util.url_exists(keys_index):
- err_msg = [
- "Unable to find public keys in {0},",
- " caught exception attempting to read from {1}.",
- ]
-
tty.error(
- "".join(err_msg).format(
- url_util.format(fetch_url), url_util.format(keys_index)
- )
+ f"Unable to find public keys in {url_util.format(fetch_url)},"
+ f" caught exception attempting to read from {url_util.format(keys_index)}."
)
-
tty.debug(url_err)
continue
@@ -2442,7 +2429,7 @@ class DefaultIndexFetcher:
url_index_hash = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json.hash")
try:
response = self.urlopen(urllib.request.Request(url_index_hash, headers=self.headers))
- except urllib.error.URLError:
+ except (TimeoutError, urllib.error.URLError):
return None
# Validate the hash
@@ -2464,7 +2451,7 @@ class DefaultIndexFetcher:
try:
response = self.urlopen(urllib.request.Request(url_index, headers=self.headers))
- except urllib.error.URLError as e:
+ except (TimeoutError, urllib.error.URLError) as e:
raise FetchIndexError("Could not fetch index from {}".format(url_index), e) from e
try:
@@ -2505,10 +2492,7 @@ class EtagIndexFetcher:
def conditional_fetch(self) -> FetchIndexResult:
# Just do a conditional fetch immediately
url = url_util.join(self.url, BUILD_CACHE_RELATIVE_PATH, "index.json")
- headers = {
- "User-Agent": web_util.SPACK_USER_AGENT,
- "If-None-Match": '"{}"'.format(self.etag),
- }
+ headers = {"User-Agent": web_util.SPACK_USER_AGENT, "If-None-Match": f'"{self.etag}"'}
try:
response = self.urlopen(urllib.request.Request(url, headers=headers))
@@ -2516,14 +2500,14 @@ class EtagIndexFetcher:
if e.getcode() == 304:
# Not modified; that means fresh.
return FetchIndexResult(etag=None, hash=None, data=None, fresh=True)
- raise FetchIndexError("Could not fetch index {}".format(url), e) from e
- except urllib.error.URLError as e:
- raise FetchIndexError("Could not fetch index {}".format(url), e) from e
+ raise FetchIndexError(f"Could not fetch index {url}", e) from e
+ except (TimeoutError, urllib.error.URLError) as e:
+ raise FetchIndexError(f"Could not fetch index {url}", e) from e
try:
result = codecs.getreader("utf-8")(response).read()
except ValueError as e:
- raise FetchIndexError("Remote index {} is invalid".format(url), e) from e
+ raise FetchIndexError(f"Remote index {url} is invalid", e) from e
headers = response.headers
etag_header_value = headers.get("Etag", None) or headers.get("etag", None)
@@ -2554,21 +2538,19 @@ class OCIIndexFetcher:
headers={"Accept": "application/vnd.oci.image.manifest.v1+json"},
)
)
- except urllib.error.URLError as e:
- raise FetchIndexError(
- "Could not fetch manifest from {}".format(url_manifest), e
- ) from e
+ except (TimeoutError, urllib.error.URLError) as e:
+ raise FetchIndexError(f"Could not fetch manifest from {url_manifest}", e) from e
try:
manifest = json.loads(response.read())
except Exception as e:
- raise FetchIndexError("Remote index {} is invalid".format(url_manifest), e) from e
+ raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e
# Get first blob hash, which should be the index.json
try:
index_digest = spack.oci.image.Digest.from_string(manifest["layers"][0]["digest"])
except Exception as e:
- raise FetchIndexError("Remote index {} is invalid".format(url_manifest), e) from e
+ raise FetchIndexError(f"Remote index {url_manifest} is invalid", e) from e
# Fresh?
if index_digest.digest == self.local_hash:
diff --git a/lib/spack/spack/ci.py b/lib/spack/spack/ci.py
index db8e8f1a35..95e23cc64f 100644
--- a/lib/spack/spack/ci.py
+++ b/lib/spack/spack/ci.py
@@ -1107,7 +1107,7 @@ def generate_gitlab_ci_yaml(
if cdash_handler and cdash_handler.auth_token:
try:
cdash_handler.populate_buildgroup(all_job_names)
- except (SpackError, HTTPError, URLError) as err:
+ except (SpackError, HTTPError, URLError, TimeoutError) as err:
tty.warn(f"Problem populating buildgroup: {err}")
else:
tty.warn("Unable to populate buildgroup without CDash credentials")
@@ -2083,7 +2083,7 @@ def read_broken_spec(broken_spec_url):
"""
try:
_, _, fs = web_util.read_from_url(broken_spec_url)
- except (URLError, web_util.SpackWebError, HTTPError):
+ except web_util.SpackWebError:
tty.warn(f"Unable to read broken spec from {broken_spec_url}")
return None
diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py
index c803b304c2..4aa7f339de 100644
--- a/lib/spack/spack/fetch_strategy.py
+++ b/lib/spack/spack/fetch_strategy.py
@@ -554,7 +554,7 @@ class OCIRegistryFetchStrategy(URLFetchStrategy):
try:
response = self._urlopen(self.url)
- except urllib.error.URLError as e:
+ except (TimeoutError, urllib.error.URLError) as e:
# clean up archive on failure.
if self.archive_file:
os.remove(self.archive_file)
diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py
index 8c843c5346..b681bb4950 100644
--- a/lib/spack/spack/util/web.py
+++ b/lib/spack/spack/util/web.py
@@ -197,8 +197,8 @@ def read_from_url(url, accept_content_type=None):
try:
response = urlopen(request)
- except URLError as err:
- raise SpackWebError("Download failed: {}".format(str(err)))
+ except (TimeoutError, URLError) as e:
+ raise SpackWebError(f"Download of {url.geturl()} failed: {e}")
if accept_content_type:
try:
@@ -458,8 +458,8 @@ def url_exists(url, curl=None):
timeout=spack.config.get("config:connect_timeout", 10),
)
return True
- except URLError as e:
- tty.debug("Failure reading URL: " + str(e))
+ except (TimeoutError, URLError) as e:
+ tty.debug(f"Failure reading {url}: {e}")
return False
@@ -740,10 +740,10 @@ def _spider(url: urllib.parse.ParseResult, collect_nested: bool, _visited: Set[s
subcalls.append(abs_link)
_visited.add(abs_link)
- except URLError as e:
+ except (TimeoutError, URLError) as e:
tty.debug(f"[SPIDER] Unable to read: {url}")
tty.debug(str(e), level=2)
- if hasattr(e, "reason") and isinstance(e.reason, ssl.SSLError):
+ if isinstance(e, URLError) and isinstance(e.reason, ssl.SSLError):
tty.warn(
"Spack was unable to fetch url list due to a "
"certificate verification problem. You can try "