summaryrefslogtreecommitdiff
path: root/lib
diff options
context:
space:
mode:
author Dom Heinzeller <dom.heinzeller@icloud.com> 2022-05-09 19:35:17 +0200
committer GitHub <noreply@github.com> 2022-05-09 10:35:17 -0700
commit c49508648a5c4d36fe3985b1e4eb39b4cfa3b128 (patch)
tree 11ae4595f51e67658fd9153383b66f836cd1bd24 /lib
parent 9bcf496f2185d1a379379606f8793b966c703655 (diff)
download spack-c49508648a5c4d36fe3985b1e4eb39b4cfa3b128.tar.gz
spack-c49508648a5c4d36fe3985b1e4eb39b4cfa3b128.tar.bz2
spack-c49508648a5c4d36fe3985b1e4eb39b4cfa3b128.tar.xz
spack-c49508648a5c4d36fe3985b1e4eb39b4cfa3b128.zip
Get timeout for web requests with urllib from spack config, same as for curl (#30468)
Diffstat (limited to 'lib')
-rw-r--r--lib/spack/spack/util/web.py10
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py
index 6896c308cf..4bd0d92586 100644
--- a/lib/spack/spack/util/web.py
+++ b/lib/spack/spack/util/web.py
@@ -49,9 +49,6 @@ else:
class HTMLParseError(Exception):
pass
-# Timeout in seconds for web requests
-_timeout = 10
-
class LinkParser(HTMLParser):
"""This parser just takes an HTML page and strips out the hrefs on the
@@ -100,6 +97,9 @@ def read_from_url(url, accept_content_type=None):
verify_ssl = spack.config.get('config:verify_ssl')
+ # Timeout in seconds for web requests
+ timeout = spack.config.get('config:connect_timeout', 10)
+
# Don't even bother with a context unless the URL scheme is one that uses
# SSL certs.
if uses_ssl(url):
@@ -131,7 +131,7 @@ def read_from_url(url, accept_content_type=None):
# one round-trip. However, most servers seem to ignore the header
# if you ask for a tarball with Accept: text/html.
req.get_method = lambda: "HEAD"
- resp = _urlopen(req, timeout=_timeout, context=context)
+ resp = _urlopen(req, timeout=timeout, context=context)
content_type = get_header(resp.headers, 'Content-type')
@@ -139,7 +139,7 @@ def read_from_url(url, accept_content_type=None):
req.get_method = lambda: "GET"
try:
- response = _urlopen(req, timeout=_timeout, context=context)
+ response = _urlopen(req, timeout=timeout, context=context)
except URLError as err:
raise SpackWebError('Download failed: {ERROR}'.format(
ERROR=str(err)))