diff options
-rw-r--r--  lib/spack/spack/fetch_strategy.py | 4
-rw-r--r--  lib/spack/spack/stage.py          | 8
-rw-r--r--  lib/spack/spack/util/web.py       | 7
3 files changed, 11 insertions(+), 8 deletions(-)
diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py
index 370ccef593..409cc334ee 100644
--- a/lib/spack/spack/fetch_strategy.py
+++ b/lib/spack/spack/fetch_strategy.py
@@ -1129,7 +1129,7 @@ class S3FetchStrategy(URLFetchStrategy):
         parsed_url = url_util.parse(self.url)
         if parsed_url.scheme != 's3':
-            raise ValueError(
+            raise FetchError(
                 'S3FetchStrategy can only fetch from s3:// urls.')
 
         tty.msg("Fetching %s" % self.url)
@@ -1395,7 +1395,7 @@ class NoCacheError(FetchError):
 
 
 class FailedDownloadError(FetchError):
-    """Raised wen a download fails."""
+    """Raised when a download fails."""
     def __init__(self, url, msg=""):
         super(FailedDownloadError, self).__init__(
             "Failed to fetch file from URL: %s" % url, msg)
diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py
index 7869c5f863..d2dd3e6e7a 100644
--- a/lib/spack/spack/stage.py
+++ b/lib/spack/spack/stage.py
@@ -433,11 +433,9 @@ class Stage(object):
 
         # Add URL strategies for all the mirrors with the digest
         for url in urls:
-            fetchers.append(fs.from_url_scheme(
-                url, digest, expand=expand, extension=extension))
-            # fetchers.insert(
-            #     0, fs.URLFetchStrategy(
-            #         url, digest, expand=expand, extension=extension))
+            fetchers.insert(
+                0, fs.from_url_scheme(
+                    url, digest, expand=expand, extension=extension))
 
         if self.default_fetcher.cachable:
             for rel_path in reversed(list(self.mirror_paths)):
diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py
index f2afe769c6..1fe58d6415 100644
--- a/lib/spack/spack/util/web.py
+++ b/lib/spack/spack/util/web.py
@@ -177,7 +177,12 @@ def read_from_url(url, accept_content_type=None):
 
             # Do the real GET request when we know it's just HTML.
             req.get_method = lambda: "GET"
-            response = _urlopen(req, timeout=_timeout, context=context)
+
+            try:
+                response = _urlopen(req, timeout=_timeout, context=context)
+            except URLError as err:
+                raise SpackWebError('Download failed: {ERROR}'.format(
+                    ERROR=str(err)))
 
     if accept_content_type and not is_web_url:
         content_type = response.headers.get('Content-type')