author     loulawrence <72574166+loulawrence@users.noreply.github.com>  2021-08-02 04:30:25 -0400
committer  GitHub <noreply@github.com>  2021-08-02 10:30:25 +0200
commit     be3e6a0e9b8c243a4aec2a550b6512fe32553c13 (patch)
tree       b1a3a1b15f9dea644babb5277eabf2cf4908e159
parent     cd8f7d844d03bd82bece18524ec16321e7a97747 (diff)
document config option "url_fetch_method" (#24638)
- Change config from the undocumented `use_curl: true/false` to `url_fetch_method: urllib/curl`.
- Documentation of `url_fetch_method` in `defaults/config.yaml`
- Default fetch option explicitly set to `urllib` for users who may not have curl on their system

To upgrade from `use_curl` to `url_fetch_method`, run `spack config update config`
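For illustration only (not part of the commit): the shipped default is `urllib`, and a user who prefers curl would set the new key in their config.yaml roughly like this (hypothetical user file):

    config:
      url_fetch_method: curl   # or 'urllib' (the default); other values are rejected by the schema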
-rw-r--r--  etc/spack/defaults/config.yaml        4
-rw-r--r--  lib/spack/spack/fetch_strategy.py     4
-rw-r--r--  lib/spack/spack/schema/config.py     10
-rw-r--r--  lib/spack/spack/test/cache_fetch.py  12
-rw-r--r--  lib/spack/spack/test/s3_fetch.py     18
-rw-r--r--  lib/spack/spack/test/url_fetch.py    54
6 files changed, 58 insertions, 44 deletions
diff --git a/etc/spack/defaults/config.yaml b/etc/spack/defaults/config.yaml
index 2400686e60..64a50cc805 100644
--- a/etc/spack/defaults/config.yaml
+++ b/etc/spack/defaults/config.yaml
@@ -134,6 +134,10 @@ config:
# enabling locks.
locks: true
+ # The default url fetch method to use.
+ # If set to 'curl', Spack will require curl on the user's system
+ # If set to 'urllib', Spack will use python built-in libs to fetch
+ url_fetch_method: urllib
# The maximum number of jobs to use for the build system (e.g. `make`), when
# the -j flag is not given on the command line. Defaults to 16 when not set.
diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py
index f5eecdcb5d..136d786e3f 100644
--- a/lib/spack/spack/fetch_strategy.py
+++ b/lib/spack/spack/fetch_strategy.py
@@ -338,7 +338,7 @@ class URLFetchStrategy(FetchStrategy):
def _existing_url(self, url):
tty.debug('Checking existence of {0}'.format(url))
- if spack.config.get('config:use_curl'):
+ if spack.config.get('config:url_fetch_method') == 'curl':
curl = self.curl
# Telling curl to fetch the first byte (-r 0-0) is supposed to be
# portable.
@@ -357,7 +357,7 @@ class URLFetchStrategy(FetchStrategy):
return (response.getcode() is None or response.getcode() == 200)
def _fetch_from_url(self, url):
- if spack.config.get('config:use_curl'):
+ if spack.config.get('config:url_fetch_method') == 'curl':
return self._fetch_curl(url)
else:
return self._fetch_urllib(url)
diff --git a/lib/spack/spack/schema/config.py b/lib/spack/spack/schema/config.py
index 1df922b137..0cf533eb18 100644
--- a/lib/spack/spack/schema/config.py
+++ b/lib/spack/spack/schema/config.py
@@ -99,6 +99,10 @@ properties = {
},
'allow_sgid': {'type': 'boolean'},
'binary_index_root': {'type': 'string'},
+ 'url_fetch_method': {
+ 'type': 'string',
+ 'enum': ['urllib', 'curl']
+ },
},
},
}
@@ -153,4 +157,10 @@ def update(data):
update_data = spack.config.merge_yaml(update_data, projections_data)
data['install_tree'] = update_data
changed = True
+
+ use_curl = data.pop('use_curl', None)
+ if use_curl is not None:
+ data['url_fetch_method'] = 'curl' if use_curl else 'urllib'
+ changed = True
+
return changed
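For illustration only (not part of the diff): when a user runs `spack config update config` on a legacy file, the `update()` hook above rewrites the deprecated entry roughly as follows (hypothetical before/after of a user's config.yaml):

    # before: deprecated boolean flag
    config:
      use_curl: true

    # after: the flag is popped and mapped to the new enum value
    config:
      url_fetch_method: curl   # `use_curl: false` would map to 'urllib'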
diff --git a/lib/spack/spack/test/cache_fetch.py b/lib/spack/spack/test/cache_fetch.py
index 0929bfd12c..a3b9cafed8 100644
--- a/lib/spack/spack/test/cache_fetch.py
+++ b/lib/spack/spack/test/cache_fetch.py
@@ -14,25 +14,25 @@ from spack.fetch_strategy import CacheURLFetchStrategy, NoCacheError
from spack.stage import Stage
-@pytest.mark.parametrize('use_curl', [True, False])
-def test_fetch_missing_cache(tmpdir, use_curl):
+@pytest.mark.parametrize('_fetch_method', ['curl', 'urllib'])
+def test_fetch_missing_cache(tmpdir, _fetch_method):
"""Ensure raise a missing cache file."""
testpath = str(tmpdir)
- with spack.config.override('config:use_curl', use_curl):
+ with spack.config.override('config:url_fetch_method', _fetch_method):
fetcher = CacheURLFetchStrategy(url='file:///not-a-real-cache-file')
with Stage(fetcher, path=testpath):
with pytest.raises(NoCacheError, match=r'No cache'):
fetcher.fetch()
-@pytest.mark.parametrize('use_curl', [True, False])
-def test_fetch(tmpdir, use_curl):
+@pytest.mark.parametrize('_fetch_method', ['curl', 'urllib'])
+def test_fetch(tmpdir, _fetch_method):
"""Ensure a fetch after expanding is effectively a no-op."""
testpath = str(tmpdir)
cache = os.path.join(testpath, 'cache.tar.gz')
touch(cache)
url = 'file:///{0}'.format(cache)
- with spack.config.override('config:use_curl', use_curl):
+ with spack.config.override('config:url_fetch_method', _fetch_method):
fetcher = CacheURLFetchStrategy(url=url)
with Stage(fetcher, path=testpath) as stage:
source_path = stage.source_path
diff --git a/lib/spack/spack/test/s3_fetch.py b/lib/spack/spack/test/s3_fetch.py
index 2fc9e3de84..fbff60033d 100644
--- a/lib/spack/spack/test/s3_fetch.py
+++ b/lib/spack/spack/test/s3_fetch.py
@@ -12,20 +12,20 @@ import spack.fetch_strategy as spack_fs
import spack.stage as spack_stage
-@pytest.mark.parametrize('use_curl', [True, False])
-def test_s3fetchstrategy_sans_url(use_curl):
+@pytest.mark.parametrize('_fetch_method', ['curl', 'urllib'])
+def test_s3fetchstrategy_sans_url(_fetch_method):
"""Ensure constructor with no URL fails."""
- with spack_config.override('config:use_curl', use_curl):
+ with spack_config.override('config:url_fetch_method', _fetch_method):
with pytest.raises(ValueError):
spack_fs.S3FetchStrategy(None)
-@pytest.mark.parametrize('use_curl', [True, False])
-def test_s3fetchstrategy_bad_url(tmpdir, use_curl):
+@pytest.mark.parametrize('_fetch_method', ['curl', 'urllib'])
+def test_s3fetchstrategy_bad_url(tmpdir, _fetch_method):
"""Ensure fetch with bad URL fails as expected."""
testpath = str(tmpdir)
- with spack_config.override('config:use_curl', use_curl):
+ with spack_config.override('config:url_fetch_method', _fetch_method):
fetcher = spack_fs.S3FetchStrategy(url='file:///does-not-exist')
assert fetcher is not None
@@ -36,13 +36,13 @@ def test_s3fetchstrategy_bad_url(tmpdir, use_curl):
fetcher.fetch()
-@pytest.mark.parametrize('use_curl', [True, False])
-def test_s3fetchstrategy_downloaded(tmpdir, use_curl):
+@pytest.mark.parametrize('_fetch_method', ['curl', 'urllib'])
+def test_s3fetchstrategy_downloaded(tmpdir, _fetch_method):
"""Ensure fetch with archive file already downloaded is a noop."""
testpath = str(tmpdir)
archive = os.path.join(testpath, 's3.tar.gz')
- with spack_config.override('config:use_curl', use_curl):
+ with spack_config.override('config:url_fetch_method', _fetch_method):
class Archived_S3FS(spack_fs.S3FetchStrategy):
@property
def archive_file(self):
diff --git a/lib/spack/spack/test/url_fetch.py b/lib/spack/spack/test/url_fetch.py
index b466c40c06..b0bd53af72 100644
--- a/lib/spack/spack/test/url_fetch.py
+++ b/lib/spack/spack/test/url_fetch.py
@@ -49,20 +49,20 @@ def pkg_factory():
return factory
-@pytest.mark.parametrize('use_curl', [True, False])
-def test_urlfetchstrategy_sans_url(use_curl):
+@pytest.mark.parametrize('_fetch_method', ['curl', 'urllib'])
+def test_urlfetchstrategy_sans_url(_fetch_method):
"""Ensure constructor with no URL fails."""
- with spack.config.override('config:use_curl', use_curl):
+ with spack.config.override('config:url_fetch_method', _fetch_method):
with pytest.raises(ValueError):
with fs.URLFetchStrategy(None):
pass
-@pytest.mark.parametrize('use_curl', [True, False])
-def test_urlfetchstrategy_bad_url(tmpdir, use_curl):
+@pytest.mark.parametrize('_fetch_method', ['curl', 'urllib'])
+def test_urlfetchstrategy_bad_url(tmpdir, _fetch_method):
"""Ensure fetch with bad URL fails as expected."""
testpath = str(tmpdir)
- with spack.config.override('config:use_curl', use_curl):
+ with spack.config.override('config:url_fetch_method', _fetch_method):
with pytest.raises(fs.FailedDownloadError):
fetcher = fs.URLFetchStrategy(url='file:///does-not-exist')
assert fetcher is not None
@@ -75,7 +75,7 @@ def test_urlfetchstrategy_bad_url(tmpdir, use_curl):
def test_fetch_options(tmpdir, mock_archive):
testpath = str(tmpdir)
- with spack.config.override('config:use_curl', True):
+ with spack.config.override('config:url_fetch_method', 'curl'):
fetcher = fs.URLFetchStrategy(url=mock_archive.url,
fetch_options={'cookie': 'True',
'timeout': 10})
@@ -87,11 +87,11 @@ def test_fetch_options(tmpdir, mock_archive):
fetcher.fetch()
-@pytest.mark.parametrize('use_curl', [True, False])
-def test_archive_file_errors(tmpdir, mock_archive, use_curl):
+@pytest.mark.parametrize('_fetch_method', ['curl', 'urllib'])
+def test_archive_file_errors(tmpdir, mock_archive, _fetch_method):
"""Ensure FetchStrategy commands may only be used as intended"""
testpath = str(tmpdir)
- with spack.config.override('config:use_curl', use_curl):
+ with spack.config.override('config:url_fetch_method', _fetch_method):
fetcher = fs.URLFetchStrategy(url=mock_archive.url)
assert fetcher is not None
with pytest.raises(fs.FailedDownloadError):
@@ -112,7 +112,7 @@ def test_archive_file_errors(tmpdir, mock_archive, use_curl):
@pytest.mark.parametrize('secure', [True, False])
-@pytest.mark.parametrize('use_curl', [True, False])
+@pytest.mark.parametrize('_fetch_method', ['curl', 'urllib'])
@pytest.mark.parametrize('mock_archive',
[('.tar.gz', 'z'), ('.tgz', 'z'),
('.tar.bz2', 'j'), ('.tbz2', 'j'),
@@ -121,7 +121,7 @@ def test_archive_file_errors(tmpdir, mock_archive, use_curl):
def test_fetch(
mock_archive,
secure,
- use_curl,
+ _fetch_method,
checksum_type,
config,
mutable_mock_repo
@@ -147,7 +147,7 @@ def test_fetch(
# Enter the stage directory and check some properties
with pkg.stage:
with spack.config.override('config:verify_ssl', secure):
- with spack.config.override('config:use_curl', use_curl):
+ with spack.config.override('config:url_fetch_method', _fetch_method):
pkg.do_stage()
with working_dir(pkg.stage.source_path):
assert os.path.exists('configure')
@@ -168,13 +168,13 @@ def test_fetch(
('url-list-test @3.0a1', 'foo-3.0a1.tar.gz', 'abc30a1'),
('url-list-test @4.5-rc5', 'foo-4.5-rc5.tar.gz', 'abc45rc5'),
])
-@pytest.mark.parametrize('use_curl', [True, False])
-def test_from_list_url(mock_packages, config, spec, url, digest, use_curl):
+@pytest.mark.parametrize('_fetch_method', ['curl', 'urllib'])
+def test_from_list_url(mock_packages, config, spec, url, digest, _fetch_method):
"""
Test URLs in the url-list-test package, which means they should
have checksums in the package.
"""
- with spack.config.override('config:use_curl', use_curl):
+ with spack.config.override('config:url_fetch_method', _fetch_method):
specification = Spec(spec).concretized()
pkg = spack.repo.get(specification)
fetch_strategy = fs.from_list_url(pkg)
@@ -187,10 +187,10 @@ def test_from_list_url(mock_packages, config, spec, url, digest, use_curl):
assert fetch_strategy.extra_options == {'timeout': 60}
-@pytest.mark.parametrize('use_curl', [True, False])
-def test_from_list_url_unspecified(mock_packages, config, use_curl):
+@pytest.mark.parametrize('_fetch_method', ['curl', 'urllib'])
+def test_from_list_url_unspecified(mock_packages, config, _fetch_method):
"""Test non-specific URLs from the url-list-test package."""
- with spack.config.override('config:use_curl', use_curl):
+ with spack.config.override('config:url_fetch_method', _fetch_method):
pkg = spack.repo.get('url-list-test')
spec = Spec('url-list-test @2.0.0').concretized()
@@ -236,7 +236,7 @@ def test_url_with_status_bar(tmpdir, mock_archive, monkeypatch, capfd):
monkeypatch.setattr(sys.stdout, 'isatty', is_true)
monkeypatch.setattr(tty, 'msg_enabled', is_true)
- with spack.config.override('config:use_curl', True):
+ with spack.config.override('config:url_fetch_method', 'curl'):
fetcher = fs.URLFetchStrategy(mock_archive.url)
with Stage(fetcher, path=testpath) as stage:
assert fetcher.archive_file is None
@@ -246,10 +246,10 @@ def test_url_with_status_bar(tmpdir, mock_archive, monkeypatch, capfd):
assert '##### 100' in status
-@pytest.mark.parametrize('use_curl', [True, False])
-def test_url_extra_fetch(tmpdir, mock_archive, use_curl):
+@pytest.mark.parametrize('_fetch_method', ['curl', 'urllib'])
+def test_url_extra_fetch(tmpdir, mock_archive, _fetch_method):
"""Ensure a fetch after downloading is effectively a no-op."""
- with spack.config.override('config:use_curl', use_curl):
+ with spack.config.override('config:url_fetch_method', _fetch_method):
testpath = str(tmpdir)
fetcher = fs.URLFetchStrategy(mock_archive.url)
with Stage(fetcher, path=testpath) as stage:
@@ -267,12 +267,12 @@ def test_url_extra_fetch(tmpdir, mock_archive, use_curl):
['https://ftpmirror.gnu.org/autoconf/autoconf-2.62.tar.gz',
'https://ftp.gnu.org/gnu/autoconf/autoconf-2.62.tar.gz'])
])
-@pytest.mark.parametrize('use_curl', [True, False])
-def test_candidate_urls(pkg_factory, url, urls, version, expected, use_curl):
+@pytest.mark.parametrize('_fetch_method', ['curl', 'urllib'])
+def test_candidate_urls(pkg_factory, url, urls, version, expected, _fetch_method):
"""Tests that candidate urls include mirrors and that they go through
pattern matching and substitution for versions.
"""
- with spack.config.override('config:use_curl', use_curl):
+ with spack.config.override('config:url_fetch_method', _fetch_method):
pkg = pkg_factory(url, urls)
f = fs._from_merged_attrs(fs.URLFetchStrategy, pkg, version)
assert f.candidate_urls == expected
@@ -298,7 +298,7 @@ def test_missing_curl(tmpdir, monkeypatch):
testpath = str(tmpdir)
url = 'http://github.com/spack/spack'
- with spack.config.override('config:use_curl', True):
+ with spack.config.override('config:url_fetch_method', 'curl'):
fetcher = fs.URLFetchStrategy(url=url)
assert fetcher is not None
with pytest.raises(TypeError, match='object is not callable'):