From b58ec9e2b9bb969ba034b35ffe7412db6648e760 Mon Sep 17 00:00:00 2001 From: Harmen Stoppels Date: Tue, 6 Dec 2022 16:12:20 +0100 Subject: Remove legacy yaml from buildcache fetch (#34347) --- lib/spack/spack/binary_distribution.py | 102 ++++++++------------------------- lib/spack/spack/test/bindist.py | 24 -------- 2 files changed, 24 insertions(+), 102 deletions(-) diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py index b5ec57687d..235b51973f 100644 --- a/lib/spack/spack/binary_distribution.py +++ b/lib/spack/spack/binary_distribution.py @@ -914,8 +914,6 @@ def _read_specs_and_push_index(file_list, read_method, cache_prefix, db, temp_di return Spec.from_dict(specfile_json) if spec_url.endswith(".json"): return Spec.from_json(spec_file_contents) - if spec_url.endswith(".yaml"): - return Spec.from_yaml(spec_file_contents) tp = multiprocessing.pool.ThreadPool(processes=concurrency) try: @@ -990,8 +988,6 @@ def _specs_from_cache_aws_cli(cache_prefix): "*.spec.json.sig", "--include", "*.spec.json", - "--include", - "*.spec.yaml", cache_prefix, tmpspecsdir, ] @@ -1001,7 +997,7 @@ def _specs_from_cache_aws_cli(cache_prefix): "Using aws s3 sync to download specs from {0} to {1}".format(cache_prefix, tmpspecsdir) ) aws(*sync_command_args, output=os.devnull, error=os.devnull) - file_list = fsys.find(tmpspecsdir, ["*.spec.json.sig", "*.spec.json", "*.spec.yaml"]) + file_list = fsys.find(tmpspecsdir, ["*.spec.json.sig", "*.spec.json"]) read_fn = file_read_method except Exception: tty.warn("Failed to use aws s3 sync to retrieve specs, falling back to parallel fetch") @@ -1037,9 +1033,7 @@ def _specs_from_cache_fallback(cache_prefix): file_list = [ url_util.join(cache_prefix, entry) for entry in web_util.list_url(cache_prefix) - if entry.endswith(".yaml") - or entry.endswith("spec.json") - or entry.endswith("spec.json.sig") + if entry.endswith("spec.json") or entry.endswith("spec.json.sig") ] read_fn = url_read_method except KeyError as inst: @@ -1101,14 +1095,6 @@ def generate_package_index(cache_prefix, concurrency=32): tty.error("Unabled to generate package index, {0}".format(err)) return - if any(x.endswith(".yaml") for x in file_list): - msg = ( - "The mirror in '{}' contains specs in the deprecated YAML format.\n\n\tSupport for " - "this format will be removed in v0.20, please regenerate the build cache with a " - "recent Spack\n" - ).format(cache_prefix) - warnings.warn(msg) - tty.debug("Retrieving spec descriptor files from {0} to build index".format(cache_prefix)) tmpdir = tempfile.mkdtemp() @@ -1236,15 +1222,11 @@ def _build_tarball( specfile_name = tarball_name(spec, ".spec.json") specfile_path = os.path.realpath(os.path.join(cache_prefix, specfile_name)) signed_specfile_path = "{0}.sig".format(specfile_path) - deprecated_specfile_path = specfile_path.replace(".spec.json", ".spec.yaml") remote_specfile_path = url_util.join( outdir, os.path.relpath(specfile_path, os.path.realpath(tmpdir)) ) remote_signed_specfile_path = "{0}.sig".format(remote_specfile_path) - remote_specfile_path_deprecated = url_util.join( - outdir, os.path.relpath(deprecated_specfile_path, os.path.realpath(tmpdir)) - ) # If force and exists, overwrite. Otherwise raise exception on collision. 
if force: @@ -1252,12 +1234,8 @@ def _build_tarball( web_util.remove_url(remote_specfile_path) if web_util.url_exists(remote_signed_specfile_path): web_util.remove_url(remote_signed_specfile_path) - if web_util.url_exists(remote_specfile_path_deprecated): - web_util.remove_url(remote_specfile_path_deprecated) - elif ( - web_util.url_exists(remote_specfile_path) - or web_util.url_exists(remote_signed_specfile_path) - or web_util.url_exists(remote_specfile_path_deprecated) + elif web_util.url_exists(remote_specfile_path) or web_util.url_exists( + remote_signed_specfile_path ): raise NoOverwriteException(url_util.format(remote_specfile_path)) @@ -1313,12 +1291,10 @@ def _build_tarball( with open(spec_file, "r") as inputfile: content = inputfile.read() - if spec_file.endswith(".yaml"): - spec_dict = yaml.load(content) - elif spec_file.endswith(".json"): + if spec_file.endswith(".json"): spec_dict = sjson.load(content) else: - raise ValueError("{0} not a valid spec file type (json or yaml)".format(spec_file)) + raise ValueError("{0} not a valid spec file type".format(spec_file)) spec_dict["buildcache_layout_version"] = 1 bchecksum = {} bchecksum["hash_algorithm"] = "sha256" @@ -1539,7 +1515,7 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None): # Assumes we care more about finding a spec file by preferred ext # than by mirrory priority. This can be made less complicated as # we remove support for deprecated spec formats and buildcache layouts. - for ext in ["json.sig", "json", "yaml"]: + for ext in ["json.sig", "json"]: for mirror_to_try in mirrors_to_try: specfile_url = "{0}.{1}".format(mirror_to_try["specfile"], ext) spackfile_url = mirror_to_try["spackfile"] @@ -1576,13 +1552,6 @@ def download_tarball(spec, unsigned=False, mirrors_for_spec=None): # the remaining mirrors, looking for one we can use. 
tarball_stage = try_fetch(spackfile_url) if tarball_stage: - if ext == "yaml": - msg = ( - "Reading {} from mirror.\n\n\tThe YAML format for buildcaches is " - "deprecated and will be removed in v0.20\n" - ).format(spackfile_url) - warnings.warn(msg) - return { "tarball_stage": tarball_stage, "specfile_stage": local_specfile_stage, @@ -1826,8 +1795,6 @@ def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum spackfile_path = os.path.join(stagepath, spackfile_name) tarfile_name = tarball_name(spec, ".tar.gz") tarfile_path = os.path.join(extract_to, tarfile_name) - deprecated_yaml_name = tarball_name(spec, ".spec.yaml") - deprecated_yaml_path = os.path.join(extract_to, deprecated_yaml_name) json_name = tarball_name(spec, ".spec.json") json_path = os.path.join(extract_to, json_name) with closing(tarfile.open(spackfile_path, "r")) as tar: @@ -1839,8 +1806,6 @@ def _extract_inner_tarball(spec, filename, extract_to, unsigned, remote_checksum if os.path.exists(json_path): specfile_path = json_path - elif os.path.exists(deprecated_yaml_path): - specfile_path = deprecated_yaml_path else: raise ValueError("Cannot find spec file for {0}.".format(extract_to)) @@ -1887,10 +1852,8 @@ def extract_tarball(spec, download_result, allow_root=False, unsigned=False, for content = inputfile.read() if specfile_path.endswith(".json.sig"): spec_dict = Spec.extract_json_from_clearsig(content) - elif specfile_path.endswith(".json"): - spec_dict = sjson.load(content) else: - spec_dict = syaml.load(content) + spec_dict = sjson.load(content) bchecksum = spec_dict["binary_cache_checksum"] filename = download_result["tarball_stage"].save_filename @@ -1902,7 +1865,7 @@ def extract_tarball(spec, download_result, allow_root=False, unsigned=False, for or int(spec_dict["buildcache_layout_version"]) < 1 ): # Handle the older buildcache layout where the .spack file - # contains a spec json/yaml, maybe an .asc file (signature), + # contains a spec json, maybe an .asc file (signature), # and another tarball containing the actual install tree. 
         tmpdir = tempfile.mkdtemp()
         try:
@@ -2053,17 +2016,12 @@ def try_direct_fetch(spec, mirrors=None):
     """
     Try to find the spec directly on the configured mirrors
     """
-    deprecated_specfile_name = tarball_name(spec, ".spec.yaml")
     specfile_name = tarball_name(spec, ".spec.json")
     signed_specfile_name = tarball_name(spec, ".spec.json.sig")
     specfile_is_signed = False
-    specfile_is_json = True
     found_specs = []

     for mirror in spack.mirror.MirrorCollection(mirrors=mirrors).values():
-        buildcache_fetch_url_yaml = url_util.join(
-            mirror.fetch_url, _build_cache_relative_path, deprecated_specfile_name
-        )
         buildcache_fetch_url_json = url_util.join(
             mirror.fetch_url, _build_cache_relative_path, specfile_name
         )
@@ -2077,28 +2035,19 @@
         try:
             _, _, fs = web_util.read_from_url(buildcache_fetch_url_json)
         except (URLError, web_util.SpackWebError, HTTPError) as url_err_x:
-            try:
-                _, _, fs = web_util.read_from_url(buildcache_fetch_url_yaml)
-                specfile_is_json = False
-            except (URLError, web_util.SpackWebError, HTTPError) as url_err_y:
-                tty.debug(
-                    "Did not find {0} on {1}".format(
-                        specfile_name, buildcache_fetch_url_signed_json
-                    ),
-                    url_err,
-                    level=2,
-                )
-                tty.debug(
-                    "Did not find {0} on {1}".format(specfile_name, buildcache_fetch_url_json),
-                    url_err_x,
-                    level=2,
-                )
-                tty.debug(
-                    "Did not find {0} on {1}".format(specfile_name, buildcache_fetch_url_yaml),
-                    url_err_y,
-                    level=2,
-                )
-                continue
+            tty.debug(
+                "Did not find {0} on {1}".format(
+                    specfile_name, buildcache_fetch_url_signed_json
+                ),
+                url_err,
+                level=2,
+            )
+            tty.debug(
+                "Did not find {0} on {1}".format(specfile_name, buildcache_fetch_url_json),
+                url_err_x,
+                level=2,
+            )
+            continue
         specfile_contents = codecs.getreader("utf-8")(fs).read()

         # read the spec from the build cache file. All specs in build caches
@@ -2107,10 +2056,8 @@
         if specfile_is_signed:
             specfile_json = Spec.extract_json_from_clearsig(specfile_contents)
             fetched_spec = Spec.from_dict(specfile_json)
-        elif specfile_is_json:
-            fetched_spec = Spec.from_json(specfile_contents)
         else:
-            fetched_spec = Spec.from_yaml(specfile_contents)
+            fetched_spec = Spec.from_json(specfile_contents)
         fetched_spec._mark_concrete()

         found_specs.append(
@@ -2321,7 +2268,7 @@ def needs_rebuild(spec, mirror_url):
     specfile_path = os.path.join(cache_prefix, specfile_name)

     # Only check for the presence of the json version of the spec. If the
-    # mirror only has the yaml version, or doesn't have the spec at all, we
+    # mirror doesn't have the json version of the spec, we
     # need to rebuild.
return not web_util.url_exists(specfile_path) @@ -2429,7 +2376,6 @@ def download_single_spec(concrete_spec, destination, mirror_url=None): "url": [ tarball_name(concrete_spec, ".spec.json.sig"), tarball_name(concrete_spec, ".spec.json"), - tarball_name(concrete_spec, ".spec.yaml"), ], "path": destination, "required": True, diff --git a/lib/spack/spack/test/bindist.py b/lib/spack/spack/test/bindist.py index c73612f101..3ac04531c7 100644 --- a/lib/spack/spack/test/bindist.py +++ b/lib/spack/spack/test/bindist.py @@ -5,7 +5,6 @@ import glob import os import platform -import shutil import sys import py @@ -65,16 +64,6 @@ def test_mirror(mirror_dir): mirror_cmd("rm", "--scope=site", "test-mirror-func") -@pytest.fixture(scope="function") -def test_legacy_mirror(mutable_config, tmpdir): - mirror_dir = tmpdir.join("legacy_yaml_mirror") - shutil.copytree(legacy_mirror_dir, mirror_dir.strpath) - mirror_url = "file://%s" % mirror_dir - mirror_cmd("add", "--scope", "site", "test-legacy-yaml", mirror_url) - yield mirror_dir - mirror_cmd("rm", "--scope=site", "test-legacy-yaml") - - @pytest.fixture(scope="module") def config_directory(tmpdir_factory): tmpdir = tmpdir_factory.mktemp("test_configs") @@ -581,19 +570,6 @@ def test_update_sbang(tmpdir, test_mirror): uninstall_cmd("-y", "/%s" % new_spec.dag_hash()) -# Need one where the platform has been changed to the test platform. -def test_install_legacy_yaml(test_legacy_mirror, install_mockery_mutable_config, mock_packages): - install_cmd( - "--no-check-signature", - "--cache-only", - "-f", - legacy_mirror_dir - + "/build_cache/test-debian6-core2-gcc-4.5.0-zlib-" - + "1.2.11-t5mczux3tfqpxwmg7egp7axy2jvyulqk.spec.yaml", - ) - uninstall_cmd("-y", "/t5mczux3tfqpxwmg7egp7axy2jvyulqk") - - def test_install_legacy_buildcache_layout(install_mockery_mutable_config): """Legacy buildcache layout involved a nested archive structure where the .spack file contained a repeated spec.json and another -- cgit v1.2.3-60-g2f50
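What the change means for consumers of a build cache: when fetching, Spack now looks only for .spec.json.sig (clearsigned) and then .spec.json spec files; there is no .spec.yaml fallback, so a mirror that still carries only YAML spec files will simply appear not to have the spec and needs its build cache regenerated with a newer Spack. The short stand-alone sketch below illustrates that lookup order. It is not Spack code: the helper names (candidate_spec_urls, fetch_first_available), the example mirror URL and tarball base name, and the use of plain urllib are assumptions made for the illustration.

# Illustrative sketch of the post-change lookup order: signed JSON first,
# plain JSON second, and no YAML fallback. Not Spack's actual API.
import urllib.error
import urllib.request


def candidate_spec_urls(mirror_url, tarball_base):
    """Yield spec-file URLs in the order the patched fetch path prefers them."""
    for ext in ("json.sig", "json"):  # ".spec.yaml" is no longer tried
        yield "{0}/build_cache/{1}.spec.{2}".format(mirror_url, tarball_base, ext)


def fetch_first_available(mirror_url, tarball_base):
    """Return (url, contents) for the first spec file that exists, else None."""
    for url in candidate_spec_urls(mirror_url, tarball_base):
        try:
            with urllib.request.urlopen(url) as response:
                return url, response.read()
        except (urllib.error.HTTPError, urllib.error.URLError):
            continue  # fall through to the next, less preferred extension
    return None


if __name__ == "__main__":
    # Hypothetical mirror URL and tarball base name, purely for demonstration.
    found = fetch_first_available(
        "https://mirror.example.com", "linux-x86_64-gcc-12.2.0-zlib-1.2.13-abcdef"
    )
    print(found[0] if found else "no .spec.json.sig or .spec.json found; spec treated as missing")

In the patch itself the same preference shows up twice: download_tarball iterates over ["json.sig", "json"], and try_direct_fetch tries buildcache_fetch_url_signed_json before buildcache_fetch_url_json, logging a debug message and moving on to the next mirror when neither exists.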