author     Harmen Stoppels <me@harmenstoppels.nl>  2024-08-20 13:17:49 +0200
committer  GitHub <noreply@github.com>             2024-08-20 13:17:49 +0200
commit     2ae5596e921d386761d96f52784d526ad8ce082b (patch)
tree       50132c6b5b4b947ae9c8b13b3d2c6a86f7eca259 /lib
parent     9d0b9f086f9b490bfdb7361ea549ca5574a254af (diff)
Unify url and oci buildcache push code paths (#45776)
Diffstat (limited to 'lib')
-rw-r--r--  lib/spack/spack/binary_distribution.py       180
-rw-r--r--  lib/spack/spack/ci.py                           4
-rw-r--r--  lib/spack/spack/cmd/buildcache.py              84
-rw-r--r--  lib/spack/spack/cmd/gpg.py                      2
-rw-r--r--  lib/spack/spack/hooks/autopush.py               3
-rw-r--r--  lib/spack/spack/test/bindist.py                 6
-rw-r--r--  lib/spack/spack/test/build_distribution.py     28
-rw-r--r--  lib/spack/spack/test/cmd/buildcache.py         28
-rw-r--r--  lib/spack/spack/test/cmd/ci.py                  4
-rw-r--r--  lib/spack/spack/test/install.py                 9
10 files changed, 206 insertions, 142 deletions
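
The change collapses the separate URL and OCI push paths into a single Uploader interface: make_uploader() inspects the mirror's push URL and returns either a URLUploader or an OCIUploader, both used as context managers that own the temporary directory and executor. A minimal sketch of the new call pattern, mirroring the updated callers in ci.py and hooks/autopush.py (the mirror path and the specs list are placeholders, not part of the commit):

    import spack.binary_distribution as bindist
    import spack.mirror

    # Placeholder mirror; an "oci://" push URL would select OCIUploader instead.
    mirror = spack.mirror.Mirror.from_url("file:///tmp/example-buildcache")
    signing_key = bindist.select_signing_key() if mirror.signed else None

    with bindist.make_uploader(mirror, update_index=True, signing_key=signing_key) as uploader:
        # Raises PushToBuildCacheError on failure; returns the specs that were
        # skipped because they were already present in the build cache.
        skipped = uploader.push_or_raise(specs)  # specs: List[spack.spec.Spec], not shown
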
diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py
index 182b45e33e..2862f5e7cb 100644
--- a/lib/spack/spack/binary_distribution.py
+++ b/lib/spack/spack/binary_distribution.py
@@ -6,7 +6,6 @@
import codecs
import collections
import concurrent.futures
-import contextlib
import copy
import hashlib
import io
@@ -25,7 +24,7 @@ import urllib.parse
import urllib.request
import warnings
from contextlib import closing
-from typing import Dict, Generator, Iterable, List, NamedTuple, Optional, Set, Tuple, Union
+from typing import Dict, Iterable, List, NamedTuple, Optional, Set, Tuple, Union
import llnl.util.filesystem as fsys
import llnl.util.lang
@@ -958,7 +957,7 @@ def _spec_files_from_cache(url: str):
raise ListMirrorSpecsError("Failed to get list of specs from {0}".format(url))
-def generate_package_index(url: str, tmpdir: str, concurrency: int = 32):
+def _url_generate_package_index(url: str, tmpdir: str, concurrency: int = 32):
"""Create or replace the build cache index on the given mirror. The
buildcache index contains an entry for each binary package under the
cache_prefix.
@@ -1119,7 +1118,7 @@ def _exists_in_buildcache(spec: Spec, tmpdir: str, out_url: str) -> ExistsInBuil
return ExistsInBuildcache(signed, unsigned, tarball)
-def _upload_tarball_and_specfile(
+def _url_upload_tarball_and_specfile(
spec: Spec, tmpdir: str, out_url: str, exists: ExistsInBuildcache, signing_key: Optional[str]
):
files = BuildcacheFiles(spec, tmpdir, out_url)
@@ -1154,47 +1153,144 @@ def _upload_tarball_and_specfile(
)
-def _format_spec(spec: Spec) -> str:
- return spec.cformat("{name}{@version}{/hash:7}")
+class Uploader:
+ def __init__(self, mirror: spack.mirror.Mirror, force: bool, update_index: bool):
+ self.mirror = mirror
+ self.force = force
+ self.update_index = update_index
+ self.tmpdir: str
+ self.executor: concurrent.futures.Executor
-@contextlib.contextmanager
-def default_push_context() -> Generator[Tuple[str, concurrent.futures.Executor], None, None]:
- with tempfile.TemporaryDirectory(
- dir=spack.stage.get_stage_root()
- ) as tmpdir, spack.util.parallel.make_concurrent_executor() as executor:
- yield tmpdir, executor
+ def __enter__(self):
+ self._tmpdir = tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root())
+ self._executor = spack.util.parallel.make_concurrent_executor()
+ self.tmpdir = self._tmpdir.__enter__()
+        self.executor = self._executor.__enter__()
-def push_or_raise(
- specs: List[Spec],
- out_url: str,
- signing_key: Optional[str],
- force: bool = False,
- update_index: bool = False,
-) -> List[Spec]:
- """Same as push, but raises an exception on error. Returns a list of skipped specs already
- present in the build cache when force=False."""
- skipped, errors = push(specs, out_url, signing_key, force, update_index)
- if errors:
- raise PushToBuildCacheError(
- f"Failed to push {len(errors)} specs to {out_url}:\n"
- + "\n".join(f"Failed to push {_format_spec(spec)}: {error}" for spec, error in errors)
+ return self
+
+ def __exit__(self, *args):
+ self._executor.__exit__(*args)
+ self._tmpdir.__exit__(*args)
+
+ def push_or_raise(self, specs: List[spack.spec.Spec]) -> List[spack.spec.Spec]:
+ skipped, errors = self.push(specs)
+ if errors:
+ raise PushToBuildCacheError(
+ f"Failed to push {len(errors)} specs to {self.mirror.push_url}:\n"
+ + "\n".join(
+ f"Failed to push {_format_spec(spec)}: {error}" for spec, error in errors
+ )
+ )
+ return skipped
+
+ def push(
+ self, specs: List[spack.spec.Spec]
+ ) -> Tuple[List[spack.spec.Spec], List[Tuple[spack.spec.Spec, BaseException]]]:
+ raise NotImplementedError
+
+ def tag(self, tag: str, roots: List[spack.spec.Spec]):
+ """Make a list of selected specs together available under the given tag"""
+ pass
+
+
+class OCIUploader(Uploader):
+ def __init__(
+ self,
+ mirror: spack.mirror.Mirror,
+ force: bool,
+ update_index: bool,
+ base_image: Optional[str],
+ ) -> None:
+ super().__init__(mirror, force, update_index)
+ self.target_image = spack.oci.oci.image_from_mirror(mirror)
+ self.base_image = ImageReference.from_string(base_image) if base_image else None
+
+ def push(
+ self, specs: List[spack.spec.Spec]
+ ) -> Tuple[List[spack.spec.Spec], List[Tuple[spack.spec.Spec, BaseException]]]:
+ skipped, base_images, checksums, upload_errors = _oci_push(
+ target_image=self.target_image,
+ base_image=self.base_image,
+ installed_specs_with_deps=specs,
+ force=self.force,
+ tmpdir=self.tmpdir,
+ executor=self.executor,
)
- return skipped
+ self._base_images = base_images
+ self._checksums = checksums
-def push(
- specs: List[Spec],
- out_url: str,
- signing_key: Optional[str],
+ # only update index if any binaries were uploaded
+ if self.update_index and len(skipped) + len(upload_errors) < len(specs):
+ _oci_update_index(self.target_image, self.tmpdir, self.executor)
+
+ return skipped, upload_errors
+
+ def tag(self, tag: str, roots: List[spack.spec.Spec]):
+ tagged_image = self.target_image.with_tag(tag)
+
+        # _oci_push may not populate self._base_images if binaries were already in the registry
+ for spec in roots:
+ _oci_update_base_images(
+ base_image=self.base_image,
+ target_image=self.target_image,
+ spec=spec,
+ base_image_cache=self._base_images,
+ )
+ _oci_put_manifest(
+ self._base_images, self._checksums, tagged_image, self.tmpdir, None, None, *roots
+ )
+
+
+class URLUploader(Uploader):
+ def __init__(
+ self,
+ mirror: spack.mirror.Mirror,
+ force: bool,
+ update_index: bool,
+ signing_key: Optional[str],
+ ) -> None:
+ super().__init__(mirror, force, update_index)
+ self.url = mirror.push_url
+ self.signing_key = signing_key
+
+ def push(
+ self, specs: List[spack.spec.Spec]
+ ) -> Tuple[List[spack.spec.Spec], List[Tuple[spack.spec.Spec, BaseException]]]:
+ return _url_push(
+ specs,
+ out_url=self.url,
+ force=self.force,
+ update_index=self.update_index,
+ signing_key=self.signing_key,
+ tmpdir=self.tmpdir,
+ executor=self.executor,
+ )
+
+
+def make_uploader(
+ mirror: spack.mirror.Mirror,
force: bool = False,
update_index: bool = False,
-) -> Tuple[List[Spec], List[Tuple[Spec, BaseException]]]:
- """Pushes to the provided build cache, and returns a list of skipped specs that were already
- present (when force=False). Does not raise on error."""
- with default_push_context() as (tmpdir, executor):
- return _push(specs, out_url, signing_key, force, update_index, tmpdir, executor)
+ signing_key: Optional[str] = None,
+ base_image: Optional[str] = None,
+) -> Uploader:
+ """Builder for the appropriate uploader based on the mirror type"""
+ if mirror.push_url.startswith("oci://"):
+ return OCIUploader(
+ mirror=mirror, force=force, update_index=update_index, base_image=base_image
+ )
+ else:
+ return URLUploader(
+ mirror=mirror, force=force, update_index=update_index, signing_key=signing_key
+ )
+
+
+def _format_spec(spec: Spec) -> str:
+ return spec.cformat("{name}{@version}{/hash:7}")
class FancyProgress:
@@ -1234,7 +1330,7 @@ class FancyProgress:
tty.info(f"{self.pre}Failed to push {self.pretty_spec}")
-def _push(
+def _url_push(
specs: List[Spec],
out_url: str,
signing_key: Optional[str],
@@ -1279,7 +1375,7 @@ def _push(
upload_futures = [
executor.submit(
- _upload_tarball_and_specfile,
+ _url_upload_tarball_and_specfile,
spec,
tmpdir,
out_url,
@@ -1309,12 +1405,12 @@ def _push(
if signing_key:
keys_tmpdir = os.path.join(tmpdir, "keys")
os.mkdir(keys_tmpdir)
- push_keys(out_url, keys=[signing_key], update_index=update_index, tmpdir=keys_tmpdir)
+ _url_push_keys(out_url, keys=[signing_key], update_index=update_index, tmpdir=keys_tmpdir)
if update_index:
index_tmpdir = os.path.join(tmpdir, "index")
os.mkdir(index_tmpdir)
- generate_package_index(out_url, index_tmpdir)
+ _url_generate_package_index(out_url, index_tmpdir)
return skipped, errors
@@ -1517,7 +1613,7 @@ def _oci_update_base_images(
)
-def _push_oci(
+def _oci_push(
*,
target_image: ImageReference,
base_image: Optional[ImageReference],
@@ -2643,7 +2739,7 @@ def get_keys(install=False, trust=False, force=False, mirrors=None):
)
-def push_keys(
+def _url_push_keys(
*mirrors: Union[spack.mirror.Mirror, str],
keys: List[str],
tmpdir: str,
diff --git a/lib/spack/spack/ci.py b/lib/spack/spack/ci.py
index 842e944a9a..fc847d18e6 100644
--- a/lib/spack/spack/ci.py
+++ b/lib/spack/spack/ci.py
@@ -1382,8 +1382,10 @@ def push_to_build_cache(spec: spack.spec.Spec, mirror_url: str, sign_binaries: b
"""
tty.debug(f"Pushing to build cache ({'signed' if sign_binaries else 'unsigned'})")
signing_key = bindist.select_signing_key() if sign_binaries else None
+ mirror = spack.mirror.Mirror.from_url(mirror_url)
try:
- bindist.push_or_raise([spec], out_url=mirror_url, signing_key=signing_key)
+ with bindist.make_uploader(mirror, signing_key=signing_key) as uploader:
+ uploader.push_or_raise([spec])
return True
except bindist.PushToBuildCacheError as e:
tty.error(f"Problem writing to {mirror_url}: {e}")
diff --git a/lib/spack/spack/cmd/buildcache.py b/lib/spack/spack/cmd/buildcache.py
index d08c0fa783..fb0ffbd78f 100644
--- a/lib/spack/spack/cmd/buildcache.py
+++ b/lib/spack/spack/cmd/buildcache.py
@@ -37,7 +37,6 @@ import spack.util.web as web_util
from spack import traverse
from spack.cmd import display_specs
from spack.cmd.common import arguments
-from spack.oci.image import ImageReference
from spack.spec import Spec, save_dependency_specfiles
description = "create, download and install binary packages"
@@ -392,13 +391,8 @@ def push_fn(args):
else:
roots = spack.cmd.require_active_env(cmd_name="buildcache push").concrete_roots()
- mirror: spack.mirror.Mirror = args.mirror
-
- # Check if this is an OCI image.
- try:
- target_image = spack.oci.oci.image_from_mirror(mirror)
- except ValueError:
- target_image = None
+ mirror = args.mirror
+ assert isinstance(mirror, spack.mirror.Mirror)
push_url = mirror.push_url
@@ -409,12 +403,11 @@ def push_fn(args):
unsigned = not (args.key or args.signed)
# For OCI images, we require dependencies to be pushed for now.
- if target_image:
- if not unsigned:
- tty.warn(
- "Code signing is currently not supported for OCI images. "
- "Use --unsigned to silence this warning."
- )
+ if mirror.push_url.startswith("oci://") and not unsigned:
+ tty.warn(
+ "Code signing is currently not supported for OCI images. "
+ "Use --unsigned to silence this warning."
+ )
unsigned = True
# Select a signing key, or None if unsigned.
@@ -445,49 +438,17 @@ def push_fn(args):
(s, PackageNotInstalledError("package not installed")) for s in not_installed
)
- with bindist.default_push_context() as (tmpdir, executor):
- if target_image:
- base_image = ImageReference.from_string(args.base_image) if args.base_image else None
- skipped, base_images, checksums, upload_errors = bindist._push_oci(
- target_image=target_image,
- base_image=base_image,
- installed_specs_with_deps=specs,
- force=args.force,
- tmpdir=tmpdir,
- executor=executor,
- )
-
- if upload_errors:
- failed.extend(upload_errors)
-
- # Apart from creating manifests for each individual spec, we allow users to create a
- # separate image tag for all root specs and their runtime dependencies.
- elif args.tag:
- tagged_image = target_image.with_tag(args.tag)
- # _push_oci may not populate base_images if binaries were already in the registry
- for spec in roots:
- bindist._oci_update_base_images(
- base_image=base_image,
- target_image=target_image,
- spec=spec,
- base_image_cache=base_images,
- )
- bindist._oci_put_manifest(
- base_images, checksums, tagged_image, tmpdir, None, None, *roots
- )
- tty.info(f"Tagged {tagged_image}")
-
- else:
- skipped, upload_errors = bindist._push(
- specs,
- out_url=push_url,
- force=args.force,
- update_index=args.update_index,
- signing_key=signing_key,
- tmpdir=tmpdir,
- executor=executor,
- )
- failed.extend(upload_errors)
+ with bindist.make_uploader(
+ mirror=mirror,
+ force=args.force,
+ update_index=args.update_index,
+ signing_key=signing_key,
+ base_image=args.base_image,
+ ) as uploader:
+ skipped, upload_errors = uploader.push(specs=specs)
+ failed.extend(upload_errors)
+ if not upload_errors and args.tag:
+ uploader.tag(args.tag, roots)
if skipped:
if len(specs) == 1:
@@ -520,13 +481,6 @@ def push_fn(args):
),
)
- # Update the OCI index if requested
- if target_image and len(skipped) < len(specs) and args.update_index:
- with tempfile.TemporaryDirectory(
- dir=spack.stage.get_stage_root()
- ) as tmpdir, spack.util.parallel.make_concurrent_executor() as executor:
- bindist._oci_update_index(target_image, tmpdir, executor)
-
def install_fn(args):
"""install from a binary package"""
@@ -814,7 +768,7 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):
url = mirror.push_url
with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
- bindist.generate_package_index(url, tmpdir)
+ bindist._url_generate_package_index(url, tmpdir)
if update_keys:
keys_url = url_util.join(
diff --git a/lib/spack/spack/cmd/gpg.py b/lib/spack/spack/cmd/gpg.py
index d16b03b7bc..288212b51f 100644
--- a/lib/spack/spack/cmd/gpg.py
+++ b/lib/spack/spack/cmd/gpg.py
@@ -224,7 +224,7 @@ def gpg_publish(args):
mirror = spack.mirror.Mirror(args.mirror_url, args.mirror_url)
with tempfile.TemporaryDirectory(dir=spack.stage.get_stage_root()) as tmpdir:
- spack.binary_distribution.push_keys(
+ spack.binary_distribution._url_push_keys(
mirror, keys=args.keys, tmpdir=tmpdir, update_index=args.update_index
)
diff --git a/lib/spack/spack/hooks/autopush.py b/lib/spack/spack/hooks/autopush.py
index cb951b7b4b..05fad82ef9 100644
--- a/lib/spack/spack/hooks/autopush.py
+++ b/lib/spack/spack/hooks/autopush.py
@@ -24,5 +24,6 @@ def post_install(spec, explicit):
# Push the package to all autopush mirrors
for mirror in spack.mirror.MirrorCollection(binary=True, autopush=True).values():
signing_key = bindist.select_signing_key() if mirror.signed else None
- bindist.push_or_raise([spec], out_url=mirror.push_url, signing_key=signing_key, force=True)
+ with bindist.make_uploader(mirror=mirror, force=True, signing_key=signing_key) as uploader:
+ uploader.push_or_raise([spec])
tty.msg(f"{spec.name}: Pushed to build cache: '{mirror.name}'")
diff --git a/lib/spack/spack/test/bindist.py b/lib/spack/spack/test/bindist.py
index 06cff7f5f7..996428ce59 100644
--- a/lib/spack/spack/test/bindist.py
+++ b/lib/spack/spack/test/bindist.py
@@ -357,7 +357,7 @@ def test_push_and_fetch_keys(mock_gnupghome, tmp_path):
assert len(keys) == 1
fpr = keys[0]
- bindist.push_keys(mirror, keys=[fpr], tmpdir=str(tmp_path), update_index=True)
+ bindist._url_push_keys(mirror, keys=[fpr], tmpdir=str(tmp_path), update_index=True)
# dir 2: import the key from the mirror, and confirm that its fingerprint
# matches the one created above
@@ -492,7 +492,7 @@ def test_generate_package_index_failure(monkeypatch, tmp_path, capfd):
test_url = "file:///fake/keys/dir"
with pytest.raises(GenerateIndexError, match="Unable to generate package index"):
- bindist.generate_package_index(test_url, str(tmp_path))
+ bindist._url_generate_package_index(test_url, str(tmp_path))
assert (
"Warning: Encountered problem listing packages at "
@@ -513,7 +513,7 @@ def test_generate_indices_exception(monkeypatch, tmp_path, capfd):
bindist.generate_key_index(url, str(tmp_path))
with pytest.raises(GenerateIndexError, match="Unable to generate package index"):
- bindist.generate_package_index(url, str(tmp_path))
+ bindist._url_generate_package_index(url, str(tmp_path))
assert f"Encountered problem listing packages at {url}" in capfd.readouterr().err
diff --git a/lib/spack/spack/test/build_distribution.py b/lib/spack/spack/test/build_distribution.py
index 4ad621ab0d..cdfd961e3a 100644
--- a/lib/spack/spack/test/build_distribution.py
+++ b/lib/spack/spack/test/build_distribution.py
@@ -10,6 +10,7 @@ import pytest
import spack.binary_distribution as bd
import spack.main
+import spack.mirror
import spack.spec
import spack.util.url
@@ -22,17 +23,21 @@ def test_build_tarball_overwrite(install_mockery, mock_fetch, monkeypatch, tmp_p
specs = [spec]
- # Runs fine the first time, second time it's a no-op
- out_url = spack.util.url.path_to_file_url(str(tmp_path))
- skipped = bd.push_or_raise(specs, out_url, signing_key=None)
- assert not skipped
+ # populate cache, everything is new
+ mirror = spack.mirror.Mirror.from_local_path(str(tmp_path))
+ with bd.make_uploader(mirror) as uploader:
+ skipped = uploader.push_or_raise(specs)
+ assert not skipped
- skipped = bd.push_or_raise(specs, out_url, signing_key=None)
- assert skipped == specs
+ # should skip all
+ with bd.make_uploader(mirror) as uploader:
+ skipped = uploader.push_or_raise(specs)
+ assert skipped == specs
- # Should work fine with force=True
- skipped = bd.push_or_raise(specs, out_url, signing_key=None, force=True)
- assert not skipped
+ # with force=True none should be skipped
+ with bd.make_uploader(mirror, force=True) as uploader:
+ skipped = uploader.push_or_raise(specs)
+ assert not skipped
# Remove the tarball, which should cause push to push.
os.remove(
@@ -42,5 +47,6 @@ def test_build_tarball_overwrite(install_mockery, mock_fetch, monkeypatch, tmp_p
/ bd.tarball_name(spec, ".spack")
)
- skipped = bd.push_or_raise(specs, out_url, signing_key=None)
- assert not skipped
+ with bd.make_uploader(mirror) as uploader:
+ skipped = uploader.push_or_raise(specs)
+ assert not skipped
diff --git a/lib/spack/spack/test/cmd/buildcache.py b/lib/spack/spack/test/cmd/buildcache.py
index fb56bb7335..8b8cff175c 100644
--- a/lib/spack/spack/test/cmd/buildcache.py
+++ b/lib/spack/spack/test/cmd/buildcache.py
@@ -7,6 +7,7 @@ import errno
import json
import os
import shutil
+from typing import List
import pytest
@@ -16,6 +17,7 @@ import spack.deptypes
import spack.environment as ev
import spack.error
import spack.main
+import spack.mirror
import spack.spec
import spack.util.url
from spack.spec import Spec
@@ -380,18 +382,22 @@ def test_correct_specs_are_pushed(
# Concretize dttop and add it to the temporary database (without prefixes)
spec = default_mock_concretization("dttop")
temporary_store.db.add(spec, directory_layout=None)
- slash_hash = "/{0}".format(spec.dag_hash())
+ slash_hash = f"/{spec.dag_hash()}"
- packages_to_push = []
+ class DontUpload(spack.binary_distribution.Uploader):
+ def __init__(self):
+ super().__init__(spack.mirror.Mirror.from_local_path(str(tmpdir)), False, False)
+ self.pushed = []
- def fake_push(specs, *args, **kwargs):
- assert all(isinstance(s, Spec) for s in specs)
- packages_to_push.extend(s.name for s in specs)
- skipped = []
- errors = []
- return skipped, errors
+ def push(self, specs: List[spack.spec.Spec]):
+ self.pushed.extend(s.name for s in specs)
+ return [], [] # nothing skipped, nothing errored
- monkeypatch.setattr(spack.binary_distribution, "_push", fake_push)
+ uploader = DontUpload()
+
+ monkeypatch.setattr(
+ spack.binary_distribution, "make_uploader", lambda *args, **kwargs: uploader
+ )
buildcache_create_args = ["create", "--unsigned"]
@@ -403,10 +409,10 @@ def test_correct_specs_are_pushed(
buildcache(*buildcache_create_args)
# Order is not guaranteed, so we can't just compare lists
- assert set(packages_to_push) == set(expected)
+ assert set(uploader.pushed) == set(expected)
# Ensure no duplicates
- assert len(set(packages_to_push)) == len(packages_to_push)
+ assert len(set(uploader.pushed)) == len(uploader.pushed)
@pytest.mark.parametrize("signed", [True, False])
diff --git a/lib/spack/spack/test/cmd/ci.py b/lib/spack/spack/test/cmd/ci.py
index 012c119ad7..0eaabfd5a7 100644
--- a/lib/spack/spack/test/cmd/ci.py
+++ b/lib/spack/spack/test/cmd/ci.py
@@ -687,7 +687,7 @@ def test_ci_rebuild_mock_failure_to_push(
"Encountered problem pushing binary <url>: <expection>"
)
- monkeypatch.setattr(spack.binary_distribution, "push_or_raise", mock_push_or_raise)
+ monkeypatch.setattr(spack.binary_distribution.Uploader, "push_or_raise", mock_push_or_raise)
with working_dir(rebuild_env.env_dir):
activate_rebuild_env(tmp_path, pkg_name, rebuild_env)
@@ -1022,7 +1022,7 @@ def test_push_to_build_cache_exceptions(monkeypatch, tmp_path, capsys):
def push_or_raise(*args, **kwargs):
raise spack.binary_distribution.PushToBuildCacheError("Error: Access Denied")
- monkeypatch.setattr(spack.binary_distribution, "push_or_raise", push_or_raise)
+ monkeypatch.setattr(spack.binary_distribution.Uploader, "push_or_raise", push_or_raise)
# Input doesn't matter, as we are faking exceptional output
url = tmp_path.as_uri()
diff --git a/lib/spack/spack/test/install.py b/lib/spack/spack/test/install.py
index b141583031..35bdba2326 100644
--- a/lib/spack/spack/test/install.py
+++ b/lib/spack/spack/test/install.py
@@ -610,10 +610,9 @@ def test_install_from_binary_with_missing_patch_succeeds(
temporary_store.db.add(s, directory_layout=temporary_store.layout, explicit=True)
# Push it to a binary cache
- build_cache = tmp_path / "my_build_cache"
- binary_distribution.push_or_raise(
- [s], out_url=build_cache.as_uri(), signing_key=None, force=False
- )
+ mirror = spack.mirror.Mirror.from_local_path(str(tmp_path / "my_build_cache"))
+ with binary_distribution.make_uploader(mirror=mirror) as uploader:
+ uploader.push_or_raise([s])
# Now re-install it.
s.package.do_uninstall()
@@ -624,7 +623,7 @@ def test_install_from_binary_with_missing_patch_succeeds(
s.package.do_install()
# Binary install: succeeds, we don't need the patch.
- spack.mirror.add(spack.mirror.Mirror.from_local_path(str(build_cache)))
+ spack.mirror.add(mirror)
s.package.do_install(package_cache_only=True, dependencies_cache_only=True, unsigned=True)
assert temporary_store.db.query_local_by_spec_hash(s.dag_hash())