author      kwryankrattiger <80296582+kwryankrattiger@users.noreply.github.com>  2024-03-28 11:02:41 -0500
committer   GitHub <noreply@github.com>  2024-03-28 17:02:41 +0100
commit      ae2d0ff1cdd03120fc11bffcf8eb2487c38145c4 (patch)
tree        109fa23c409af760576606a3e74b2c2b3a700228
parent      7e906ced75dead665048a254c0ca2f09205c9d47 (diff)
CI: fail the rebuild command if buildcache push failed (#40045)
-rw-r--r--  lib/spack/spack/binary_distribution.py  282
-rw-r--r--  lib/spack/spack/ci.py  54
-rw-r--r--  lib/spack/spack/cmd/buildcache.py  12
-rw-r--r--  lib/spack/spack/cmd/ci.py  38
-rw-r--r--  lib/spack/spack/test/bindist.py  61
-rw-r--r--  lib/spack/spack/test/ci.py  2
-rw-r--r--  lib/spack/spack/test/cmd/ci.py  97
-rw-r--r--  lib/spack/spack/util/web.py  6
8 files changed, 279 insertions, 273 deletions
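
In short, the change swaps the buildcache path's warn-and-return error handling for typed exceptions that the CI layer turns into a failing job. Below is a minimal before/after sketch of that pattern, not the real Spack API: upload(), the URLs, and the messages are illustrative stand-ins.

    class PushToBuildCacheError(Exception):
        """Stand-in for the new error raised when a push to a binary mirror fails."""


    def upload(url):
        raise OSError("Access Denied")  # simulate an S3 permission failure


    def push_before(url):
        # Old pattern: swallow the error, print a warning, report False.
        try:
            upload(url)
        except Exception as e:
            print(f"Warning: Encountered problem pushing binary to {url}: {e}")
            return False
        return True


    def push_after(url):
        # New pattern: raise a typed error and chain the original cause,
        # so the caller decides whether the pipeline should fail.
        try:
            upload(url)
        except Exception as e:
            raise PushToBuildCacheError(
                f"Encountered problem pushing binary to {url}: {e}"
            ) from e


    push_before("s3://example-mirror")  # warns and returns False
    try:
        push_after("s3://example-mirror")
    except PushToBuildCacheError as e:
        print(f"Error: {e}")  # spack ci rebuild maps this to a non-zero exit code
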
diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py
index e3075c5fbd..4292d6449d 100644
--- a/lib/spack/spack/binary_distribution.py
+++ b/lib/spack/spack/binary_distribution.py
@@ -17,7 +17,6 @@ import sys
import tarfile
import tempfile
import time
-import traceback
import urllib.error
import urllib.parse
import urllib.request
@@ -111,10 +110,6 @@ class FetchCacheError(Exception):
super().__init__(self.message)
-class ListMirrorSpecsError(spack.error.SpackError):
- """Raised when unable to retrieve list of specs from the mirror"""
-
-
class BinaryCacheIndex:
"""
The BinaryCacheIndex tracks what specs are available on (usually remote)
@@ -541,83 +536,6 @@ def binary_index_location():
BINARY_INDEX: BinaryCacheIndex = llnl.util.lang.Singleton(BinaryCacheIndex) # type: ignore
-class NoOverwriteException(spack.error.SpackError):
- """Raised when a file would be overwritten"""
-
- def __init__(self, file_path):
- super().__init__(f"Refusing to overwrite the following file: {file_path}")
-
-
-class NoGpgException(spack.error.SpackError):
- """
- Raised when gpg2 is not in PATH
- """
-
- def __init__(self, msg):
- super().__init__(msg)
-
-
-class NoKeyException(spack.error.SpackError):
- """
- Raised when gpg has no default key added.
- """
-
- def __init__(self, msg):
- super().__init__(msg)
-
-
-class PickKeyException(spack.error.SpackError):
- """
- Raised when multiple keys can be used to sign.
- """
-
- def __init__(self, keys):
- err_msg = "Multiple keys available for signing\n%s\n" % keys
- err_msg += "Use spack buildcache create -k <key hash> to pick a key."
- super().__init__(err_msg)
-
-
-class NoVerifyException(spack.error.SpackError):
- """
- Raised if file fails signature verification.
- """
-
- pass
-
-
-class NoChecksumException(spack.error.SpackError):
- """
- Raised if file fails checksum verification.
- """
-
- def __init__(self, path, size, contents, algorithm, expected, computed):
- super().__init__(
- f"{algorithm} checksum failed for {path}",
- f"Expected {expected} but got {computed}. "
- f"File size = {size} bytes. Contents = {contents!r}",
- )
-
-
-class NewLayoutException(spack.error.SpackError):
- """
- Raised if directory layout is different from buildcache.
- """
-
- def __init__(self, msg):
- super().__init__(msg)
-
-
-class InvalidMetadataFile(spack.error.SpackError):
- pass
-
-
-class UnsignedPackageException(spack.error.SpackError):
- """
- Raised if installation of unsigned package is attempted without
- the use of ``--no-check-signature``.
- """
-
-
def compute_hash(data):
if isinstance(data, str):
data = data.encode("utf-8")
@@ -992,15 +910,10 @@ def _specs_from_cache_fallback(cache_prefix):
if entry.endswith("spec.json") or entry.endswith("spec.json.sig")
]
read_fn = url_read_method
- except KeyError as inst:
- msg = "No packages at {0}: {1}".format(cache_prefix, inst)
- tty.warn(msg)
except Exception as err:
- # If we got some kind of S3 (access denied or other connection
- # error), the first non boto-specific class in the exception
- # hierarchy is Exception. Just print a warning and return
- msg = "Encountered problem listing packages at {0}: {1}".format(cache_prefix, err)
- tty.warn(msg)
+ # If we got some kind of S3 (access denied or other connection error), the first non
+ # boto-specific class in the exception is Exception. Just print a warning and return
+ tty.warn(f"Encountered problem listing packages at {cache_prefix}: {err}")
return file_list, read_fn
@@ -1047,11 +960,10 @@ def generate_package_index(cache_prefix, concurrency=32):
"""
try:
file_list, read_fn = _spec_files_from_cache(cache_prefix)
- except ListMirrorSpecsError as err:
- tty.error("Unable to generate package index, {0}".format(err))
- return
+ except ListMirrorSpecsError as e:
+ raise GenerateIndexError(f"Unable to generate package index: {e}") from e
- tty.debug("Retrieving spec descriptor files from {0} to build index".format(cache_prefix))
+ tty.debug(f"Retrieving spec descriptor files from {cache_prefix} to build index")
tmpdir = tempfile.mkdtemp()
@@ -1061,27 +973,22 @@ def generate_package_index(cache_prefix, concurrency=32):
try:
_read_specs_and_push_index(file_list, read_fn, cache_prefix, db, db_root_dir, concurrency)
- except Exception as err:
- msg = "Encountered problem pushing package index to {0}: {1}".format(cache_prefix, err)
- tty.warn(msg)
- tty.debug("\n" + traceback.format_exc())
+ except Exception as e:
+ raise GenerateIndexError(
+ f"Encountered problem pushing package index to {cache_prefix}: {e}"
+ ) from e
finally:
- shutil.rmtree(tmpdir)
+ shutil.rmtree(tmpdir, ignore_errors=True)
def generate_key_index(key_prefix, tmpdir=None):
"""Create the key index page.
- Creates (or replaces) the "index.json" page at the location given in
- key_prefix. This page contains an entry for each key (.pub) under
- key_prefix.
+ Creates (or replaces) the "index.json" page at the location given in key_prefix. This page
+ contains an entry for each key (.pub) under key_prefix.
"""
- tty.debug(
- " ".join(
- ("Retrieving key.pub files from", url_util.format(key_prefix), "to build key index")
- )
- )
+ tty.debug(f"Retrieving key.pub files from {url_util.format(key_prefix)} to build key index")
try:
fingerprints = (
@@ -1089,17 +996,8 @@ def generate_key_index(key_prefix, tmpdir=None):
for entry in web_util.list_url(key_prefix, recursive=False)
if entry.endswith(".pub")
)
- except KeyError as inst:
- msg = "No keys at {0}: {1}".format(key_prefix, inst)
- tty.warn(msg)
- return
- except Exception as err:
- # If we got some kind of S3 (access denied or other connection
- # error), the first non boto-specific class in the exception
- # hierarchy is Exception. Just print a warning and return
- msg = "Encountered problem listing keys at {0}: {1}".format(key_prefix, err)
- tty.warn(msg)
- return
+ except Exception as e:
+ raise CannotListKeys(f"Encountered problem listing keys at {key_prefix}: {e}") from e
remove_tmpdir = False
@@ -1124,12 +1022,13 @@ def generate_key_index(key_prefix, tmpdir=None):
keep_original=False,
extra_args={"ContentType": "application/json"},
)
- except Exception as err:
- msg = "Encountered problem pushing key index to {0}: {1}".format(key_prefix, err)
- tty.warn(msg)
+ except Exception as e:
+ raise GenerateIndexError(
+ f"Encountered problem pushing key index to {key_prefix}: {e}"
+ ) from e
finally:
if remove_tmpdir:
- shutil.rmtree(tmpdir)
+ shutil.rmtree(tmpdir, ignore_errors=True)
def tarfile_of_spec_prefix(tar: tarfile.TarFile, prefix: str) -> None:
@@ -1200,7 +1099,8 @@ def push_or_raise(spec: Spec, out_url: str, options: PushOptions):
used at the mirror (following <tarball_directory_name>).
This method raises :py:class:`NoOverwriteException` when ``force=False`` and the tarball or
- spec.json file already exist in the buildcache.
+ spec.json file already exist in the buildcache. It raises :py:class:`PushToBuildCacheError`
+ when the tarball or spec.json file cannot be pushed to the buildcache.
"""
if not spec.concrete:
raise ValueError("spec must be concrete to build tarball")
@@ -1278,13 +1178,18 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
key = select_signing_key(options.key)
sign_specfile(key, options.force, specfile_path)
- # push tarball and signed spec json to remote mirror
- web_util.push_to_url(spackfile_path, remote_spackfile_path, keep_original=False)
- web_util.push_to_url(
- signed_specfile_path if not options.unsigned else specfile_path,
- remote_signed_specfile_path if not options.unsigned else remote_specfile_path,
- keep_original=False,
- )
+ try:
+ # push tarball and signed spec json to remote mirror
+ web_util.push_to_url(spackfile_path, remote_spackfile_path, keep_original=False)
+ web_util.push_to_url(
+ signed_specfile_path if not options.unsigned else specfile_path,
+ remote_signed_specfile_path if not options.unsigned else remote_specfile_path,
+ keep_original=False,
+ )
+ except Exception as e:
+ raise PushToBuildCacheError(
+ f"Encountered problem pushing binary {remote_spackfile_path}: {e}"
+ ) from e
# push the key to the build cache's _pgp directory so it can be
# imported
@@ -1296,8 +1201,6 @@ def _build_tarball_in_stage_dir(spec: Spec, out_url: str, stage_dir: str, option
if options.regenerate_index:
generate_package_index(url_util.join(out_url, os.path.relpath(cache_prefix, stage_dir)))
- return None
-
class NotInstalledError(spack.error.SpackError):
"""Raised when a spec is not installed but picked to be packaged."""
@@ -1352,28 +1255,6 @@ def specs_to_be_packaged(
return [s for s in itertools.chain(roots, deps) if not s.external]
-def push(spec: Spec, mirror_url: str, options: PushOptions):
- """Create and push binary package for a single spec to the specified
- mirror url.
-
- Args:
- spec: Spec to package and push
- mirror_url: Desired destination url for binary package
- options:
-
- Returns:
- True if package was pushed, False otherwise.
-
- """
- try:
- push_or_raise(spec, mirror_url, options)
- except NoOverwriteException as e:
- warnings.warn(str(e))
- return False
-
- return True
-
-
def try_verify(specfile_path):
"""Utility function to attempt to verify a local file. Assumes the
file is a clearsigned signature file.
@@ -2706,3 +2587,96 @@ class OCIIndexFetcher:
raise FetchIndexError(f"Remote index {url_manifest} is invalid")
return FetchIndexResult(etag=None, hash=index_digest.digest, data=result, fresh=False)
+
+
+class NoOverwriteException(spack.error.SpackError):
+ """Raised when a file would be overwritten"""
+
+ def __init__(self, file_path):
+ super().__init__(f"Refusing to overwrite the following file: {file_path}")
+
+
+class NoGpgException(spack.error.SpackError):
+ """
+ Raised when gpg2 is not in PATH
+ """
+
+ def __init__(self, msg):
+ super().__init__(msg)
+
+
+class NoKeyException(spack.error.SpackError):
+ """
+ Raised when gpg has no default key added.
+ """
+
+ def __init__(self, msg):
+ super().__init__(msg)
+
+
+class PickKeyException(spack.error.SpackError):
+ """
+ Raised when multiple keys can be used to sign.
+ """
+
+ def __init__(self, keys):
+ err_msg = "Multiple keys available for signing\n%s\n" % keys
+ err_msg += "Use spack buildcache create -k <key hash> to pick a key."
+ super().__init__(err_msg)
+
+
+class NoVerifyException(spack.error.SpackError):
+ """
+ Raised if file fails signature verification.
+ """
+
+ pass
+
+
+class NoChecksumException(spack.error.SpackError):
+ """
+ Raised if file fails checksum verification.
+ """
+
+ def __init__(self, path, size, contents, algorithm, expected, computed):
+ super().__init__(
+ f"{algorithm} checksum failed for {path}",
+ f"Expected {expected} but got {computed}. "
+ f"File size = {size} bytes. Contents = {contents!r}",
+ )
+
+
+class NewLayoutException(spack.error.SpackError):
+ """
+ Raised if directory layout is different from buildcache.
+ """
+
+ def __init__(self, msg):
+ super().__init__(msg)
+
+
+class InvalidMetadataFile(spack.error.SpackError):
+ pass
+
+
+class UnsignedPackageException(spack.error.SpackError):
+ """
+ Raised if installation of unsigned package is attempted without
+ the use of ``--no-check-signature``.
+ """
+
+
+class ListMirrorSpecsError(spack.error.SpackError):
+ """Raised when unable to retrieve list of specs from the mirror"""
+
+
+class GenerateIndexError(spack.error.SpackError):
+ """Raised when unable to generate key or package index for mirror"""
+
+
+class CannotListKeys(GenerateIndexError):
+ """Raised when unable to list keys when generating key index"""
+
+
+class PushToBuildCacheError(spack.error.SpackError):
+ """Raised when unable to push objects to binary mirror"""
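
The binary_distribution.py hunk above also relocates the exception classes to the end of the module and adds three new ones. A sketch of how they relate, using a local SpackError stand-in instead of importing spack.error; only the class names and docstrings come from the diff, the rest is illustrative:

    class SpackError(Exception):
        """Local stand-in for spack.error.SpackError."""


    class ListMirrorSpecsError(SpackError):
        """Raised when unable to retrieve list of specs from the mirror"""


    class GenerateIndexError(SpackError):
        """Raised when unable to generate key or package index for mirror"""


    class CannotListKeys(GenerateIndexError):
        """Raised when unable to list keys when generating key index"""


    class PushToBuildCacheError(SpackError):
        """Raised when unable to push objects to binary mirror"""


    # CannotListKeys subclasses GenerateIndexError, so "except GenerateIndexError"
    # catches both, while "except CannotListKeys" catches only listing failures.
    assert issubclass(CannotListKeys, GenerateIndexError)
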
diff --git a/lib/spack/spack/ci.py b/lib/spack/spack/ci.py
index 96dd58a4f4..5fc5b0946f 100644
--- a/lib/spack/spack/ci.py
+++ b/lib/spack/spack/ci.py
@@ -1463,45 +1463,39 @@ def can_verify_binaries():
return len(gpg_util.public_keys()) >= 1
-def _push_mirror_contents(input_spec, sign_binaries, mirror_url):
+def _push_to_build_cache(spec: spack.spec.Spec, sign_binaries: bool, mirror_url: str) -> None:
"""Unchecked version of the public API, for easier mocking"""
- unsigned = not sign_binaries
- tty.debug(f"Creating buildcache ({'unsigned' if unsigned else 'signed'})")
- push_url = spack.mirror.Mirror.from_url(mirror_url).push_url
- return bindist.push(input_spec, push_url, bindist.PushOptions(force=True, unsigned=unsigned))
+ bindist.push_or_raise(
+ spec,
+ spack.mirror.Mirror.from_url(mirror_url).push_url,
+ bindist.PushOptions(force=True, unsigned=not sign_binaries),
+ )
-def push_mirror_contents(input_spec: spack.spec.Spec, mirror_url, sign_binaries):
+def push_to_build_cache(spec: spack.spec.Spec, mirror_url: str, sign_binaries: bool) -> bool:
"""Push one or more binary packages to the mirror.
Arguments:
- input_spec(spack.spec.Spec): Installed spec to push
- mirror_url (str): Base url of target mirror
- sign_binaries (bool): If True, spack will attempt to sign binary
- package before pushing.
+ spec: Installed spec to push
+ mirror_url: URL of target mirror
+ sign_binaries: If True, spack will attempt to sign binary package before pushing.
"""
+ tty.debug(f"Pushing to build cache ({'signed' if sign_binaries else 'unsigned'})")
try:
- return _push_mirror_contents(input_spec, sign_binaries, mirror_url)
- except Exception as inst:
- # If the mirror we're pushing to is on S3 and there's some
- # permissions problem, for example, we can't just target
- # that exception type here, since users of the
- # `spack ci rebuild' may not need or want any dependency
- # on boto3. So we use the first non-boto exception type
- # in the heirarchy:
- # boto3.exceptions.S3UploadFailedError
- # boto3.exceptions.Boto3Error
- # Exception
- # BaseException
- # object
- err_msg = f"Error msg: {inst}"
- if any(x in err_msg for x in ["Access Denied", "InvalidAccessKeyId"]):
- tty.msg(f"Permission problem writing to {mirror_url}")
- tty.msg(err_msg)
+ _push_to_build_cache(spec, sign_binaries, mirror_url)
+ return True
+ except bindist.PushToBuildCacheError as e:
+ tty.error(str(e))
+ return False
+ except Exception as e:
+ # TODO (zackgalbreath): write an adapter for boto3 exceptions so we can catch a specific
+ # exception instead of parsing str(e)...
+ msg = str(e)
+ if any(x in msg for x in ["Access Denied", "InvalidAccessKeyId"]):
+ tty.error(f"Permission problem writing to {mirror_url}: {msg}")
return False
- else:
- raise inst
+ raise
def remove_other_mirrors(mirrors_to_keep, scope=None):
@@ -2124,7 +2118,7 @@ def create_buildcache(
for mirror_url in destination_mirror_urls:
results.append(
PushResult(
- success=push_mirror_contents(input_spec, mirror_url, sign_binaries), url=mirror_url
+ success=push_to_build_cache(input_spec, mirror_url, sign_binaries), url=mirror_url
)
)
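
The spack.ci hunk renames push_mirror_contents to push_to_build_cache and narrows its error handling: a PushToBuildCacheError is logged and reported as False, permission-style messages from lower layers are tolerated the same way, and anything else re-raises. A simplified, self-contained sketch of that control flow (print stands in for tty.error, and _push_to_build_cache is stubbed to fail):

    class PushToBuildCacheError(Exception):
        pass


    def _push_to_build_cache(spec, sign_binaries, mirror_url):
        raise PushToBuildCacheError(f"could not upload {spec} to {mirror_url}")


    def push_to_build_cache(spec, mirror_url, sign_binaries):
        try:
            _push_to_build_cache(spec, sign_binaries, mirror_url)
            return True
        except PushToBuildCacheError as e:
            print(f"Error: {e}")
            return False
        except Exception as e:
            # Permission-style failures from lower layers are also tolerated;
            # any other unexpected error still propagates.
            msg = str(e)
            if any(x in msg for x in ("Access Denied", "InvalidAccessKeyId")):
                print(f"Error: Permission problem writing to {mirror_url}: {msg}")
                return False
            raise


    print(push_to_build_cache("zlib@1.3", "s3://example-mirror", sign_binaries=True))
    # Prints the error, then False; create_buildcache records this as a failed
    # PushResult for that mirror.
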
diff --git a/lib/spack/spack/cmd/buildcache.py b/lib/spack/spack/cmd/buildcache.py
index ad50cb94a4..49f3d607d2 100644
--- a/lib/spack/spack/cmd/buildcache.py
+++ b/lib/spack/spack/cmd/buildcache.py
@@ -1196,14 +1196,18 @@ def update_index(mirror: spack.mirror.Mirror, update_keys=False):
url, bindist.build_cache_relative_path(), bindist.build_cache_keys_relative_path()
)
- bindist.generate_key_index(keys_url)
+ try:
+ bindist.generate_key_index(keys_url)
+ except bindist.CannotListKeys as e:
+ # Do not error out if listing keys went wrong. This usually means that the _gpg path
+ # does not exist. TODO: distinguish between this and other errors.
+ tty.warn(f"did not update the key index: {e}")
def update_index_fn(args):
"""update a buildcache index"""
- update_index(args.mirror, update_keys=args.keys)
+ return update_index(args.mirror, update_keys=args.keys)
def buildcache(parser, args):
- if args.func:
- args.func(args)
+ return args.func(args)
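
In spack/cmd/buildcache.py only the key-listing failure is downgraded to a warning, since it usually just means the mirror has no _gpg directory; other index-generation errors keep propagating. A small sketch of that asymmetric handling, with local stand-ins rather than Spack imports:

    import warnings


    class GenerateIndexError(Exception):
        pass


    class CannotListKeys(GenerateIndexError):
        pass


    def generate_key_index(keys_url):
        raise CannotListKeys(f"Encountered problem listing keys at {keys_url}")


    def update_index(keys_url):
        try:
            generate_key_index(keys_url)
        except CannotListKeys as e:
            # Tolerate a missing or unlistable _pgp directory with a warning...
            warnings.warn(f"did not update the key index: {e}")
        # ...but any other GenerateIndexError escapes to the caller.


    update_index("s3://example-mirror/build_cache/_pgp")
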
diff --git a/lib/spack/spack/cmd/ci.py b/lib/spack/spack/cmd/ci.py
index 80aa1634e3..9ed0fc1a51 100644
--- a/lib/spack/spack/cmd/ci.py
+++ b/lib/spack/spack/cmd/ci.py
@@ -14,6 +14,7 @@ import llnl.util.tty.color as clr
import spack.binary_distribution as bindist
import spack.ci as spack_ci
+import spack.cmd
import spack.cmd.buildcache as buildcache
import spack.config as cfg
import spack.environment as ev
@@ -32,6 +33,7 @@ level = "long"
SPACK_COMMAND = "spack"
MAKE_COMMAND = "make"
INSTALL_FAIL_CODE = 1
+FAILED_CREATE_BUILDCACHE_CODE = 100
def deindent(desc):
@@ -705,11 +707,9 @@ def ci_rebuild(args):
cdash_handler.report_skipped(job_spec, reports_dir, reason=msg)
cdash_handler.copy_test_results(reports_dir, job_test_dir)
- # If the install succeeded, create a buildcache entry for this job spec
- # and push it to one or more mirrors. If the install did not succeed,
- # print out some instructions on how to reproduce this build failure
- # outside of the pipeline environment.
if install_exit_code == 0:
+ # If the install succeeded, push it to one or more mirrors. Failure to push to any mirror
+ # will result in a non-zero exit code. Pushing is best-effort.
mirror_urls = [buildcache_mirror_url]
# TODO: Remove this block in Spack 0.23
@@ -721,13 +721,12 @@ def ci_rebuild(args):
destination_mirror_urls=mirror_urls,
sign_binaries=spack_ci.can_sign_binaries(),
):
- msg = tty.msg if result.success else tty.warn
- msg(
- "{} {} to {}".format(
- "Pushed" if result.success else "Failed to push",
- job_spec.format("{name}{@version}{/hash:7}", color=clr.get_color_when()),
- result.url,
- )
+ if not result.success:
+ install_exit_code = FAILED_CREATE_BUILDCACHE_CODE
+ (tty.msg if result.success else tty.error)(
+ f'{"Pushed" if result.success else "Failed to push"} '
+ f'{job_spec.format("{name}{@version}{/hash:7}", color=clr.get_color_when())} '
+ f"to {result.url}"
)
# If this is a develop pipeline, check if the spec that we just built is
@@ -748,22 +747,22 @@ def ci_rebuild(args):
tty.warn(msg.format(broken_spec_path, err))
else:
+ # If the install did not succeed, print out some instructions on how to reproduce this
+ # build failure outside of the pipeline environment.
tty.debug("spack install exited non-zero, will not create buildcache")
api_root_url = os.environ.get("CI_API_V4_URL")
ci_project_id = os.environ.get("CI_PROJECT_ID")
ci_job_id = os.environ.get("CI_JOB_ID")
- repro_job_url = "{0}/projects/{1}/jobs/{2}/artifacts".format(
- api_root_url, ci_project_id, ci_job_id
- )
-
+ repro_job_url = f"{api_root_url}/projects/{ci_project_id}/jobs/{ci_job_id}/artifacts"
# Control characters cause this to be printed in blue so it stands out
- reproduce_msg = """
+ print(
+ f"""
\033[34mTo reproduce this build locally, run:
- spack ci reproduce-build {0} [--working-dir <dir>] [--autostart]
+ spack ci reproduce-build {repro_job_url} [--working-dir <dir>] [--autostart]
If this project does not have public pipelines, you will need to first:
@@ -771,12 +770,9 @@ If this project does not have public pipelines, you will need to first:
... then follow the printed instructions.\033[0;0m
-""".format(
- repro_job_url
+"""
)
- print(reproduce_msg)
-
rebuild_timer.stop()
try:
with open("install_timers.json", "w") as timelog:
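
The net effect in spack ci rebuild: a successful install followed by a failed push now exits with FAILED_CREATE_BUILDCACHE_CODE (100) instead of exiting 0 with a warning. A minimal sketch of that exit-code logic; PushResult is modeled here as a namedtuple and the spec name is a placeholder:

    from collections import namedtuple

    PushResult = namedtuple("PushResult", ["success", "url"])

    FAILED_CREATE_BUILDCACHE_CODE = 100


    def exit_code_after_pushes(install_exit_code, results):
        for result in results:
            if not result.success:
                install_exit_code = FAILED_CREATE_BUILDCACHE_CODE
            report = "Pushed" if result.success else "Failed to push"
            print(f"{report} trivial-install-test-package to {result.url}")
        return install_exit_code


    code = exit_code_after_pushes(
        0, [PushResult(True, "s3://mirror-a"), PushResult(False, "s3://mirror-b")]
    )
    print(code)  # 100: the job exits non-zero even though the install succeeded
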
diff --git a/lib/spack/spack/test/bindist.py b/lib/spack/spack/test/bindist.py
index ba0a7d7dec..8afd69e3c5 100644
--- a/lib/spack/spack/test/bindist.py
+++ b/lib/spack/spack/test/bindist.py
@@ -36,7 +36,7 @@ import spack.util.gpg
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
import spack.util.web as web_util
-from spack.binary_distribution import get_buildfile_manifest
+from spack.binary_distribution import CannotListKeys, GenerateIndexError, get_buildfile_manifest
from spack.directory_layout import DirectoryLayout
from spack.paths import test_path
from spack.spec import Spec
@@ -465,50 +465,57 @@ def test_generate_index_missing(monkeypatch, tmpdir, mutable_config):
assert "libelf" not in cache_list
-def test_generate_indices_key_error(monkeypatch, capfd):
+def test_generate_key_index_failure(monkeypatch):
+ def list_url(url, recursive=False):
+ if "fails-listing" in url:
+ raise Exception("Couldn't list the directory")
+ return ["first.pub", "second.pub"]
+
+ def push_to_url(*args, **kwargs):
+ raise Exception("Couldn't upload the file")
+
+ monkeypatch.setattr(web_util, "list_url", list_url)
+ monkeypatch.setattr(web_util, "push_to_url", push_to_url)
+
+ with pytest.raises(CannotListKeys, match="Encountered problem listing keys"):
+ bindist.generate_key_index("s3://non-existent/fails-listing")
+
+ with pytest.raises(GenerateIndexError, match="problem pushing .* Couldn't upload"):
+ bindist.generate_key_index("s3://non-existent/fails-uploading")
+
+
+def test_generate_package_index_failure(monkeypatch, capfd):
def mock_list_url(url, recursive=False):
- print("mocked list_url({0}, {1})".format(url, recursive))
- raise KeyError("Test KeyError handling")
+ raise Exception("Some HTTP error")
monkeypatch.setattr(web_util, "list_url", mock_list_url)
test_url = "file:///fake/keys/dir"
- # Make sure generate_key_index handles the KeyError
- bindist.generate_key_index(test_url)
+ with pytest.raises(GenerateIndexError, match="Unable to generate package index"):
+ bindist.generate_package_index(test_url)
- err = capfd.readouterr()[1]
- assert "Warning: No keys at {0}".format(test_url) in err
-
- # Make sure generate_package_index handles the KeyError
- bindist.generate_package_index(test_url)
-
- err = capfd.readouterr()[1]
- assert "Warning: No packages at {0}".format(test_url) in err
+ assert (
+ f"Warning: Encountered problem listing packages at {test_url}: Some HTTP error"
+ in capfd.readouterr().err
+ )
def test_generate_indices_exception(monkeypatch, capfd):
def mock_list_url(url, recursive=False):
- print("mocked list_url({0}, {1})".format(url, recursive))
raise Exception("Test Exception handling")
monkeypatch.setattr(web_util, "list_url", mock_list_url)
- test_url = "file:///fake/keys/dir"
-
- # Make sure generate_key_index handles the Exception
- bindist.generate_key_index(test_url)
+ url = "file:///fake/keys/dir"
- err = capfd.readouterr()[1]
- expect = "Encountered problem listing keys at {0}".format(test_url)
- assert expect in err
+ with pytest.raises(GenerateIndexError, match=f"Encountered problem listing keys at {url}"):
+ bindist.generate_key_index(url)
- # Make sure generate_package_index handles the Exception
- bindist.generate_package_index(test_url)
+ with pytest.raises(GenerateIndexError, match="Unable to generate package index"):
+ bindist.generate_package_index(url)
- err = capfd.readouterr()[1]
- expect = "Encountered problem listing packages at {0}".format(test_url)
- assert expect in err
+ assert f"Encountered problem listing packages at {url}" in capfd.readouterr().err
@pytest.mark.usefixtures("mock_fetch", "install_mockery")
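
The updated tests assert the new exceptions directly with pytest.raises instead of scraping warnings from captured output. A self-contained sketch of that monkeypatch-plus-raises pattern, using local stand-ins for web_util.list_url and the index generator rather than Spack itself:

    import sys

    import pytest


    class GenerateIndexError(Exception):
        pass


    def list_url(url, recursive=False):
        return []


    def generate_package_index(url):
        try:
            list_url(url)
        except Exception as e:
            raise GenerateIndexError(f"Unable to generate package index: {e}") from e


    def test_generate_package_index_failure(monkeypatch):
        def failing_list_url(url, recursive=False):
            raise Exception("Some HTTP error")

        # Patch the module-level helper so the generator hits its failure path.
        monkeypatch.setattr(sys.modules[__name__], "list_url", failing_list_url)

        with pytest.raises(GenerateIndexError, match="Unable to generate package index"):
            generate_package_index("file:///fake/keys/dir")
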
diff --git a/lib/spack/spack/test/ci.py b/lib/spack/spack/test/ci.py
index e01b970c2d..4eef036ddb 100644
--- a/lib/spack/spack/test/ci.py
+++ b/lib/spack/spack/test/ci.py
@@ -448,7 +448,7 @@ def test_ci_process_command_fail(repro_dir, monkeypatch):
def test_ci_create_buildcache(tmpdir, working_env, config, mock_packages, monkeypatch):
"""Test that create_buildcache returns a list of objects with the correct
keys and types."""
- monkeypatch.setattr(spack.ci, "push_mirror_contents", lambda a, b, c: True)
+ monkeypatch.setattr(spack.ci, "_push_to_build_cache", lambda a, b, c: True)
results = ci.create_buildcache(
None, destination_mirror_urls=["file:///fake-url-one", "file:///fake-url-two"]
diff --git a/lib/spack/spack/test/cmd/ci.py b/lib/spack/spack/test/cmd/ci.py
index 8209040d87..8254220eec 100644
--- a/lib/spack/spack/test/cmd/ci.py
+++ b/lib/spack/spack/test/cmd/ci.py
@@ -26,6 +26,7 @@ import spack.repo as repo
import spack.util.gpg
import spack.util.spack_yaml as syaml
import spack.util.url as url_util
+from spack.cmd.ci import FAILED_CREATE_BUILDCACHE_CODE
from spack.schema.buildcache_spec import schema as specfile_schema
from spack.schema.ci import schema as ci_schema
from spack.schema.database_index import schema as db_idx_schema
@@ -47,6 +48,8 @@ pytestmark = [pytest.mark.not_on_windows("does not run on windows"), pytest.mark
@pytest.fixture()
def ci_base_environment(working_env, tmpdir):
os.environ["CI_PROJECT_DIR"] = tmpdir.strpath
+ os.environ["CI_PIPELINE_ID"] = "7192"
+ os.environ["CI_JOB_NAME"] = "mock"
@pytest.fixture(scope="function")
@@ -776,6 +779,43 @@ def test_ci_rebuild_mock_success(
assert "Cannot copy test logs" in out
+def test_ci_rebuild_mock_failure_to_push(
+ tmpdir,
+ working_env,
+ mutable_mock_env_path,
+ install_mockery_mutable_config,
+ mock_gnupghome,
+ mock_stage,
+ mock_fetch,
+ mock_binary_index,
+ ci_base_environment,
+ monkeypatch,
+):
+ pkg_name = "trivial-install-test-package"
+ rebuild_env = create_rebuild_env(tmpdir, pkg_name)
+
+ # Mock the install script success
+ def mock_success(*args, **kwargs):
+ return 0
+
+ monkeypatch.setattr(spack.ci, "process_command", mock_success)
+
+ # Mock failure to push to the build cache
+ def mock_push_or_raise(*args, **kwargs):
+ raise spack.binary_distribution.PushToBuildCacheError(
+ "Encountered problem pushing binary <url>: <exception>"
+ )
+
+ monkeypatch.setattr(spack.binary_distribution, "push_or_raise", mock_push_or_raise)
+
+ with rebuild_env.env_dir.as_cwd():
+ activate_rebuild_env(tmpdir, pkg_name, rebuild_env)
+
+ expect = f"Command exited with code {FAILED_CREATE_BUILDCACHE_CODE}"
+ with pytest.raises(spack.main.SpackCommandError, match=expect):
+ ci_cmd("rebuild", fail_on_error=True)
+
+
@pytest.mark.skip(reason="fails intermittently and covered by gitlab ci")
def test_ci_rebuild(
tmpdir,
@@ -1063,7 +1103,7 @@ spack:
@pytest.mark.disable_clean_stage_check
-def test_push_mirror_contents(
+def test_push_to_build_cache(
tmpdir,
mutable_mock_env_path,
install_mockery_mutable_config,
@@ -1124,7 +1164,7 @@ spack:
install_cmd("--add", "--keep-stage", json_path)
for s in concrete_spec.traverse():
- ci.push_mirror_contents(s, mirror_url, True)
+ ci.push_to_build_cache(s, mirror_url, True)
buildcache_path = os.path.join(mirror_dir.strpath, "build_cache")
@@ -1217,21 +1257,16 @@ spack:
assert len(dl_dir_list) == 2
-def test_push_mirror_contents_exceptions(monkeypatch, capsys):
- def failing_access(*args, **kwargs):
+def test_push_to_build_cache_exceptions(monkeypatch, tmp_path, capsys):
+ def _push_to_build_cache(spec, sign_binaries, mirror_url):
raise Exception("Error: Access Denied")
- monkeypatch.setattr(spack.ci, "_push_mirror_contents", failing_access)
-
- # Input doesn't matter, as wwe are faking exceptional output
- url = "fakejunk"
- ci.push_mirror_contents(None, url, None)
+ monkeypatch.setattr(spack.ci, "_push_to_build_cache", _push_to_build_cache)
- captured = capsys.readouterr()
- std_out = captured[0]
- expect_msg = "Permission problem writing to {0}".format(url)
-
- assert expect_msg in std_out
+ # Input doesn't matter, as we are faking exceptional output
+ url = tmp_path.as_uri()
+ ci.push_to_build_cache(None, url, None)
+ assert f"Permission problem writing to {url}" in capsys.readouterr().err
@pytest.mark.parametrize("match_behavior", ["first", "merge"])
@@ -1461,26 +1496,24 @@ def test_ci_rebuild_index(
working_dir = tmpdir.join("working_dir")
mirror_dir = working_dir.join("mirror")
- mirror_url = "file://{0}".format(mirror_dir.strpath)
+ mirror_url = url_util.path_to_file_url(str(mirror_dir))
- spack_yaml_contents = """
+ spack_yaml_contents = f"""
spack:
- specs:
- - callpath
- mirrors:
- test-mirror: {0}
- ci:
- pipeline-gen:
- - submapping:
- - match:
- - patchelf
- build-job:
- tags:
- - donotcare
- image: donotcare
-""".format(
- mirror_url
- )
+ specs:
+ - callpath
+ mirrors:
+ test-mirror: {mirror_url}
+ ci:
+ pipeline-gen:
+ - submapping:
+ - match:
+ - patchelf
+ build-job:
+ tags:
+ - donotcare
+ image: donotcare
+"""
filename = str(tmpdir.join("spack.yaml"))
with open(filename, "w") as f:
diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py
index d56078a28e..9d2c1cb21d 100644
--- a/lib/spack/spack/util/web.py
+++ b/lib/spack/spack/util/web.py
@@ -206,9 +206,7 @@ def push_to_url(local_file_path, remote_path, keep_original=True, extra_args=Non
os.remove(local_file_path)
else:
- raise NotImplementedError(
- "Unrecognized URL scheme: {SCHEME}".format(SCHEME=remote_url.scheme)
- )
+ raise NotImplementedError(f"Unrecognized URL scheme: {remote_url.scheme}")
def base_curl_fetch_args(url, timeout=0):
@@ -535,7 +533,7 @@ def list_url(url, recursive=False):
if local_path:
if recursive:
# convert backslash to forward slash as required for URLs
- return [str(PurePosixPath(Path(p))) for p in list(_iter_local_prefix(local_path))]
+ return [str(PurePosixPath(Path(p))) for p in _iter_local_prefix(local_path)]
return [
subpath
for subpath in os.listdir(local_path)