author    Scott Wittenburg <scott.wittenburg@kitware.com>  2022-10-17 09:45:09 -0600
committer GitHub <noreply@github.com>  2022-10-17 09:45:09 -0600
commit    29df7e9be356b1c6eb84647b556beae5f7cec933 (patch)
tree      bf3358c86042581a4c896c59721e73811430a4da /lib
parent    ea80113d0f2ba9ca1a60f06c77a9bb1c88824b1f (diff)
Support spackbot rebuilding all specs from source (#32596)
Support spackbot rebuilding all specs from source when asked (with "rebuild everything"):

- Allow overriding the --prune-dag CLI option with an environment variable.
- Use a job variable to optionally prevent rebuild jobs' early-exit behavior.
- ci rebuild: use a new install argument to insist that dependencies are always installed from binary, while the package itself is only ever built from source.
- gitlab: fix a bug with untouched-spec pruning.
- ci rebuild: install from the DAG hash rather than the spec json file.
- When doing a "rebuild everything" pipeline, make sure that each install job only consumes binary dependencies from the mirror being populated by the current pipeline. This avoids using, e.g., binaries from develop when rebuilding everything on a PR.
- When running a pipeline to rebuild everything, do not die because we generated a hash that is on the broken-specs list; only warn in that case.
- bugfix: replace the broken no-args tty.die() with sys.exit(1).
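For readers skimming the diff below: the new pruning controls reduce to two environment-variable gates computed during pipeline generation. The following is a minimal sketch of that gating with the git-repo and affected-spec checks elided; variable names follow the diff, but this is an illustration, not the full generate_gitlab_ci_yaml logic.

    import os

    # Sketch only: the real code also requires a usable git repo and a set
    # of affected specs before it actually prunes untouched packages.
    prune_untouched_packages = False
    spack_prune_untouched = os.environ.get("SPACK_PRUNE_UNTOUCHED", None)
    if spack_prune_untouched is not None and spack_prune_untouched.lower() == "true":
        prune_untouched_packages = True

    prune_dag = True  # normally taken from the --prune-dag CLI option
    prune_dag_override = os.environ.get("SPACK_PRUNE_UP_TO_DATE", None)
    if prune_dag_override is not None:
        prune_dag = prune_dag_override.lower() == "true"

    # If we are not doing any kind of pruning, we are rebuilding everything
    rebuild_everything = not prune_dag and not prune_untouched_packages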
Diffstat (limited to 'lib')
-rw-r--r--  lib/spack/spack/ci.py      34
-rw-r--r--  lib/spack/spack/cmd/ci.py  53
2 files changed, 66 insertions(+), 21 deletions(-)
diff --git a/lib/spack/spack/ci.py b/lib/spack/spack/ci.py
index e471e04209..0741485656 100644
--- a/lib/spack/spack/ci.py
+++ b/lib/spack/spack/ci.py
@@ -12,6 +12,7 @@ import re
import shutil
import stat
import subprocess
+import sys
import tempfile
import time
import zipfile
@@ -626,11 +627,11 @@ def generate_gitlab_ci_yaml(
cdash_handler = CDashHandler(yaml_root.get("cdash")) if "cdash" in yaml_root else None
build_group = cdash_handler.build_group if cdash_handler else None
- prune_untouched_packages = os.environ.get("SPACK_PRUNE_UNTOUCHED", None)
- if prune_untouched_packages:
+ prune_untouched_packages = False
+ spack_prune_untouched = os.environ.get("SPACK_PRUNE_UNTOUCHED", None)
+ if spack_prune_untouched is not None and spack_prune_untouched.lower() == "true":
# Requested to prune untouched packages, but assume we won't do that
# unless we're actually in a git repo.
- prune_untouched_packages = False
rev1, rev2 = get_change_revisions()
tty.debug("Got following revisions: rev1={0}, rev2={1}".format(rev1, rev2))
if rev1 and rev2:
@@ -646,6 +647,14 @@ def generate_gitlab_ci_yaml(
for s in affected_specs:
tty.debug(" {0}".format(s.name))
+ # Allow overriding --prune-dag cli opt with environment variable
+ prune_dag_override = os.environ.get("SPACK_PRUNE_UP_TO_DATE", None)
+ if prune_dag_override is not None:
+ prune_dag = True if prune_dag_override.lower() == "true" else False
+
+ # If we are not doing any kind of pruning, we are rebuilding everything
+ rebuild_everything = not prune_dag and not prune_untouched_packages
+
# Downstream jobs will "need" (depend on, for both scheduling and
# artifacts, which include spack.lock file) this pipeline generation
# job by both name and pipeline id. If those environment variables
@@ -1298,6 +1307,8 @@ def generate_gitlab_ci_yaml(
"SPACK_LOCAL_MIRROR_DIR": rel_local_mirror_dir,
"SPACK_PIPELINE_TYPE": str(spack_pipeline_type),
"SPACK_CI_STACK_NAME": os.environ.get("SPACK_CI_STACK_NAME", "None"),
+ "SPACK_REBUILD_CHECK_UP_TO_DATE": str(prune_dag),
+ "SPACK_REBUILD_EVERYTHING": str(rebuild_everything),
}
if remote_mirror_override:
@@ -1357,7 +1368,9 @@ def generate_gitlab_ci_yaml(
if known_broken_specs_encountered:
tty.error("This pipeline generated hashes known to be broken on develop:")
display_broken_spec_messages(broken_specs_url, known_broken_specs_encountered)
- tty.die()
+
+ if not rebuild_everything:
+ sys.exit(1)
with open(output_file, "w") as outf:
outf.write(syaml.dump_config(sorted_output, default_flow_style=True))
@@ -1575,6 +1588,19 @@ def push_mirror_contents(env, specfile_path, mirror_url, sign_binaries):
raise inst
+def remove_other_mirrors(mirrors_to_keep, scope=None):
+ """Remove all mirrors from the given config scope, the exceptions being
+    any listed in mirrors_to_keep, which is a list of mirror urls.
+ """
+ mirrors_to_remove = []
+ for name, mirror_url in spack.config.get("mirrors", scope=scope).items():
+ if mirror_url not in mirrors_to_keep:
+ mirrors_to_remove.append(name)
+
+ for mirror_name in mirrors_to_remove:
+ spack.mirror.remove(mirror_name, scope)
+
+
def copy_files_to_artifacts(src, artifacts_dir):
"""
Copy file(s) to the given artifacts directory
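Before moving on to the command changes, note how the new remove_other_mirrors helper above is meant to be driven. Below is a hypothetical call site mirroring the imports used in cmd/ci.py; the mirror URL is made up for illustration.

    import spack.config as cfg
    import spack.ci as spack_ci

    # Assume the pipeline has accumulated the mirror URLs it is allowed to
    # consume from (temporary storage mirror and/or the override mirror);
    # the URL below is a placeholder.
    pipeline_mirrors = ["https://mirror.example.com/my-pr-mirror"]

    # Drop every other configured mirror from the modify scope, so that
    # dependency binaries can only come from mirrors this pipeline populates.
    spack_ci.remove_other_mirrors(pipeline_mirrors, cfg.default_modify_scope())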
diff --git a/lib/spack/spack/cmd/ci.py b/lib/spack/spack/cmd/ci.py
index cd7a0bb767..70016a29e8 100644
--- a/lib/spack/spack/cmd/ci.py
+++ b/lib/spack/spack/cmd/ci.py
@@ -284,6 +284,7 @@ def ci_rebuild(args):
remote_mirror_override = get_env_var("SPACK_REMOTE_MIRROR_OVERRIDE")
remote_mirror_url = get_env_var("SPACK_REMOTE_MIRROR_URL")
spack_ci_stack_name = get_env_var("SPACK_CI_STACK_NAME")
+ rebuild_everything = get_env_var("SPACK_REBUILD_EVERYTHING")
# Construct absolute paths relative to current $CI_PROJECT_DIR
ci_project_dir = get_env_var("CI_PROJECT_DIR")
@@ -325,6 +326,8 @@ def ci_rebuild(args):
)
)
+ full_rebuild = True if rebuild_everything and rebuild_everything.lower() == "true" else False
+
# If no override url exists, then just push binary package to the
# normal remote mirror url.
buildcache_mirror_url = remote_mirror_override or remote_mirror_url
@@ -448,6 +451,8 @@ def ci_rebuild(args):
fd.write(spack_info.encode("utf8"))
fd.write(b"\n")
+ pipeline_mirrors = []
+
# If we decided there should be a temporary storage mechanism, add that
# mirror now so it's used when we check for a hash match already
# built for this spec.
@@ -455,22 +460,29 @@ def ci_rebuild(args):
spack.mirror.add(
spack_ci.TEMP_STORAGE_MIRROR_NAME, pipeline_mirror_url, cfg.default_modify_scope()
)
+ pipeline_mirrors.append(pipeline_mirror_url)
# Check configured mirrors for a built spec with a matching hash
mirrors_to_check = None
- if remote_mirror_override and spack_pipeline_type == "spack_protected_branch":
- # Passing "mirrors_to_check" below means we *only* look in the override
- # mirror to see if we should skip building, which is what we want.
- mirrors_to_check = {"override": remote_mirror_override}
-
- # Adding this mirror to the list of configured mirrors means dependencies
- # could be installed from either the override mirror or any other configured
- # mirror (e.g. remote_mirror_url which is defined in the environment or
- # pipeline_mirror_url), which is also what we want.
- spack.mirror.add("mirror_override", remote_mirror_override, cfg.default_modify_scope())
-
- matches = bindist.get_mirrors_for_spec(
- job_spec, mirrors_to_check=mirrors_to_check, index_only=False
+ if remote_mirror_override:
+ if spack_pipeline_type == "spack_protected_branch":
+ # Passing "mirrors_to_check" below means we *only* look in the override
+ # mirror to see if we should skip building, which is what we want.
+ mirrors_to_check = {"override": remote_mirror_override}
+
+ # Adding this mirror to the list of configured mirrors means dependencies
+ # could be installed from either the override mirror or any other configured
+ # mirror (e.g. remote_mirror_url which is defined in the environment or
+ # pipeline_mirror_url), which is also what we want.
+ spack.mirror.add("mirror_override", remote_mirror_override, cfg.default_modify_scope())
+ pipeline_mirrors.append(remote_mirror_override)
+
+ matches = (
+ None
+ if full_rebuild
+ else bindist.get_mirrors_for_spec(
+ job_spec, mirrors_to_check=mirrors_to_check, index_only=False
+ )
)
if matches:
@@ -493,6 +505,13 @@ def ci_rebuild(args):
# Now we are done and successful
sys.exit(0)
+ # Before beginning the install, if this is a "rebuild everything" pipeline, we
+    # only want to keep the mirror being used by the current pipeline as its binary
+    # package destination. This ensures that when we rebuild everything, we only
+ # consume binary dependencies built in this pipeline.
+ if full_rebuild:
+ spack_ci.remove_other_mirrors(pipeline_mirrors, cfg.default_modify_scope())
+
# No hash match anywhere means we need to rebuild spec
# Start with spack arguments
@@ -507,6 +526,8 @@ def ci_rebuild(args):
"install",
"--show-log-on-error", # Print full log on fails
"--keep-stage",
+ "--use-buildcache",
+ "dependencies:only,package:never",
]
)
@@ -525,10 +546,8 @@ def ci_rebuild(args):
if compiler_action != "FIND_ANY":
install_args.append("--no-add")
- # TODO: once we have the concrete spec registry, use the DAG hash
- # to identify the spec to install, rather than the concrete spec
- # json file.
- install_args.extend(["-f", job_spec_json_path])
+ # Identify spec to install by hash
+ install_args.append("/{0}".format(job_spec.dag_hash()))
tty.debug("Installing {0} from source".format(job_spec.name))
install_exit_code = spack_ci.process_command("install", install_args, repro_dir)
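Taken together, the cmd/ci.py changes mean a rebuild job now pins buildcache behavior and addresses the spec by DAG hash instead of a spec json file. Roughly, the assembled install arguments take the shape sketched below; the hash prefix is a placeholder, and other flags (e.g. --no-add, cdash options) are appended conditionally in the real code.

    # Illustrative shape of install_args after this change; not a verbatim
    # dump of the real list, which starts from user-supplied arguments.
    install_args = [
        "install",
        "--show-log-on-error",  # print full log on failure
        "--keep-stage",
        "--use-buildcache",
        "dependencies:only,package:never",  # deps from binary, package from source
        "/abc123d",  # placeholder DAG hash prefix identifying job_spec
    ]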