diff options
author | Scott Wittenburg <scott.wittenburg@kitware.com> | 2022-09-01 15:29:44 -0600 |
---|---|---|
committer | GitHub <noreply@github.com> | 2022-09-01 15:29:44 -0600 |
commit | 6239198d65d20d8dfbd15bd617168dd92523e0b7 (patch) | |
tree | 6e2fe464a1000901639616c3e2e97bed7a4b0e53 /lib/spack/spack/ci.py | |
parent | d9313cf561ff6866971bb7fb488e0ef1b6725b6e (diff) | |
download | spack-6239198d65d20d8dfbd15bd617168dd92523e0b7.tar.gz spack-6239198d65d20d8dfbd15bd617168dd92523e0b7.tar.bz2 spack-6239198d65d20d8dfbd15bd617168dd92523e0b7.tar.xz spack-6239198d65d20d8dfbd15bd617168dd92523e0b7.zip |
Fix cause of checksum failures in public binary mirror (#32407)
Move the copying of the buildcache to a root job that runs after all the child
pipelines have finished, so that the operation can be coordinated across all
child pipelines to remove the possibility of race conditions during potentially
simultaneous copies. This lets us ensure the .spec.json.sig and .spack files
for any spec in the root mirror always come from the same child pipeline
mirror (though which pipeline is arbitrary). It also allows us to avoid copying
duplicates, which we now do.
Diffstat (limited to 'lib/spack/spack/ci.py')
-rw-r--r-- | lib/spack/spack/ci.py | 80 |
1 files changed, 52 insertions, 28 deletions
diff --git a/lib/spack/spack/ci.py b/lib/spack/spack/ci.py index b1c777bb8b..198e787dea 100644 --- a/lib/spack/spack/ci.py +++ b/lib/spack/spack/ci.py @@ -36,6 +36,7 @@ import spack.repo import spack.util.executable as exe import spack.util.gpg as gpg_util import spack.util.spack_yaml as syaml +import spack.util.url as url_util import spack.util.web as web_util from spack.error import SpackError from spack.reporters.cdash import CDash @@ -644,8 +645,6 @@ def generate_gitlab_ci_yaml( # Values: "spack_pull_request", "spack_protected_branch", or not set spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE", None) - spack_buildcache_copy = os.environ.get("SPACK_COPY_BUILDCACHE", None) - if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1: tty.die("spack ci generate requires an env containing a mirror") @@ -653,6 +652,12 @@ def generate_gitlab_ci_yaml( mirror_urls = [url for url in ci_mirrors.values()] remote_mirror_url = mirror_urls[0] + spack_buildcache_copy = os.environ.get("SPACK_COPY_BUILDCACHE", None) + if spack_buildcache_copy: + buildcache_copies = {} + buildcache_copy_src_prefix = remote_mirror_override or remote_mirror_url + buildcache_copy_dest_prefix = spack_buildcache_copy + # Check for a list of "known broken" specs that we should not bother # trying to build. 
broken_specs_url = "" @@ -1020,6 +1025,36 @@ def generate_gitlab_ci_yaml( "{0} ({1})".format(release_spec, release_spec_dag_hash) ) + # Only keep track of these if we are copying rebuilt cache entries + if spack_buildcache_copy: + # TODO: This assumes signed version of the spec + buildcache_copies[release_spec_dag_hash] = [ + { + "src": url_util.join( + buildcache_copy_src_prefix, + bindist.build_cache_relative_path(), + bindist.tarball_name(release_spec, ".spec.json.sig"), + ), + "dest": url_util.join( + buildcache_copy_dest_prefix, + bindist.build_cache_relative_path(), + bindist.tarball_name(release_spec, ".spec.json.sig"), + ), + }, + { + "src": url_util.join( + buildcache_copy_src_prefix, + bindist.build_cache_relative_path(), + bindist.tarball_path_name(release_spec, ".spack"), + ), + "dest": url_util.join( + buildcache_copy_dest_prefix, + bindist.build_cache_relative_path(), + bindist.tarball_path_name(release_spec, ".spack"), + ), + }, + ] + if artifacts_root: job_dependencies.append( {"job": generate_job_name, "pipeline": "{0}".format(parent_pipeline_id)} @@ -1197,32 +1232,6 @@ def generate_gitlab_ci_yaml( output_object["sign-pkgs"] = signing_job - if spack_buildcache_copy: - # Generate a job to copy the contents from wherever the builds are getting - # pushed to the url specified in the "SPACK_BUILDCACHE_COPY" environment - # variable. - src_url = remote_mirror_override or remote_mirror_url - dest_url = spack_buildcache_copy - - stage_names.append("stage-copy-buildcache") - copy_job = { - "stage": "stage-copy-buildcache", - "tags": ["spack", "public", "medium", "aws", "x86_64"], - "image": "ghcr.io/spack/python-aws-bash:0.0.1", - "when": "on_success", - "interruptible": True, - "retry": service_job_retries, - "script": [ - ". 
./share/spack/setup-env.sh", - "spack --version", - "aws s3 sync --exclude *index.json* --exclude *pgp* {0} {1}".format( - src_url, dest_url - ), - ], - } - - output_object["copy-mirror"] = copy_job - if rebuild_index_enabled: # Add a final job to regenerate the index stage_names.append("stage-rebuild-index") @@ -1286,6 +1295,21 @@ def generate_gitlab_ci_yaml( if spack_stack_name: output_object["variables"]["SPACK_CI_STACK_NAME"] = spack_stack_name + if spack_buildcache_copy: + # Write out the file describing specs that should be copied + copy_specs_dir = os.path.join(pipeline_artifacts_dir, "specs_to_copy") + + if not os.path.exists(copy_specs_dir): + os.makedirs(copy_specs_dir) + + copy_specs_file = os.path.join( + copy_specs_dir, + "copy_{}_specs.json".format(spack_stack_name if spack_stack_name else "rebuilt"), + ) + + with open(copy_specs_file, "w") as fd: + fd.write(json.dumps(buildcache_copies)) + sorted_output = {} for output_key, output_value in sorted(output_object.items()): sorted_output[output_key] = output_value |