Diffstat (limited to 'lib')
-rw-r--r--  lib/spack/spack/ci.py              | 80
-rw-r--r--  lib/spack/spack/cmd/buildcache.py  | 78
2 files changed, 113 insertions, 45 deletions
diff --git a/lib/spack/spack/ci.py b/lib/spack/spack/ci.py
index b1c777bb8b..198e787dea 100644
--- a/lib/spack/spack/ci.py
+++ b/lib/spack/spack/ci.py
@@ -36,6 +36,7 @@ import spack.repo
 import spack.util.executable as exe
 import spack.util.gpg as gpg_util
 import spack.util.spack_yaml as syaml
+import spack.util.url as url_util
 import spack.util.web as web_util
 from spack.error import SpackError
 from spack.reporters.cdash import CDash
@@ -644,8 +645,6 @@ def generate_gitlab_ci_yaml(
     # Values: "spack_pull_request", "spack_protected_branch", or not set
     spack_pipeline_type = os.environ.get("SPACK_PIPELINE_TYPE", None)
 
-    spack_buildcache_copy = os.environ.get("SPACK_COPY_BUILDCACHE", None)
-
     if "mirrors" not in yaml_root or len(yaml_root["mirrors"].values()) < 1:
         tty.die("spack ci generate requires an env containing a mirror")
 
@@ -653,6 +652,12 @@ def generate_gitlab_ci_yaml(
     mirror_urls = [url for url in ci_mirrors.values()]
     remote_mirror_url = mirror_urls[0]
 
+    spack_buildcache_copy = os.environ.get("SPACK_COPY_BUILDCACHE", None)
+    if spack_buildcache_copy:
+        buildcache_copies = {}
+        buildcache_copy_src_prefix = remote_mirror_override or remote_mirror_url
+        buildcache_copy_dest_prefix = spack_buildcache_copy
+
     # Check for a list of "known broken" specs that we should not bother
     # trying to build.
     broken_specs_url = ""
@@ -1020,6 +1025,36 @@ def generate_gitlab_ci_yaml(
                     "{0} ({1})".format(release_spec, release_spec_dag_hash)
                 )
 
+            # Only keep track of these if we are copying rebuilt cache entries
+            if spack_buildcache_copy:
+                # TODO: This assumes signed version of the spec
+                buildcache_copies[release_spec_dag_hash] = [
+                    {
+                        "src": url_util.join(
+                            buildcache_copy_src_prefix,
+                            bindist.build_cache_relative_path(),
+                            bindist.tarball_name(release_spec, ".spec.json.sig"),
+                        ),
+                        "dest": url_util.join(
+                            buildcache_copy_dest_prefix,
+                            bindist.build_cache_relative_path(),
+                            bindist.tarball_name(release_spec, ".spec.json.sig"),
+                        ),
+                    },
+                    {
+                        "src": url_util.join(
+                            buildcache_copy_src_prefix,
+                            bindist.build_cache_relative_path(),
+                            bindist.tarball_path_name(release_spec, ".spack"),
+                        ),
+                        "dest": url_util.join(
+                            buildcache_copy_dest_prefix,
+                            bindist.build_cache_relative_path(),
+                            bindist.tarball_path_name(release_spec, ".spack"),
+                        ),
+                    },
+                ]
+
             if artifacts_root:
                 job_dependencies.append(
                     {"job": generate_job_name, "pipeline": "{0}".format(parent_pipeline_id)}
@@ -1197,32 +1232,6 @@ def generate_gitlab_ci_yaml(
 
             output_object["sign-pkgs"] = signing_job
 
-        if spack_buildcache_copy:
-            # Generate a job to copy the contents from wherever the builds are getting
-            # pushed to the url specified in the "SPACK_BUILDCACHE_COPY" environment
-            # variable.
-            src_url = remote_mirror_override or remote_mirror_url
-            dest_url = spack_buildcache_copy
-
-            stage_names.append("stage-copy-buildcache")
-            copy_job = {
-                "stage": "stage-copy-buildcache",
-                "tags": ["spack", "public", "medium", "aws", "x86_64"],
-                "image": "ghcr.io/spack/python-aws-bash:0.0.1",
-                "when": "on_success",
-                "interruptible": True,
-                "retry": service_job_retries,
-                "script": [
-                    ". ./share/spack/setup-env.sh",
-                    "spack --version",
-                    "aws s3 sync --exclude *index.json* --exclude *pgp* {0} {1}".format(
-                        src_url, dest_url
-                    ),
-                ],
-            }
-
-            output_object["copy-mirror"] = copy_job
-
         if rebuild_index_enabled:
             # Add a final job to regenerate the index
             stage_names.append("stage-rebuild-index")
@@ -1286,6 +1295,21 @@ def generate_gitlab_ci_yaml(
         if spack_stack_name:
             output_object["variables"]["SPACK_CI_STACK_NAME"] = spack_stack_name
 
+        if spack_buildcache_copy:
+            # Write out the file describing specs that should be copied
+            copy_specs_dir = os.path.join(pipeline_artifacts_dir, "specs_to_copy")
+
+            if not os.path.exists(copy_specs_dir):
+                os.makedirs(copy_specs_dir)
+
+            copy_specs_file = os.path.join(
+                copy_specs_dir,
+                "copy_{}_specs.json".format(spack_stack_name if spack_stack_name else "rebuilt"),
+            )
+
+            with open(copy_specs_file, "w") as fd:
+                fd.write(json.dumps(buildcache_copies))
+
         sorted_output = {}
         for output_key, output_value in sorted(output_object.items()):
             sorted_output[output_key] = output_value
diff --git a/lib/spack/spack/cmd/buildcache.py b/lib/spack/spack/cmd/buildcache.py
index 4c1e4d4837..6aaa5eb1c7 100644
--- a/lib/spack/spack/cmd/buildcache.py
+++ b/lib/spack/spack/cmd/buildcache.py
@@ -2,6 +2,8 @@
 # Spack Project Developers. See the top-level COPYRIGHT file for details.
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
+import glob
+import json
 import os
 import shutil
 import sys
@@ -271,7 +273,12 @@ def setup_parser(subparser):
 
     # Sync buildcache entries from one mirror to another
    sync = subparsers.add_parser("sync", help=sync_fn.__doc__)
-    source = sync.add_mutually_exclusive_group(required=True)
+    sync.add_argument(
+        "--manifest-glob",
+        default=None,
+        help="A quoted glob pattern identifying copy manifest files",
+    )
+    source = sync.add_mutually_exclusive_group(required=False)
     source.add_argument(
         "--src-directory", metavar="DIRECTORY", type=str, help="Source mirror as a local file path"
     )
@@ -281,7 +288,7 @@ def setup_parser(subparser):
     source.add_argument(
         "--src-mirror-url", metavar="MIRROR_URL", type=str, help="URL of the source mirror"
     )
-    dest = sync.add_mutually_exclusive_group(required=True)
+    dest = sync.add_mutually_exclusive_group(required=False)
     dest.add_argument(
         "--dest-directory",
         metavar="DIRECTORY",
@@ -614,6 +621,31 @@ def copy_fn(args):
         shutil.copyfile(specfile_src_path_yaml, specfile_dest_path_yaml)
 
 
+def copy_buildcache_file(src_url, dest_url, local_path=None):
+    """Copy from source url to destination url"""
+    tmpdir = None
+
+    if not local_path:
+        tmpdir = tempfile.mkdtemp()
+        local_path = os.path.join(tmpdir, os.path.basename(src_url))
+
+    try:
+        temp_stage = Stage(src_url, path=os.path.dirname(local_path))
+        try:
+            temp_stage.create()
+            temp_stage.fetch()
+            web_util.push_to_url(local_path, dest_url, keep_original=True)
+        except web_util.FetchError as e:
+            # Expected, since we have to try all the possible extensions
+            tty.debug("no such file: {0}".format(src_url))
+            tty.debug(e)
+        finally:
+            temp_stage.destroy()
+    finally:
+        if tmpdir and os.path.exists(tmpdir):
+            shutil.rmtree(tmpdir)
+
+
 def sync_fn(args):
     """Syncs binaries (and associated metadata) from one mirror to another.
     Requires an active environment in order to know which specs to sync.
@@ -622,6 +654,10 @@ def sync_fn(args):
         src (str): Source mirror URL
         dest (str): Destination mirror URL
     """
+    if args.manifest_glob:
+        manifest_copy(glob.glob(args.manifest_glob))
+        return 0
+
     # Figure out the source mirror
     source_location = None
     if args.src_directory:
@@ -687,8 +723,9 @@ def sync_fn(args):
         buildcache_rel_paths.extend(
             [
                 os.path.join(build_cache_dir, bindist.tarball_path_name(s, ".spack")),
-                os.path.join(build_cache_dir, bindist.tarball_name(s, ".spec.yaml")),
+                os.path.join(build_cache_dir, bindist.tarball_name(s, ".spec.json.sig")),
                 os.path.join(build_cache_dir, bindist.tarball_name(s, ".spec.json")),
+                os.path.join(build_cache_dir, bindist.tarball_name(s, ".spec.yaml")),
             ]
         )
 
@@ -701,24 +738,31 @@ def sync_fn(args):
             dest_url = url_util.join(dest_mirror_url, rel_path)
 
             tty.debug("Copying {0} to {1} via {2}".format(src_url, dest_url, local_path))
-
-            stage = Stage(
-                src_url, name="temporary_file", path=os.path.dirname(local_path), keep=True
-            )
-
-            try:
-                stage.create()
-                stage.fetch()
-                web_util.push_to_url(local_path, dest_url, keep_original=True)
-            except web_util.FetchError as e:
-                tty.debug("spack buildcache unable to sync {0}".format(rel_path))
-                tty.debug(e)
-            finally:
-                stage.destroy()
+            copy_buildcache_file(src_url, dest_url, local_path=local_path)
     finally:
         shutil.rmtree(tmpdir)
 
 
+def manifest_copy(manifest_file_list):
+    """Read manifest files containing information about specific specs to copy
+    from source to destination, remove duplicates since any binary packge for
+    a given hash should be the same as any other, and copy all files specified
+    in the manifest files."""
+    deduped_manifest = {}
+
+    for manifest_path in manifest_file_list:
+        with open(manifest_path) as fd:
+            manifest = json.loads(fd.read())
+            for spec_hash, copy_list in manifest.items():
+                # Last duplicate hash wins
+                deduped_manifest[spec_hash] = copy_list
+
+    for spec_hash, copy_list in deduped_manifest.items():
+        for copy_file in copy_list:
+            tty.debug("copying {0} to {1}".format(copy_file["src"], copy_file["dest"]))
+            copy_buildcache_file(copy_file["src"], copy_file["dest"])
+
+
 def update_index(mirror_url, update_keys=False):
     mirror = spack.mirror.MirrorCollection().lookup(mirror_url)
     outdir = url_util.format(mirror.push_url)
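For reference, the copy manifest that `spack ci generate` writes to <pipeline_artifacts_dir>/specs_to_copy/copy_<stack>_specs.json when SPACK_COPY_BUILDCACHE is set is a JSON object keyed by spec DAG hash, with one src/dest pair for the signed spec file and one for the compressed binary tarball. A rough sketch of a single entry; the hash, mirror URLs, and file names below are invented for illustration (the real names come from bindist.tarball_name and bindist.tarball_path_name):

{
    "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa": [
        {
            "src": "s3://source-mirror/build_cache/zlib-1.2.13-aaaaaaa.spec.json.sig",
            "dest": "s3://copy-destination/build_cache/zlib-1.2.13-aaaaaaa.spec.json.sig"
        },
        {
            "src": "s3://source-mirror/build_cache/zlib-1.2.13-aaaaaaa.spack",
            "dest": "s3://copy-destination/build_cache/zlib-1.2.13-aaaaaaa.spack"
        }
    ]
}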
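On the consuming side, a copy job can hand those manifests to the new --manifest-glob mode of `spack buildcache sync`, which skips the source/destination mirror arguments, deduplicates entries by DAG hash (the last manifest seen wins), and copies each src/dest pair with copy_buildcache_file. A minimal Python sketch of the equivalent call; the jobs_scratch_dir/ artifact path is an assumption for illustration, not something fixed by this change:

import glob

from spack.cmd.buildcache import manifest_copy

# Same effect as: spack buildcache sync --manifest-glob "jobs_scratch_dir/specs_to_copy/*.json"
manifest_copy(glob.glob("jobs_scratch_dir/specs_to_copy/*.json"))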