author     Scott Wittenburg <scott.wittenburg@kitware.com>  2018-12-17 17:40:16 -0700
committer  Peter Scheibel <scheibel1@llnl.gov>  2019-02-21 15:37:35 -0600
commit     5600c9f0d98f620c5ac57f24cd3c8fffee9d0a47 (patch)
tree       0a189dd3296d472e8eff56c9d4ac10f0a43794d8 /lib
parent     a6e8e889b291a5a048cbdf5497f19216a4acb25e (diff)
release workflow: Add build scripts for jobs and means to upload pkgs
Diffstat (limited to 'lib')
-rw-r--r--  lib/spack/spack/cmd/upload_s3.py  212
1 file changed, 212 insertions(+), 0 deletions(-)
diff --git a/lib/spack/spack/cmd/upload_s3.py b/lib/spack/spack/cmd/upload_s3.py
new file mode 100644
index 0000000000..ba3f0688ae
--- /dev/null
+++ b/lib/spack/spack/cmd/upload_s3.py
@@ -0,0 +1,212 @@
+# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+# TODO: This will be merged into the buildcache command once
+# everything is working.
+
+import os
+import re
+import sys
+
+try:
+    import boto3
+    import botocore
+    have_boto3_support = True
+except ImportError:
+    have_boto3_support = False
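+
+# boto3 is an optional dependency: importing this module succeeds without
+# it, and its absence is reported only when an S3 session is actually
+# requested (see get_s3_session below).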
+
+import llnl.util.tty as tty
+
+import spack.binary_distribution as bindist
+from spack.error import SpackError
+from spack.spec import Spec
+import spack.tengine as template_engine
+
+
+description = "temporary command to upload buildcaches to 's3.spack.io'"
+section = "packaging"
+level = "long"
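+
+# Example invocations (illustrative; 'zlib' stands in for any spec, and
+# flag names match the parser defined below):
+#
+#   spack upload-s3 spec --spec zlib --base-dir /path/to/buildcaches
+#   spack upload-s3 index --endpoint-url https://s3.spack.io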
+
+
+def setup_parser(subparser):
+    setup_parser.parser = subparser
+    subparsers = subparser.add_subparsers(help='upload-s3 sub-commands')
+
+    # sub-command to upload a built spec to s3
+    spec = subparsers.add_parser('spec', help=upload_spec.__doc__)
+
+    spec.add_argument('-s', '--spec', default=None,
+                      help='Spec to upload')
+
+    spec.add_argument('-y', '--spec-yaml', default=None,
+                      help='Path to spec yaml file containing spec to upload')
+
+    spec.add_argument('-b', '--base-dir', default=None,
+                      help='Path to root of buildcaches')
+
+    spec.add_argument('-e', '--endpoint-url',
+                      default='https://s3.spack.io', help='URL of mirror')
+
+    spec.set_defaults(func=upload_spec)
+
+    # sub-command to update the index of a buildcache on s3
+    index = subparsers.add_parser('index', help=update_index.__doc__)
+
+    index.add_argument('-e', '--endpoint-url',
+                       default='https://s3.spack.io', help='URL of mirror')
+
+    index.set_defaults(func=update_index)
+
+
+def get_s3_session(endpoint_url):
+    if not have_boto3_support:
+        raise SpackError('boto3 module not available')
+
+    # Credentials come from boto3's normal resolution chain (environment
+    # variables, shared credentials file, or instance profile); the
+    # endpoint URL selects which mirror the session talks to.
+    session = boto3.Session()
+    s3 = session.resource('s3', endpoint_url=endpoint_url)
+
+    bucket_names = [bucket.name for bucket in s3.buckets.all()]
+
+    if not bucket_names:
+        raise SpackError('No bucket associated with credentials')
+
+    if len(bucket_names) > 1:
+        raise SpackError('More than one bucket associated with credentials')
+
+    return s3, bucket_names[0]
+
+
+def update_index(args):
+    """Update the index of an s3 buildcache"""
+    s3, bucket_name = get_s3_session(args.endpoint_url)
+
+    bucket = s3.Bucket(bucket_name)
+    exists = True
+
+    try:
+        s3.meta.client.head_bucket(Bucket=bucket_name)
+    except botocore.exceptions.ClientError as e:
+        # If a client error is thrown, check whether it was a 404; a 404
+        # means the bucket does not exist.  Re-raise anything else.
+        error_code = e.response['Error']['Code']
+        if error_code == '404':
+            exists = False
+        else:
+            raise
+
+    if not exists:
+        tty.error('S3 bucket "{0}" does not exist'.format(bucket_name))
+        sys.exit(1)
+
+    build_cache_dir = os.path.join(
+        'mirror', bindist.build_cache_relative_path())
+
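+    # The capture group becomes an entry in the generated index: the
+    # relative path for '.spec.yaml' metadata files, or the top-level
+    # directory name for '.spack' binary packages.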
+    spec_yaml_regex = re.compile(r'{0}/(.+\.spec\.yaml)$'.format(
+        build_cache_dir))
+    spack_regex = re.compile(r'{0}/([^/]+)/.+\.spack$'.format(
+        build_cache_dir))
+
+    top_level_keys = set()
+
+    for key in bucket.objects.all():
+        m = spec_yaml_regex.search(key.key)
+        if m:
+            top_level_keys.add(m.group(1))
+            print(m.group(1))
+            continue
+
+        m = spack_regex.search(key.key)
+        if m:
+            top_level_keys.add(m.group(1))
+            print(m.group(1))
+            continue
+
+    index_data = {
+        'top_level_keys': top_level_keys,
+    }
+
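+    # Render the index page with Spack's Jinja2-based template engine;
+    # 'misc/buildcache_index.html' is resolved against the engine's
+    # template search path.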
+    env = template_engine.make_environment()
+    template_dir = 'misc'
+    index_template = os.path.join(template_dir, 'buildcache_index.html')
+    t = env.get_template(index_template)
+    contents = t.render(index_data)
+
+    index_key = os.path.join(build_cache_dir, 'index.html')
+
+    tty.debug('Generated index:')
+    tty.debug(contents)
+    tty.debug('Pushing it to {0} -> {1}'.format(bucket_name, index_key))
+
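+    # A 'public-read' ACL lets the index be fetched without credentials.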
+    s3_obj = s3.Object(bucket_name, index_key)
+    s3_obj.put(Body=contents, ACL='public-read')
+
+
+def upload_spec(args):
+    """Upload a built spec to an s3 bucket"""
+    if not args.spec and not args.spec_yaml:
+        tty.error('Cannot upload spec without spec arg or path to spec yaml')
+        sys.exit(1)
+
+    if not args.base_dir:
+        tty.error('No base directory for buildcache specified')
+        sys.exit(1)
+
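+    # A spec given as a string is abstract and must be concretized here;
+    # a spec.yaml file already describes a concrete spec.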
+    if args.spec:
+        try:
+            spec = Spec(args.spec)
+            spec.concretize()
+        except Exception:
+            tty.error('Unable to concretize spec from string {0}'.format(
+                args.spec))
+            sys.exit(1)
+    else:
+        try:
+            with open(args.spec_yaml, 'r') as fd:
+                spec = Spec.from_yaml(fd.read())
+        except Exception:
+            tty.error('Unable to parse spec from yaml file {0}'.format(
+                args.spec_yaml))
+            sys.exit(1)
+
+    s3, bucket_name = get_s3_session(args.endpoint_url)
+
+    build_cache_dir = bindist.build_cache_relative_path()
+
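+    # A built spec is mirrored as up to three objects: the binary tarball
+    # ('.spack'), its metadata ('.spec.yaml'), and, if one was recorded,
+    # a CDash build id ('.cdashid').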
+    tarball_key = os.path.join(
+        build_cache_dir, bindist.tarball_path_name(spec, '.spack'))
+    tarball_path = os.path.join(args.base_dir, tarball_key)
+
+    specfile_key = os.path.join(
+        build_cache_dir, bindist.tarball_name(spec, '.spec.yaml'))
+    specfile_path = os.path.join(args.base_dir, specfile_key)
+
+    cdashidfile_key = os.path.join(
+        build_cache_dir, bindist.tarball_name(spec, '.cdashid'))
+    cdashidfile_path = os.path.join(args.base_dir, cdashidfile_key)
+
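+    # Objects go under the 'mirror/' prefix so that 'upload-s3 index'
+    # finds them when it scans the bucket.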
+    tty.msg('Uploading {0}'.format(tarball_key))
+    s3.meta.client.upload_file(
+        tarball_path, bucket_name,
+        os.path.join('mirror', tarball_key),
+        ExtraArgs={'ACL': 'public-read'})
+
+    tty.msg('Uploading {0}'.format(specfile_key))
+    s3.meta.client.upload_file(
+        specfile_path, bucket_name,
+        os.path.join('mirror', specfile_key),
+        ExtraArgs={'ACL': 'public-read'})
+
+    if os.path.exists(cdashidfile_path):
+        tty.msg('Uploading {0}'.format(cdashidfile_key))
+        s3.meta.client.upload_file(
+            cdashidfile_path, bucket_name,
+            os.path.join('mirror', cdashidfile_key),
+            ExtraArgs={'ACL': 'public-read'})
+
+
+def upload_s3(parser, args):
+    # 'func' is only set when a sub-command was given on the command line.
+    if getattr(args, 'func', None):
+        args.func(args)