summaryrefslogtreecommitdiff
path: root/lib/spack/spack/cmd/ci.py
diff options
context:
space:
mode:
Diffstat (limited to 'lib/spack/spack/cmd/ci.py')
-rw-r--r--lib/spack/spack/cmd/ci.py204
1 files changed, 115 insertions, 89 deletions
diff --git a/lib/spack/spack/cmd/ci.py b/lib/spack/spack/cmd/ci.py
index e0b9b6e0fa..3dd97e53b0 100644
--- a/lib/spack/spack/cmd/ci.py
+++ b/lib/spack/spack/cmd/ci.py
@@ -138,6 +138,7 @@ def ci_rebuild(args):
cdash_build_name = get_env_var('SPACK_CDASH_BUILD_NAME')
related_builds = get_env_var('SPACK_RELATED_BUILDS_CDASH')
pr_env_var = get_env_var('SPACK_IS_PR_PIPELINE')
+ pr_mirror_url = get_env_var('SPACK_PR_MIRROR_URL')
gitlab_ci = None
if 'gitlab-ci' in yaml_root:
@@ -180,8 +181,6 @@ def ci_rebuild(args):
tty.debug('job_spec_pkg_name = {0}'.format(job_spec_pkg_name))
tty.debug('compiler_action = {0}'.format(compiler_action))
- spack_cmd = exe.which('spack')
-
cdash_report_dir = os.path.join(ci_artifact_dir, 'cdash_report')
temp_dir = os.path.join(ci_artifact_dir, 'jobs_scratch_dir')
job_log_dir = os.path.join(temp_dir, 'logs')
@@ -235,20 +234,17 @@ def ci_rebuild(args):
for next_entry in directory_list:
tty.debug(' {0}'.format(next_entry))
- # Make a copy of the environment file, so we can overwrite the changed
- # version in between the two invocations of "spack install"
- env_src_path = env.manifest_path
- env_dirname = os.path.dirname(env_src_path)
- env_filename = os.path.basename(env_src_path)
- env_copyname = '{0}_BACKUP'.format(env_filename)
- env_dst_path = os.path.join(env_dirname, env_copyname)
- shutil.copyfile(env_src_path, env_dst_path)
-
tty.debug('job concrete spec path: {0}'.format(job_spec_yaml_path))
if signing_key:
spack_ci.import_signing_key(signing_key)
+ can_sign = spack_ci.can_sign_binaries()
+ sign_binaries = can_sign and spack_is_pr_pipeline is False
+
+ can_verify = spack_ci.can_verify_binaries()
+ verify_binaries = can_verify and spack_is_pr_pipeline is False
+
spack_ci.configure_compilers(compiler_action)
spec_map = spack_ci.get_concrete_specs(
@@ -273,27 +269,76 @@ def ci_rebuild(args):
with open(root_spec_yaml_path, 'w') as fd:
fd.write(spec_map['root'].to_yaml(hash=ht.build_hash))
- if bindist.needs_rebuild(job_spec, remote_mirror_url, True):
- # Binary on remote mirror is not up to date, we need to rebuild
- # it.
- #
- # FIXME: ensure mirror precedence causes this local mirror to
- # be chosen ahead of the remote one when installing deps
+ # TODO: Refactor the spack install command so it's easier to use from
+ # python modules. Currently we use "exe.which('spack')" to make it
+ # easier to install packages from here, but it introduces some
+ # problems, e.g. if we want the spack command to have access to the
+ # mirrors we're configuring, then we have to use the "spack" command
+ # to add the mirrors too, which in turn means that any code here *not*
+ # using the spack command does *not* have access to the mirrors.
+ spack_cmd = exe.which('spack')
+ mirrors_to_check = {
+ 'ci_remote_mirror': remote_mirror_url,
+ }
+
+ def add_mirror(mirror_name, mirror_url):
+ m_args = ['mirror', 'add', mirror_name, mirror_url]
+ tty.debug('Adding mirror: spack {0}'.format(m_args))
+ mirror_add_output = spack_cmd(*m_args)
+ # Workaround: Adding the mirrors above, using "spack_cmd" makes
+ # sure they're available later when we use "spack_cmd" to install
+ # the package. But then we also need to add them to this dict
+ # below, so they're available in this process (we end up having to
+ # pass them to "bindist.get_mirrors_for_spec()")
+ mirrors_to_check[mirror_name] = mirror_url
+ tty.debug('spack mirror add output: {0}'.format(mirror_add_output))
+
+ # Configure mirrors
+ if pr_mirror_url:
+ add_mirror('ci_pr_mirror', pr_mirror_url)
+
+ if enable_artifacts_mirror:
+ add_mirror('ci_artifact_mirror', artifact_mirror_url)
+
+ tty.debug('listing spack mirrors:')
+ spack_cmd('mirror', 'list')
+ spack_cmd('config', 'blame', 'mirrors')
+
+ # Checks all mirrors for a built spec with a matching full hash
+ matches = bindist.get_mirrors_for_spec(
+ job_spec, force=False, full_hash_match=True,
+ mirrors_to_check=mirrors_to_check)
+
+ if matches:
+            # Got a full hash match on at least one configured mirror.  All
+ # matches represent the fully up-to-date spec, so should all be
+ # equivalent. If artifacts mirror is enabled, we just pick one
+ # of the matches and download the buildcache files from there to
+ # the artifacts, so they're available to be used by dependent
+ # jobs in subsequent stages.
+ tty.debug('No need to rebuild {0}'.format(job_spec_pkg_name))
if enable_artifacts_mirror:
- mirror_add_output = spack_cmd(
- 'mirror', 'add', 'local_mirror', artifact_mirror_url)
- tty.debug('spack mirror add:')
- tty.debug(mirror_add_output)
+ matching_mirror = matches[0]['mirror_url']
+ tty.debug('Getting {0} buildcache from {1}'.format(
+ job_spec_pkg_name, matching_mirror))
+ tty.debug('Downloading to {0}'.format(build_cache_dir))
+ buildcache.download_buildcache_files(
+ job_spec, build_cache_dir, True, matching_mirror)
+ else:
+ # No full hash match anywhere means we need to rebuild spec
- mirror_list_output = spack_cmd('mirror', 'list')
- tty.debug('listing spack mirrors:')
- tty.debug(mirror_list_output)
+ # Build up common install arguments
+ install_args = [
+ '-d', '-v', '-k', 'install',
+ '--keep-stage',
+ '--require-full-hash-match',
+ ]
- # 2) build up install arguments
- install_args = ['-d', '-v', '-k', 'install', '--keep-stage']
+ if not verify_binaries:
+ install_args.append('--no-check-signature')
- # 3) create/register a new build on CDash (if enabled)
- cdash_args = []
+ # Add arguments to create + register a new build on CDash (if
+ # enabled)
if enable_cdash:
tty.debug('Registering build with CDash')
(cdash_build_id,
@@ -304,82 +349,63 @@ def ci_rebuild(args):
cdash_upload_url = '{0}/submit.php?project={1}'.format(
cdash_base_url, cdash_project_enc)
- cdash_args = [
+ install_args.extend([
'--cdash-upload-url', cdash_upload_url,
'--cdash-build', cdash_build_name,
'--cdash-site', cdash_site,
'--cdash-buildstamp', cdash_build_stamp,
- ]
+ ])
- spec_cli_arg = [job_spec_yaml_path]
+ install_args.append(job_spec_yaml_path)
- tty.debug('Installing package')
+ tty.debug('Installing {0} from source'.format(job_spec.name))
try:
- # Two-pass install is intended to avoid spack trying to
- # install from buildcache even though the locally computed
- # full hash is different than the one stored in the spec.yaml
- # file on the remote mirror.
- first_pass_args = install_args + [
- '--cache-only',
- '--only',
- 'dependencies',
- ]
- first_pass_args.extend(spec_cli_arg)
- tty.debug('First pass install arguments: {0}'.format(
- first_pass_args))
- spack_cmd(*first_pass_args)
-
- # Overwrite the changed environment file so it doesn't break
- # the next install invocation.
- tty.debug('Copying {0} to {1}'.format(
- env_dst_path, env_src_path))
- shutil.copyfile(env_dst_path, env_src_path)
-
- second_pass_args = install_args + [
- '--no-cache',
- '--only',
- 'package',
- ]
- second_pass_args.extend(cdash_args)
- second_pass_args.extend(spec_cli_arg)
- tty.debug('Second pass install arguments: {0}'.format(
- second_pass_args))
- spack_cmd(*second_pass_args)
- except Exception as inst:
- tty.error('Caught exception during install:')
- tty.error(inst)
+ tty.debug('spack install arguments: {0}'.format(
+ install_args))
+ spack_cmd(*install_args)
+ finally:
+ spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)
+
+ # Create buildcache on remote mirror, either on pr-specific
+ # mirror or on mirror defined in spack environment
+ if spack_is_pr_pipeline:
+ buildcache_mirror_url = pr_mirror_url
+ else:
+ buildcache_mirror_url = remote_mirror_url
- spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)
-
- # 4) create buildcache on remote mirror, but not if this is
- # running to test a spack PR
- if not spack_is_pr_pipeline:
+ try:
spack_ci.push_mirror_contents(
- env, job_spec, job_spec_yaml_path, remote_mirror_url,
- cdash_build_id)
-
- # 5) create another copy of that buildcache on "local artifact
- # mirror" (only done if cash reporting is enabled)
+ env, job_spec, job_spec_yaml_path, buildcache_mirror_url,
+ cdash_build_id, sign_binaries)
+ except Exception as inst:
+ # If the mirror we're pushing to is on S3 and there's some
+ # permissions problem, for example, we can't just target
+ # that exception type here, since users of the
+ # `spack ci rebuild' may not need or want any dependency
+ # on boto3. So we use the first non-boto exception type
+                    # in the hierarchy:
+ # boto3.exceptions.S3UploadFailedError
+ # boto3.exceptions.Boto3Error
+ # Exception
+ # BaseException
+ # object
+ err_msg = 'Error msg: {0}'.format(inst)
+ if 'Access Denied' in err_msg:
+ tty.msg('Permission problem writing to mirror')
+ tty.msg(err_msg)
+
+ # Create another copy of that buildcache on "local artifact
+ # mirror" (only done if artifacts buildcache is enabled)
spack_ci.push_mirror_contents(env, job_spec, job_spec_yaml_path,
- artifact_mirror_url, cdash_build_id)
+ artifact_mirror_url, cdash_build_id,
+ sign_binaries)
- # 6) relate this build to its dependencies on CDash (if enabled)
+ # Relate this build to its dependencies on CDash (if enabled)
if enable_cdash:
spack_ci.relate_cdash_builds(
spec_map, cdash_base_url, cdash_build_id, cdash_project,
- artifact_mirror_url or remote_mirror_url)
- else:
- # There is nothing to do here unless "local artifact mirror" is
- # enabled, in which case, we need to download the buildcache to
- # the local artifacts directory to be used by dependent jobs in
- # subsequent stages
- tty.debug('No need to rebuild {0}'.format(job_spec_pkg_name))
- if enable_artifacts_mirror:
- tty.debug('Getting {0} buildcache'.format(job_spec_pkg_name))
- tty.debug('Downloading to {0}'.format(build_cache_dir))
- buildcache.download_buildcache_files(
- job_spec, build_cache_dir, True, remote_mirror_url)
+ artifact_mirror_url or pr_mirror_url or remote_mirror_url)
def ci(parser, args):