author Scott Wittenburg <scott.wittenburg@kitware.com> 2022-05-26 08:31:22 -0600
committer Todd Gamblin <tgamblin@llnl.gov> 2022-05-26 09:10:18 -0700
commit ca0c9686394d4e5630a36f23d8d3ff016db97e8a (patch)
tree 378770d6d4aabc96dd34c3620e7ae711a6f4cf12 /lib
parent d99a1b104752ded8ef4948cfdd79ab4f96751dd6 (diff)
ci: Support secure binary signing on protected pipelines (#30753)
This PR supports the creation of securely signed binaries built from spack develop as well as release branches and tags. Specifically:

- remove internal PR mirror url generation logic in favor of a buildcache destination on the command line
  - with a single mirror url specified in the spack.yaml, this makes it clearer where binaries from various pipelines are pushed
- designate some tags as reserved: ['public', 'protected', 'notary']
  - these tags are stripped from all jobs by default and provisioned internally based on pipeline type
- update gitlab ci yaml to include pipelines on more protected branches than just develop (so include releases and tags)
  - binaries from all protected pipelines are pushed into mirrors including the branch name, so releases, tags, and develop binaries are kept separate
- update rebuild jobs running on protected pipelines to run on special runners provisioned with an intermediate signing key
  - protected rebuild jobs no longer use the "SPACK_SIGNING_KEY" env var to obtain a signing key (in fact, the final signing key is nowhere available to rebuild jobs)
  - these intermediate signatures are verified at the end of each pipeline by a new signing job to ensure binaries were produced by a protected pipeline
- optionally schedule a signing/notary job at the end of the pipeline to sign all packages in the mirror
  - add signing-job-attributes to the gitlab-ci section of the spack environment to allow configuration
  - the signing job runs on a special runner (separate from protected rebuild runners) provisioned with the public intermediate key and the secret signing key
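For orientation, the reserved-tag handling described above condenses to roughly the following Python, a sketch mirroring the logic added to lib/spack/spack/ci.py in the diff below (not the verbatim implementation):

    SPACK_RESERVED_TAGS = ["public", "protected", "notary"]

    def provision_tags(tags, spack_pipeline_type):
        # Reserved tags are always stripped from user-supplied job tags...
        tags = [tag for tag in tags if tag not in SPACK_RESERVED_TAGS]
        # ...then provisioned internally based on the pipeline type.
        if spack_pipeline_type == 'spack_protected_branch':
            tags.extend(['aws', 'protected'])
        elif spack_pipeline_type == 'spack_pull_request':
            tags.extend(['public'])
        return tags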
Diffstat (limited to 'lib')
-rw-r--r--lib/spack/spack/binary_distribution.py18
-rw-r--r--lib/spack/spack/ci.py182
-rw-r--r--lib/spack/spack/cmd/ci.py44
-rw-r--r--lib/spack/spack/schema/gitlab_ci.py1
-rw-r--r--lib/spack/spack/test/cmd/ci.py155
5 files changed, 322 insertions, 78 deletions
diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py
index ac1723a94c..fb4c6ce472 100644
--- a/lib/spack/spack/binary_distribution.py
+++ b/lib/spack/spack/binary_distribution.py
@@ -210,7 +210,7 @@ class BinaryCacheIndex(object):
return spec_list
- def find_built_spec(self, spec):
+ def find_built_spec(self, spec, mirrors_to_check=None):
"""Look in our cache for the built spec corresponding to ``spec``.
If the spec can be found among the configured binary mirrors, a
@@ -225,6 +225,8 @@ class BinaryCacheIndex(object):
Args:
spec (spack.spec.Spec): Concrete spec to find
+ mirrors_to_check: Optional mapping containing mirrors to check. If
+ None, all configured mirrors are checked.
Returns:
An list of objects containing the found specs and mirror url where
@@ -240,17 +242,23 @@ class BinaryCacheIndex(object):
]
"""
self.regenerate_spec_cache()
- return self.find_by_hash(spec.dag_hash())
+ return self.find_by_hash(spec.dag_hash(), mirrors_to_check=mirrors_to_check)
- def find_by_hash(self, find_hash):
+ def find_by_hash(self, find_hash, mirrors_to_check=None):
"""Same as find_built_spec but uses the hash of a spec.
Args:
find_hash (str): hash of the spec to search
+ mirrors_to_check: Optional mapping containing mirrors to check. If
+ None, all configured mirrors are checked.
"""
if find_hash not in self._mirrors_for_spec:
return None
- return self._mirrors_for_spec[find_hash]
+ results = self._mirrors_for_spec[find_hash]
+ if not mirrors_to_check:
+ return results
+ mirror_urls = mirrors_to_check.values()
+ return [r for r in results if r['mirror_url'] in mirror_urls]
def update_spec(self, spec, found_list):
"""
@@ -1822,7 +1830,7 @@ def get_mirrors_for_spec(spec=None, mirrors_to_check=None, index_only=False):
tty.debug("No Spack mirrors are currently configured")
return {}
- results = binary_index.find_built_spec(spec)
+ results = binary_index.find_built_spec(spec, mirrors_to_check=mirrors_to_check)
# Maybe we just didn't have the latest information from the mirror, so
# try to fetch directly, unless we are only considering the indices.
diff --git a/lib/spack/spack/ci.py b/lib/spack/spack/ci.py
index d2eb8634ea..254821cb3d 100644
--- a/lib/spack/spack/ci.py
+++ b/lib/spack/spack/ci.py
@@ -33,7 +33,6 @@ import spack.repo
import spack.util.executable as exe
import spack.util.gpg as gpg_util
import spack.util.spack_yaml as syaml
-import spack.util.url as url_util
import spack.util.web as web_util
from spack.error import SpackError
from spack.spec import Spec
@@ -42,10 +41,8 @@ JOB_RETRY_CONDITIONS = [
'always',
]
-SPACK_PR_MIRRORS_ROOT_URL = 's3://spack-binaries-prs'
-SPACK_SHARED_PR_MIRROR_URL = url_util.join(SPACK_PR_MIRRORS_ROOT_URL,
- 'shared_pr_mirror')
TEMP_STORAGE_MIRROR_NAME = 'ci_temporary_mirror'
+SPACK_RESERVED_TAGS = ["public", "protected", "notary"]
spack_gpg = spack.main.SpackCommand('gpg')
spack_compiler = spack.main.SpackCommand('compiler')
@@ -199,6 +196,11 @@ def _get_cdash_build_name(spec, build_group):
spec.name, spec.version, spec.compiler, spec.architecture, build_group)
+def _remove_reserved_tags(tags):
+ """Convenience function to strip reserved tags from jobs"""
+ return [tag for tag in tags if tag not in SPACK_RESERVED_TAGS]
+
+
def _get_spec_string(spec):
format_elements = [
'{name}{@version}',
@@ -231,8 +233,10 @@ def _add_dependency(spec_label, dep_label, deps):
deps[spec_label].add(dep_label)
-def _get_spec_dependencies(specs, deps, spec_labels, check_index_only=False):
- spec_deps_obj = _compute_spec_deps(specs, check_index_only=check_index_only)
+def _get_spec_dependencies(specs, deps, spec_labels, check_index_only=False,
+ mirrors_to_check=None):
+ spec_deps_obj = _compute_spec_deps(specs, check_index_only=check_index_only,
+ mirrors_to_check=mirrors_to_check)
if spec_deps_obj:
dependencies = spec_deps_obj['dependencies']
@@ -249,7 +253,7 @@ def _get_spec_dependencies(specs, deps, spec_labels, check_index_only=False):
_add_dependency(entry['spec'], entry['depends'], deps)
-def stage_spec_jobs(specs, check_index_only=False):
+def stage_spec_jobs(specs, check_index_only=False, mirrors_to_check=None):
"""Take a set of release specs and generate a list of "stages", where the
jobs in any stage are dependent only on jobs in previous stages. This
allows us to maximize build parallelism within the gitlab-ci framework.
@@ -261,6 +265,8 @@ def stage_spec_jobs(specs, check_index_only=False):
are up to date on those mirrors. This flag limits that search to
the binary cache indices on those mirrors to speed the process up,
even though there is no guarantee the index is up to date.
+ mirrors_to_check: Optional mapping giving mirrors to check instead of
+ any configured mirrors.
Returns: A tuple of information objects describing the specs, dependencies
and stages:
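A hypothetical call sketch, assuming concrete_specs is a list of concrete Spec objects (the override URL is illustrative):

    # Restrict the "already built?" check to a single override mirror,
    # consulting only cached buildcache indices for speed.
    spec_labels, deps, stages = stage_spec_jobs(
        concrete_specs,
        check_index_only=True,
        mirrors_to_check={'override': 's3://spack-binaries/develop'})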
@@ -297,8 +303,8 @@ def stage_spec_jobs(specs, check_index_only=False):
deps = {}
spec_labels = {}
- _get_spec_dependencies(
- specs, deps, spec_labels, check_index_only=check_index_only)
+ _get_spec_dependencies(specs, deps, spec_labels, check_index_only=check_index_only,
+ mirrors_to_check=mirrors_to_check)
# Save the original deps, as we need to return them at the end of the
# function. In the while loop below, the "dependencies" variable is
@@ -340,7 +346,7 @@ def _print_staging_summary(spec_labels, dependencies, stages):
_get_spec_string(s)))
-def _compute_spec_deps(spec_list, check_index_only=False):
+def _compute_spec_deps(spec_list, check_index_only=False, mirrors_to_check=None):
"""
Computes all the dependencies for the spec(s) and generates a JSON
object which provides both a list of unique spec names as well as a
@@ -413,7 +419,7 @@ def _compute_spec_deps(spec_list, check_index_only=False):
continue
up_to_date_mirrors = bindist.get_mirrors_for_spec(
- spec=s, index_only=check_index_only)
+ spec=s, mirrors_to_check=mirrors_to_check, index_only=check_index_only)
skey = _spec_deps_key(s)
spec_labels[skey] = {
@@ -602,8 +608,8 @@ def get_spec_filter_list(env, affected_pkgs, dependencies=True, dependents=True)
def generate_gitlab_ci_yaml(env, print_summary, output_file,
prune_dag=False, check_index_only=False,
run_optimizer=False, use_dependencies=False,
- artifacts_root=None):
- """ Generate a gitlab yaml file to run a dynamic chile pipeline from
+ artifacts_root=None, remote_mirror_override=None):
+ """ Generate a gitlab yaml file to run a dynamic child pipeline from
the spec matrix in the active environment.
Arguments:
@@ -629,6 +635,10 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
artifacts_root (str): Path where artifacts like logs, environment
files (spack.yaml, spack.lock), etc should be written. GitLab
requires this to be within the project directory.
+ remote_mirror_override (str): Typically only needed when one spack.yaml
+ is used to populate several mirrors with binaries, based on some
+ criteria. Spack protected pipelines populate different mirrors based
+ on branch name, facilitated by this option.
"""
with spack.concretize.disable_compiler_existence_check():
with env.write_transaction():
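A hypothetical invocation with the new parameter (output path and mirror URL illustrative only):

    generate_gitlab_ci_yaml(
        env, True, '.gitlab-ci.yml',
        prune_dag=True,
        check_index_only=True,
        artifacts_root='jobs_scratch_dir',
        remote_mirror_override='s3://spack-binaries/releases/v0.18')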
@@ -678,17 +688,19 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
for s in affected_specs:
tty.debug(' {0}'.format(s.name))
- generate_job_name = os.environ.get('CI_JOB_NAME', None)
- parent_pipeline_id = os.environ.get('CI_PIPELINE_ID', None)
+ # Downstream jobs will "need" (depend on, for both scheduling and
+ # artifacts, which include spack.lock file) this pipeline generation
+ # job by both name and pipeline id. If those environment variables
+ # do not exist, then maybe this is just running in a shell, in which
+ # case, there is no expectation gitlab will ever run the generated
+ # pipeline and those environment variables do not matter.
+ generate_job_name = os.environ.get('CI_JOB_NAME', 'job-does-not-exist')
+ parent_pipeline_id = os.environ.get('CI_PIPELINE_ID', 'pipeline-does-not-exist')
+ # Values: "spack_pull_request", "spack_protected_branch", or not set
spack_pipeline_type = os.environ.get('SPACK_PIPELINE_TYPE', None)
- is_pr_pipeline = spack_pipeline_type == 'spack_pull_request'
- spack_pr_branch = os.environ.get('SPACK_PR_BRANCH', None)
- pr_mirror_url = None
- if spack_pr_branch:
- pr_mirror_url = url_util.join(SPACK_PR_MIRRORS_ROOT_URL,
- spack_pr_branch)
+ spack_buildcache_copy = os.environ.get('SPACK_COPY_BUILDCACHE', None)
if 'mirrors' not in yaml_root or len(yaml_root['mirrors'].values()) < 1:
tty.die('spack ci generate requires an env containing a mirror')
@@ -743,14 +755,25 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
'strip-compilers': False,
})
- # Add per-PR mirror (and shared PR mirror) if enabled, as some specs might
- # be up to date in one of those and thus not need to be rebuilt.
- if pr_mirror_url:
- spack.mirror.add(
- 'ci_pr_mirror', pr_mirror_url, cfg.default_modify_scope())
- spack.mirror.add('ci_shared_pr_mirror',
- SPACK_SHARED_PR_MIRROR_URL,
- cfg.default_modify_scope())
+ # If a remote mirror override (alternate buildcache destination) was
+ # specified, add it here in case it has already built hashes we might
+ # generate.
+ mirrors_to_check = None
+ if remote_mirror_override:
+ if spack_pipeline_type == 'spack_protected_branch':
+ # Overriding the main mirror in this case might result
+ # in skipping jobs on a release pipeline because specs are
+ # up to date in develop. Eventually we want to notice and take
+ # advantage of this by scheduling a job to copy the spec from
+ # develop to the release, but until we have that, this makes
+ # sure we schedule a rebuild job if the spec isn't already in the
+ # override mirror.
+ mirrors_to_check = {
+ 'override': remote_mirror_override
+ }
+ else:
+ spack.mirror.add(
+ 'ci_pr_mirror', remote_mirror_override, cfg.default_modify_scope())
pipeline_artifacts_dir = artifacts_root
if not pipeline_artifacts_dir:
@@ -825,11 +848,13 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
phase_spec.concretize()
staged_phases[phase_name] = stage_spec_jobs(
concrete_phase_specs,
- check_index_only=check_index_only)
+ check_index_only=check_index_only,
+ mirrors_to_check=mirrors_to_check)
finally:
- # Clean up PR mirror if enabled
- if pr_mirror_url:
- spack.mirror.remove('ci_pr_mirror', cfg.default_modify_scope())
+ # Clean up remote mirror override if enabled
+ if remote_mirror_override:
+ if spack_pipeline_type != 'spack_protected_branch':
+ spack.mirror.remove('ci_pr_mirror', cfg.default_modify_scope())
all_job_names = []
output_object = {}
@@ -889,6 +914,14 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
tags = [tag for tag in runner_attribs['tags']]
+ if spack_pipeline_type is not None:
+ # For spack pipelines "public" and "protected" are reserved tags
+ tags = _remove_reserved_tags(tags)
+ if spack_pipeline_type == 'spack_protected_branch':
+ tags.extend(['aws', 'protected'])
+ elif spack_pipeline_type == 'spack_pull_request':
+ tags.extend(['public'])
+
variables = {}
if 'variables' in runner_attribs:
variables.update(runner_attribs['variables'])
@@ -1174,6 +1207,10 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
service_job_config,
cleanup_job)
+ if 'tags' in cleanup_job:
+ service_tags = _remove_reserved_tags(cleanup_job['tags'])
+ cleanup_job['tags'] = service_tags
+
cleanup_job['stage'] = 'cleanup-temp-storage'
cleanup_job['script'] = [
'spack -d mirror destroy --mirror-url {0}/$CI_PIPELINE_ID'.format(
@@ -1181,9 +1218,74 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
]
cleanup_job['when'] = 'always'
cleanup_job['retry'] = service_job_retries
+ cleanup_job['interruptible'] = True
output_object['cleanup'] = cleanup_job
+ if ('signing-job-attributes' in gitlab_ci and
+ spack_pipeline_type == 'spack_protected_branch'):
+ # External signing: generate a job to check and sign binary pkgs
+ stage_names.append('stage-sign-pkgs')
+ signing_job_config = gitlab_ci['signing-job-attributes']
+ signing_job = {}
+
+ signing_job_attrs_to_copy = [
+ 'image',
+ 'tags',
+ 'variables',
+ 'before_script',
+ 'script',
+ 'after_script',
+ ]
+
+ _copy_attributes(signing_job_attrs_to_copy,
+ signing_job_config,
+ signing_job)
+
+ signing_job_tags = []
+ if 'tags' in signing_job:
+ signing_job_tags = _remove_reserved_tags(signing_job['tags'])
+
+ for tag in ['aws', 'protected', 'notary']:
+ if tag not in signing_job_tags:
+ signing_job_tags.append(tag)
+ signing_job['tags'] = signing_job_tags
+
+ signing_job['stage'] = 'stage-sign-pkgs'
+ signing_job['when'] = 'always'
+ signing_job['retry'] = {
+ 'max': 2,
+ 'when': ['always']
+ }
+ signing_job['interruptible'] = True
+
+ output_object['sign-pkgs'] = signing_job
+
+ if spack_buildcache_copy:
+ # Generate a job to copy the contents from wherever the builds are getting
+ # pushed to the url specified in the "SPACK_COPY_BUILDCACHE" environment
+ # variable.
+ src_url = remote_mirror_override or remote_mirror_url
+ dest_url = spack_buildcache_copy
+
+ stage_names.append('stage-copy-buildcache')
+ copy_job = {
+ 'stage': 'stage-copy-buildcache',
+ 'tags': ['spack', 'public', 'medium', 'aws', 'x86_64'],
+ 'image': 'ghcr.io/spack/python-aws-bash:0.0.1',
+ 'when': 'on_success',
+ 'interruptible': True,
+ 'retry': service_job_retries,
+ 'script': [
+ '. ./share/spack/setup-env.sh',
+ 'spack --version',
+ 'aws s3 sync --exclude *index.json* --exclude *pgp* {0} {1}'.format(
+ src_url, dest_url)
+ ]
+ }
+
+ output_object['copy-mirror'] = copy_job
+
if rebuild_index_enabled:
# Add a final job to regenerate the index
stage_names.append('stage-rebuild-index')
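Taken together, the generated sign-pkgs entry would look roughly like this sketch, where image and script stand in for whatever the user's signing-job-attributes supply:

    signing_job = {
        'image': 'signing-image',            # hypothetical, copied from signing-job-attributes
        'script': ['sign-all-packages.sh'],  # hypothetical, copied from signing-job-attributes
        'tags': ['aws', 'protected', 'notary'],
        'stage': 'stage-sign-pkgs',
        'when': 'always',
        'retry': {'max': 2, 'when': ['always']},
        'interruptible': True,
    }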
@@ -1194,9 +1296,13 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
service_job_config,
final_job)
+ if 'tags' in final_job:
+ service_tags = _remove_reserved_tags(final_job['tags'])
+ final_job['tags'] = service_tags
+
index_target_mirror = mirror_urls[0]
- if is_pr_pipeline:
- index_target_mirror = pr_mirror_url
+ if remote_mirror_override:
+ index_target_mirror = remote_mirror_override
final_job['stage'] = 'stage-rebuild-index'
final_job['script'] = [
@@ -1205,6 +1311,7 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
]
final_job['when'] = 'always'
final_job['retry'] = service_job_retries
+ final_job['interruptible'] = True
output_object['rebuild-index'] = final_job
@@ -1237,8 +1344,9 @@ def generate_gitlab_ci_yaml(env, print_summary, output_file,
'SPACK_PIPELINE_TYPE': str(spack_pipeline_type)
}
- if pr_mirror_url:
- output_object['variables']['SPACK_PR_MIRROR_URL'] = pr_mirror_url
+ if remote_mirror_override:
+ (output_object['variables']
+ ['SPACK_REMOTE_MIRROR_OVERRIDE']) = remote_mirror_override
spack_stack_name = os.environ.get('SPACK_CI_STACK_NAME', None)
if spack_stack_name:
diff --git a/lib/spack/spack/cmd/ci.py b/lib/spack/spack/cmd/ci.py
index 9687aceac8..fc30c1c1bf 100644
--- a/lib/spack/spack/cmd/ci.py
+++ b/lib/spack/spack/cmd/ci.py
@@ -64,6 +64,11 @@ def setup_parser(subparser):
'--dependencies', action='store_true', default=False,
help="(Experimental) disable DAG scheduling; use "
' "plain" dependencies.')
+ generate.add_argument(
+ '--buildcache-destination', default=None,
+ help="Override the mirror configured in the environment (spack.yaml) " +
+ "in order to push binaries from the generated pipeline to a " +
+ "different location.")
prune_group = generate.add_mutually_exclusive_group()
prune_group.add_argument(
'--prune-dag', action='store_true', dest='prune_dag',
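With the new flag, a protected pipeline might be generated against a branch-specific destination, for example (the URL is illustrative):

    spack ci generate --check-index-only \
        --artifacts-root jobs_scratch_dir \
        --output-file jobs_scratch_dir/pipeline.yml \
        --buildcache-destination s3://spack-binaries/develop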
@@ -127,6 +132,7 @@ def ci_generate(args):
prune_dag = args.prune_dag
index_only = args.index_only
artifacts_root = args.artifacts_root
+ buildcache_destination = args.buildcache_destination
if not output_file:
output_file = os.path.abspath(".gitlab-ci.yml")
@@ -140,7 +146,8 @@ def ci_generate(args):
spack_ci.generate_gitlab_ci_yaml(
env, True, output_file, prune_dag=prune_dag,
check_index_only=index_only, run_optimizer=run_optimizer,
- use_dependencies=use_dependencies, artifacts_root=artifacts_root)
+ use_dependencies=use_dependencies, artifacts_root=artifacts_root,
+ remote_mirror_override=buildcache_destination)
if copy_yaml_to:
copy_to_dir = os.path.dirname(copy_yaml_to)
@@ -180,6 +187,9 @@ def ci_rebuild(args):
if not gitlab_ci:
tty.die('spack ci rebuild requires an env containing gitlab-ci cfg')
+ tty.msg('SPACK_BUILDCACHE_DESTINATION={0}'.format(
+ os.environ.get('SPACK_BUILDCACHE_DESTINATION', None)))
+
# Grab the environment variables we need. These either come from the
# pipeline generation step ("spack ci generate"), where they were written
# out as variables, or else provided by GitLab itself.
@@ -196,7 +206,7 @@ def ci_rebuild(args):
compiler_action = get_env_var('SPACK_COMPILER_ACTION')
cdash_build_name = get_env_var('SPACK_CDASH_BUILD_NAME')
spack_pipeline_type = get_env_var('SPACK_PIPELINE_TYPE')
- pr_mirror_url = get_env_var('SPACK_PR_MIRROR_URL')
+ remote_mirror_override = get_env_var('SPACK_REMOTE_MIRROR_OVERRIDE')
remote_mirror_url = get_env_var('SPACK_REMOTE_MIRROR_URL')
# Construct absolute paths relative to current $CI_PROJECT_DIR
@@ -244,6 +254,10 @@ def ci_rebuild(args):
tty.debug('Pipeline type - PR: {0}, develop: {1}'.format(
spack_is_pr_pipeline, spack_is_develop_pipeline))
+ # If no override url exists, then just push binary package to the
+ # normal remote mirror url.
+ buildcache_mirror_url = remote_mirror_override or remote_mirror_url
+
# Figure out what is our temporary storage mirror: Is it artifacts
# buildcache? Or temporary-storage-url-prefix? In some cases we need to
# force something or pipelines might not have a way to propagate build
@@ -373,7 +387,24 @@ def ci_rebuild(args):
cfg.default_modify_scope())
# Check configured mirrors for a built spec with a matching hash
- matches = bindist.get_mirrors_for_spec(job_spec, index_only=False)
+ mirrors_to_check = None
+ if remote_mirror_override and spack_pipeline_type == 'spack_protected_branch':
+ # Passing "mirrors_to_check" below means we *only* look in the override
+ # mirror to see if we should skip building, which is what we want.
+ mirrors_to_check = {
+ 'override': remote_mirror_override
+ }
+
+ # Adding this mirror to the list of configured mirrors means dependencies
+ # could be installed from either the override mirror or any other configured
+ # mirror (e.g. remote_mirror_url which is defined in the environment or
+ # pipeline_mirror_url), which is also what we want.
+ spack.mirror.add('mirror_override',
+ remote_mirror_override,
+ cfg.default_modify_scope())
+
+ matches = bindist.get_mirrors_for_spec(
+ job_spec, mirrors_to_check=mirrors_to_check, index_only=False)
if matches:
# Got a hash match on at least one configured mirror. All
@@ -517,13 +548,6 @@ def ci_rebuild(args):
# any logs from the staging directory to artifacts now
spack_ci.copy_stage_logs_to_artifacts(job_spec, job_log_dir)
- # Create buildcache on remote mirror, either on pr-specific mirror or
- # on the main mirror defined in the gitlab-enabled spack environment
- if spack_is_pr_pipeline:
- buildcache_mirror_url = pr_mirror_url
- else:
- buildcache_mirror_url = remote_mirror_url
-
# If the install succeeded, create a buildcache entry for this job spec
# and push it to one or more mirrors. If the install did not succeed,
# print out some instructions on how to reproduce this build failure
diff --git a/lib/spack/spack/schema/gitlab_ci.py b/lib/spack/spack/schema/gitlab_ci.py
index 4a08f8d6ce..80e40dd72b 100644
--- a/lib/spack/spack/schema/gitlab_ci.py
+++ b/lib/spack/spack/schema/gitlab_ci.py
@@ -110,6 +110,7 @@ core_shared_properties = union_dicts(
},
},
'service-job-attributes': runner_selector_schema,
+ 'signing-job-attributes': runner_selector_schema,
'rebuild-index': {'type': 'boolean'},
'broken-specs-url': {'type': 'string'},
},
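For reference, a gitlab-ci section fragment the extended schema accepts, sketched as the equivalent Python mapping (values illustrative; the new key takes the same shape as service-job-attributes):

    gitlab_ci = {
        'signing-job-attributes': {
            'tags': ['notary'],
            'image': 'signing-image',
            'script': ['sign-all-packages.sh'],
        },
        'rebuild-index': True,
    }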
diff --git a/lib/spack/spack/test/cmd/ci.py b/lib/spack/spack/test/cmd/ci.py
index ba422f0923..f6b6bb8c19 100644
--- a/lib/spack/spack/test/cmd/ci.py
+++ b/lib/spack/spack/test/cmd/ci.py
@@ -635,10 +635,6 @@ spack:
outputfile = str(tmpdir.join('.gitlab-ci.yml'))
with ev.read('test'):
- monkeypatch.setattr(
- ci, 'SPACK_PR_MIRRORS_ROOT_URL', r"file:///fake/mirror")
- monkeypatch.setattr(
- ci, 'SPACK_SHARED_PR_MIRROR_URL', r"file:///fake/mirror_two")
ci_cmd('generate', '--output-file', outputfile)
with open(outputfile) as f:
@@ -683,10 +679,6 @@ spack:
outputfile = str(tmpdir.join('.gitlab-ci.yml'))
with ev.read('test'):
- monkeypatch.setattr(
- ci, 'SPACK_PR_MIRRORS_ROOT_URL', r"file:///fake/mirror")
- monkeypatch.setattr(
- ci, 'SPACK_SHARED_PR_MIRROR_URL', r"file:///fake/mirror_two")
ci_cmd('generate', '--output-file', outputfile)
with open(outputfile) as f:
@@ -920,6 +912,77 @@ spack:
env_cmd('deactivate')
+def test_ci_generate_mirror_override(tmpdir, mutable_mock_env_path,
+ install_mockery_mutable_config, mock_packages,
+ mock_fetch, mock_stage, mock_binary_index,
+ ci_base_environment):
+ """Ensure that protected pipelines using --buildcache-destination do not
+ skip building specs that are not in the override mirror when they are
+ found in the main mirror."""
+ os.environ.update({
+ 'SPACK_PIPELINE_TYPE': 'spack_protected_branch',
+ })
+
+ working_dir = tmpdir.join('working_dir')
+
+ mirror_dir = working_dir.join('mirror')
+ mirror_url = 'file://{0}'.format(mirror_dir.strpath)
+
+ spack_yaml_contents = """
+spack:
+ definitions:
+ - packages: [patchelf]
+ specs:
+ - $packages
+ mirrors:
+ test-mirror: {0}
+ gitlab-ci:
+ mappings:
+ - match:
+ - patchelf
+ runner-attributes:
+ tags:
+ - donotcare
+ image: donotcare
+ service-job-attributes:
+ tags:
+ - nonbuildtag
+ image: basicimage
+""".format(mirror_url)
+
+ filename = str(tmpdir.join('spack.yaml'))
+ with open(filename, 'w') as f:
+ f.write(spack_yaml_contents)
+
+ with tmpdir.as_cwd():
+ env_cmd('create', 'test', './spack.yaml')
+ first_ci_yaml = str(tmpdir.join('.gitlab-ci-1.yml'))
+ second_ci_yaml = str(tmpdir.join('.gitlab-ci-2.yml'))
+ with ev.read('test'):
+ install_cmd()
+ buildcache_cmd('create', '-u', '--mirror-url', mirror_url, 'patchelf')
+ buildcache_cmd('update-index', '--mirror-url', mirror_url, output=str)
+
+ # This generate should not trigger a rebuild of patchelf, since it's in
+ # the main mirror referenced in the environment.
+ ci_cmd('generate', '--check-index-only', '--output-file', first_ci_yaml)
+
+ # Because we used a mirror override (--buildcache-destination) on a
+ # spack protected pipeline, we expect to only look in the override
+ # mirror for the spec, and thus the patchelf job should be generated in
+ # this pipeline
+ ci_cmd('generate', '--check-index-only', '--output-file', second_ci_yaml,
+ '--buildcache-destination', 'file:///mirror/not/exist')
+
+ with open(first_ci_yaml) as fd1:
+ first_yaml = fd1.read()
+ assert 'no-specs-to-rebuild' in first_yaml
+
+ with open(second_ci_yaml) as fd2:
+ second_yaml = fd2.read()
+ assert 'no-specs-to-rebuild' not in second_yaml
+
+
@pytest.mark.disable_clean_stage_check
def test_push_mirror_contents(tmpdir, mutable_mock_env_path,
install_mockery_mutable_config, mock_packages,
@@ -1151,10 +1214,6 @@ spack:
with ev.read('test'):
monkeypatch.setattr(
spack.main, 'get_version', lambda: '0.15.3-416-12ad69eb1')
- monkeypatch.setattr(
- ci, 'SPACK_PR_MIRRORS_ROOT_URL', r"file:///fake/mirror")
- monkeypatch.setattr(
- ci, 'SPACK_SHARED_PR_MIRROR_URL', r"file:///fake/mirror_two")
ci_cmd('generate', '--output-file', outputfile)
with open(outputfile) as f:
@@ -1256,10 +1315,6 @@ spack:
outputfile = str(tmpdir.join('.gitlab-ci.yml'))
with ev.read('test'):
- monkeypatch.setattr(
- ci, 'SPACK_PR_MIRRORS_ROOT_URL', r"file:///fake/mirror")
- monkeypatch.setattr(
- ci, 'SPACK_SHARED_PR_MIRROR_URL', r"file:///fake/mirror_two")
ci_cmd('generate', '--output-file', outputfile, '--dependencies')
with open(outputfile) as f:
@@ -1417,11 +1472,6 @@ spack:
outputfile = str(tmpdir.join('.gitlab-ci.yml'))
with ev.read('test'):
- monkeypatch.setattr(
- ci, 'SPACK_PR_MIRRORS_ROOT_URL', r"file:///fake/mirror")
- monkeypatch.setattr(
- ci, 'SPACK_SHARED_PR_MIRROR_URL', r"file:///fake/mirror_two")
-
ci_cmd('generate', '--output-file', outputfile)
with open(outputfile) as of:
@@ -1630,11 +1680,6 @@ spack:
env_cmd('create', 'test', './spack.yaml')
outputfile = str(tmpdir.join('.gitlab-ci.yml'))
- monkeypatch.setattr(
- ci, 'SPACK_PR_MIRRORS_ROOT_URL', r"file:///fake/mirror")
- monkeypatch.setattr(
- ci, 'SPACK_SHARED_PR_MIRROR_URL', r"file:///fake/mirror_two")
-
with ev.read('test'):
ci_cmd('generate', '--output-file', outputfile)
@@ -1715,6 +1760,64 @@ spack:
assert(ex not in output)
+def test_ci_generate_external_signing_job(tmpdir, mutable_mock_env_path,
+ install_mockery,
+ mock_packages, monkeypatch,
+ ci_base_environment):
+ """Verify that in external signing mode: 1) each rebuild jobs includes
+ the location where the binary hash information is written and 2) we
+ properly generate a final signing job in the pipeline."""
+ os.environ.update({
+ 'SPACK_PIPELINE_TYPE': 'spack_protected_branch'
+ })
+ filename = str(tmpdir.join('spack.yaml'))
+ with open(filename, 'w') as f:
+ f.write("""\
+spack:
+ specs:
+ - archive-files
+ mirrors:
+ some-mirror: https://my.fake.mirror
+ gitlab-ci:
+ temporary-storage-url-prefix: file:///work/temp/mirror
+ mappings:
+ - match:
+ - archive-files
+ runner-attributes:
+ tags:
+ - donotcare
+ image: donotcare
+ signing-job-attributes:
+ tags:
+ - nonbuildtag
+ - secretrunner
+ image:
+ name: customdockerimage
+ entrypoint: []
+ variables:
+ IMPORTANT_INFO: avalue
+ script:
+ - echo hello
+""")
+
+ with tmpdir.as_cwd():
+ env_cmd('create', 'test', './spack.yaml')
+ outputfile = str(tmpdir.join('.gitlab-ci.yml'))
+
+ with ev.read('test'):
+ ci_cmd('generate', '--output-file', outputfile)
+
+ with open(outputfile) as of:
+ pipeline_doc = syaml.load(of.read())
+
+ assert 'sign-pkgs' in pipeline_doc
+ signing_job = pipeline_doc['sign-pkgs']
+ assert 'tags' in signing_job
+ signing_job_tags = signing_job['tags']
+ for expected_tag in ['notary', 'protected', 'aws']:
+ assert expected_tag in signing_job_tags
+
+
def test_ci_reproduce(tmpdir, mutable_mock_env_path,
install_mockery, mock_packages, monkeypatch,
last_two_git_commits, ci_base_environment, mock_binary_index):