diff options
author | Massimiliano Culpo <massimiliano.culpo@gmail.com> | 2024-08-16 13:42:01 +0200 |
---|---|---|
committer | GitHub <noreply@github.com> | 2024-08-16 13:42:01 +0200 |
commit | 4e9fbca0337babd47e69c685700cb870653cf2f5 (patch) | |
tree | 7c0897ba3399e6189008266a57ab08cd05956502 | |
parent | a2fd26bbcc26b04064c2079c2fa8897fe4893212 (diff) | |
download | spack-4e9fbca0337babd47e69c685700cb870653cf2f5.tar.gz spack-4e9fbca0337babd47e69c685700cb870653cf2f5.tar.bz2 spack-4e9fbca0337babd47e69c685700cb870653cf2f5.tar.xz spack-4e9fbca0337babd47e69c685700cb870653cf2f5.zip |
Clean up `test/cmd/ci.py` (#45774)
* Use absolute paths instead of https:// fake mirrors (this speeds up tests by avoiding requests)
* Add a fixture to gather in a single place code that is copy/pasted in a lot of tests
* General clean-up of tests and repeated code
Co-authored-by: Harmen Stoppels <me@harmenstoppels.nl>
-rw-r--r-- | lib/spack/spack/ci.py | 4 | ||||
-rw-r--r-- | lib/spack/spack/test/cmd/ci.py | 1635 |
2 files changed, 603 insertions, 1036 deletions
diff --git a/lib/spack/spack/ci.py b/lib/spack/spack/ci.py index 57a15bbd54..842e944a9a 100644 --- a/lib/spack/spack/ci.py +++ b/lib/spack/spack/ci.py @@ -1433,10 +1433,6 @@ def copy_stage_logs_to_artifacts(job_spec: spack.spec.Spec, job_log_dir: str) -> job_log_dir: path into which build log should be copied """ tty.debug(f"job spec: {job_spec}") - if not job_spec: - msg = f"Cannot copy stage logs: job spec ({job_spec}) is required" - tty.error(msg) - return try: pkg_cls = spack.repo.PATH.get_pkg_class(job_spec.name) diff --git a/lib/spack/spack/test/cmd/ci.py b/lib/spack/spack/test/cmd/ci.py index 1747c4a26a..012c119ad7 100644 --- a/lib/spack/spack/test/cmd/ci.py +++ b/lib/spack/spack/test/cmd/ci.py @@ -2,11 +2,12 @@ # Spack Project Developers. See the top-level COPYRIGHT file for details. # # SPDX-License-Identifier: (Apache-2.0 OR MIT) - import filecmp import json import os +import pathlib import shutil +from typing import NamedTuple import jsonschema import pytest @@ -25,13 +26,11 @@ import spack.paths as spack_paths import spack.repo as repo import spack.util.gpg import spack.util.spack_yaml as syaml -import spack.util.url as url_util from spack.cmd.ci import FAILED_CREATE_BUILDCACHE_CODE from spack.schema.buildcache_spec import schema as specfile_schema from spack.schema.ci import schema as ci_schema from spack.schema.database_index import schema as db_idx_schema from spack.spec import Spec -from spack.util.pattern import Bunch config_cmd = spack.main.SpackCommand("config") ci_cmd = spack.main.SpackCommand("ci") @@ -42,7 +41,11 @@ install_cmd = spack.main.SpackCommand("install") uninstall_cmd = spack.main.SpackCommand("uninstall") buildcache_cmd = spack.main.SpackCommand("buildcache") -pytestmark = [pytest.mark.not_on_windows("does not run on windows"), pytest.mark.maybeslow] +pytestmark = [ + pytest.mark.usefixtures("mock_packages"), + pytest.mark.not_on_windows("does not run on windows"), + pytest.mark.maybeslow, +] @pytest.fixture() @@ -89,6 +92,35 @@ 
testjob: yield repo_path +@pytest.fixture() +def ci_generate_test(tmp_path, mutable_mock_env_path, install_mockery, ci_base_environment): + """Returns a function that creates a new test environment, and runs 'spack generate' + on it, given the content of the spack.yaml file. + + Additional positional arguments will be added to the 'spack generate' call. + """ + + def _func(spack_yaml_content, *args, fail_on_error=True): + spack_yaml = tmp_path / "spack.yaml" + spack_yaml.write_text(spack_yaml_content) + + env_cmd("create", "test", str(spack_yaml)) + outputfile = tmp_path / ".gitlab-ci.yml" + with ev.read("test"): + output = ci_cmd( + "generate", + "--output-file", + str(outputfile), + *args, + output=str, + fail_on_error=fail_on_error, + ) + + return spack_yaml, outputfile, output + + return _func + + def test_specs_staging(config, tmpdir): """Make sure we achieve the best possible staging for the following spec DAG:: @@ -145,21 +177,13 @@ and then 'd', 'b', and 'a' to be put in the next three stages, respectively. assert spec_a_label in stages[3] -def test_ci_generate_with_env( - tmpdir, - mutable_mock_env_path, - install_mockery, - mock_packages, - ci_base_environment, - mock_binary_index, -): +def test_ci_generate_with_env(ci_generate_test, tmp_path, mock_binary_index): """Make sure we can get a .gitlab-ci.yml from an environment file - which has the gitlab-ci, cdash, and mirrors sections.""" - mirror_url = "https://my.fake.mirror" - filename = str(tmpdir.join("spack.yaml")) - with open(filename, "w") as f: - f.write( - """\ + which has the gitlab-ci, cdash, and mirrors sections. 
+ """ + mirror_url = tmp_path / "ci-mirror" + spack_yaml, outputfile, _ = ci_generate_test( + f"""\ spack: definitions: - old-gcc-pkgs: @@ -174,7 +198,7 @@ spack: - matrix: - [$old-gcc-pkgs] mirrors: - some-mirror: {0} + some-mirror: {mirror_url} ci: pipeline-gen: - submapping: @@ -201,92 +225,54 @@ spack: url: https://my.fake.cdash project: Not used site: Nothing -""".format( - mirror_url - ) - ) - with tmpdir.as_cwd(): - env_cmd("create", "test", "./spack.yaml") - outputfile = str(tmpdir.join(".gitlab-ci.yml")) - - with ev.read("test"): - ci_cmd("generate", "--output-file", outputfile) +""" + ) + yaml_contents = syaml.load(outputfile.read_text()) - with open(outputfile) as f: - contents = f.read() - yaml_contents = syaml.load(contents) - assert "workflow" in yaml_contents - assert "rules" in yaml_contents["workflow"] - assert yaml_contents["workflow"]["rules"] == [{"when": "always"}] + assert "workflow" in yaml_contents + assert "rules" in yaml_contents["workflow"] + assert yaml_contents["workflow"]["rules"] == [{"when": "always"}] - assert "stages" in yaml_contents - assert len(yaml_contents["stages"]) == 5 - assert yaml_contents["stages"][0] == "stage-0" - assert yaml_contents["stages"][4] == "stage-rebuild-index" + assert "stages" in yaml_contents + assert len(yaml_contents["stages"]) == 5 + assert yaml_contents["stages"][0] == "stage-0" + assert yaml_contents["stages"][4] == "stage-rebuild-index" - assert "rebuild-index" in yaml_contents - rebuild_job = yaml_contents["rebuild-index"] - expected = "spack buildcache update-index --keys {0}".format(mirror_url) - assert rebuild_job["script"][0] == expected - assert rebuild_job["custom_attribute"] == "custom!" + assert "rebuild-index" in yaml_contents + rebuild_job = yaml_contents["rebuild-index"] + assert rebuild_job["script"][0] == f"spack buildcache update-index --keys {mirror_url}" + assert rebuild_job["custom_attribute"] == "custom!" 
- assert "variables" in yaml_contents - assert "SPACK_ARTIFACTS_ROOT" in yaml_contents["variables"] - artifacts_root = yaml_contents["variables"]["SPACK_ARTIFACTS_ROOT"] - assert artifacts_root == "jobs_scratch_dir" + assert "variables" in yaml_contents + assert "SPACK_ARTIFACTS_ROOT" in yaml_contents["variables"] + assert yaml_contents["variables"]["SPACK_ARTIFACTS_ROOT"] == "jobs_scratch_dir" -def test_ci_generate_with_env_missing_section( - tmpdir, - working_env, - mutable_mock_env_path, - install_mockery, - mock_packages, - ci_base_environment, - mock_binary_index, -): +def test_ci_generate_with_env_missing_section(ci_generate_test, tmp_path, mock_binary_index): """Make sure we get a reasonable message if we omit gitlab-ci section""" - filename = str(tmpdir.join("spack.yaml")) - with open(filename, "w") as f: - f.write( - """\ + _, _, output = ci_generate_test( + f"""\ spack: specs: - archive-files mirrors: - some-mirror: https://my.fake.mirror -""" - ) - - expect_out = "Environment does not have `ci` a configuration" - - with tmpdir.as_cwd(): - env_cmd("create", "test", "./spack.yaml") - - with ev.read("test"): - output = ci_cmd("generate", fail_on_error=False, output=str) - assert expect_out in output + some-mirror: {tmp_path / 'ci-mirror'} +""", + fail_on_error=False, + ) + assert "Environment does not have `ci` a configuration" in output -def test_ci_generate_with_cdash_token( - tmpdir, - mutable_mock_env_path, - install_mockery, - mock_packages, - ci_base_environment, - mock_binary_index, -): +def test_ci_generate_with_cdash_token(ci_generate_test, tmp_path, mock_binary_index, monkeypatch): """Make sure we it doesn't break if we configure cdash""" - os.environ.update({"SPACK_CDASH_AUTH_TOKEN": "notreallyatokenbutshouldnotmatter"}) - filename = str(tmpdir.join("spack.yaml")) - with open(filename, "w") as f: - f.write( - """\ + monkeypatch.setenv("SPACK_CDASH_AUTH_TOKEN", "notreallyatokenbutshouldnotmatter") + backup_file = tmp_path / "backup-ci.yml" + 
spack_yaml_content = f"""\ spack: specs: - archive-files mirrors: - some-mirror: https://my.fake.mirror + some-mirror: {tmp_path / "ci-mirror"} ci: enable-artifacts-buildcache: True pipeline-gen: @@ -299,53 +285,35 @@ spack: image: donotcare cdash: build-group: Not important - url: https://my.fake.cdash + url: {(tmp_path / "cdash").as_uri()} project: Not used site: Nothing """ - ) - - with tmpdir.as_cwd(): - env_cmd("create", "test", "./spack.yaml") - - with ev.read("test"): - copy_to_file = str(tmpdir.join("backup-ci.yml")) - output = ci_cmd("generate", "--copy-to", copy_to_file, output=str) - # That fake token should still have resulted in being unable to - # register build group with cdash, but the workload should - # still have been generated. - expect = "Problem populating buildgroup" - assert expect in output - - dir_contents = os.listdir(tmpdir.strpath) - - assert "backup-ci.yml" in dir_contents - - orig_file = str(tmpdir.join(".gitlab-ci.yml")) + spack_yaml, original_file, output = ci_generate_test( + spack_yaml_content, "--copy-to", str(backup_file) + ) - assert filecmp.cmp(orig_file, copy_to_file) is True + # That fake token should still have resulted in being unable to + # register build group with cdash, but the workload should + # still have been generated. 
+ assert "Problem populating buildgroup" in output + assert backup_file.exists() + assert filecmp.cmp(str(original_file), str(backup_file)) def test_ci_generate_with_custom_settings( - tmpdir, - working_env, - mutable_mock_env_path, - install_mockery, - mock_packages, - monkeypatch, - ci_base_environment, - mock_binary_index, + ci_generate_test, tmp_path, mock_binary_index, monkeypatch ): """Test use of user-provided scripts and attributes""" - filename = str(tmpdir.join("spack.yaml")) - with open(filename, "w") as f: - f.write( - """\ + monkeypatch.setattr(spack.main, "get_version", lambda: "0.15.3") + monkeypatch.setattr(spack.main, "get_spack_commit", lambda: "big ol commit sha") + spack_yaml, outputfile, _ = ci_generate_test( + f"""\ spack: specs: - archive-files mirrors: - some-mirror: https://my.fake.mirror + some-mirror: {tmp_path / "ci-mirror"} ci: pipeline-gen: - submapping: @@ -356,13 +324,13 @@ spack: - donotcare variables: ONE: plain-string-value - TWO: ${INTERP_ON_BUILD} + TWO: ${{INTERP_ON_BUILD}} before_script: - mkdir /some/path - pushd /some/path - - git clone ${SPACK_REPO} + - git clone ${{SPACK_REPO}} - cd spack - - git checkout ${SPACK_REF} + - git checkout ${{SPACK_REF}} - popd script: - spack -d ci rebuild @@ -373,76 +341,54 @@ spack: paths: - some/custom/artifact """ - ) - - with tmpdir.as_cwd(): - env_cmd("create", "test", "./spack.yaml") - outputfile = str(tmpdir.join(".gitlab-ci.yml")) - - with ev.read("test"): - monkeypatch.setattr(spack.main, "get_version", lambda: "0.15.3") - monkeypatch.setattr(spack.main, "get_spack_commit", lambda: "big ol commit sha") - ci_cmd("generate", "--output-file", outputfile) - - with open(outputfile) as f: - contents = f.read() - yaml_contents = syaml.load(contents) + ) + yaml_contents = syaml.load(outputfile.read_text()) + + assert yaml_contents["variables"]["SPACK_VERSION"] == "0.15.3" + assert yaml_contents["variables"]["SPACK_CHECKOUT_VERSION"] == "big ol commit sha" + + assert any("archive-files" in 
key for key in yaml_contents) + for ci_key, ci_obj in yaml_contents.items(): + if "archive-files" not in ci_key: + continue + + # Ensure we have variables, possibly interpolated + assert ci_obj["variables"]["ONE"] == "plain-string-value" + assert ci_obj["variables"]["TWO"] == "${INTERP_ON_BUILD}" + + # Ensure we have scripts verbatim + assert ci_obj["before_script"] == [ + "mkdir /some/path", + "pushd /some/path", + "git clone ${SPACK_REPO}", + "cd spack", + "git checkout ${SPACK_REF}", + "popd", + ] + assert ci_obj["script"][1].startswith("cd ") + ci_obj["script"][1] = "cd ENV" + assert ci_obj["script"] == [ + "spack -d ci rebuild", + "cd ENV", + "spack env activate --without-view .", + "spack ci rebuild", + ] + assert ci_obj["after_script"] == ["rm -rf /some/path/spack"] - found_it = False + # Ensure we have the custom attributes + assert "some/custom/artifact" in ci_obj["artifacts"]["paths"] + assert ci_obj["custom_attribute"] == "custom!" - global_vars = yaml_contents["variables"] - assert global_vars["SPACK_VERSION"] == "0.15.3" - assert global_vars["SPACK_CHECKOUT_VERSION"] == "big ol commit sha" - for ci_key in yaml_contents.keys(): - ci_obj = yaml_contents[ci_key] - if "archive-files" in ci_key: - # Ensure we have variables, possibly interpolated - var_d = ci_obj["variables"] - assert var_d["ONE"] == "plain-string-value" - assert var_d["TWO"] == "${INTERP_ON_BUILD}" - - # Ensure we have scripts verbatim - assert ci_obj["before_script"] == [ - "mkdir /some/path", - "pushd /some/path", - "git clone ${SPACK_REPO}", - "cd spack", - "git checkout ${SPACK_REF}", - "popd", - ] - assert ci_obj["script"][1].startswith("cd ") - ci_obj["script"][1] = "cd ENV" - assert ci_obj["script"] == [ - "spack -d ci rebuild", - "cd ENV", - "spack env activate --without-view .", - "spack ci rebuild", - ] - assert ci_obj["after_script"] == ["rm -rf /some/path/spack"] - - # Ensure we have the custom attributes - assert "some/custom/artifact" in ci_obj["artifacts"]["paths"] - assert 
ci_obj["custom_attribute"] == "custom!" - - found_it = True - - assert found_it - - -def test_ci_generate_pkg_with_deps( - tmpdir, working_env, mutable_mock_env_path, install_mockery, mock_packages, ci_base_environment -): +def test_ci_generate_pkg_with_deps(ci_generate_test, tmp_path, ci_base_environment): """Test pipeline generation for a package w/ dependencies""" - filename = str(tmpdir.join("spack.yaml")) - with open(filename, "w") as f: - f.write( - """\ + spack_yaml, outputfile, _ = ci_generate_test( + f"""\ spack: specs: - flatten-deps mirrors: - some-mirror: https://my.fake.mirror + some-mirror: {tmp_path / 'ci-mirror'} ci: enable-artifacts-buildcache: True pipeline-gen: @@ -458,58 +404,40 @@ spack: tags: - donotcare """ - ) + ) + yaml_contents = syaml.load(outputfile.read_text()) - with tmpdir.as_cwd(): - env_cmd("create", "test", "./spack.yaml") - outputfile = str(tmpdir.join(".gitlab-ci.yml")) + found = [] + for ci_key, ci_obj in yaml_contents.items(): + if "dependency-install" in ci_key: + assert "stage" in ci_obj + assert ci_obj["stage"] == "stage-0" + found.append("dependency-install") + if "flatten-deps" in ci_key: + assert "stage" in ci_obj + assert ci_obj["stage"] == "stage-1" + found.append("flatten-deps") - with ev.read("test"): - ci_cmd("generate", "--output-file", outputfile) - - with open(outputfile) as f: - contents = f.read() - yaml_contents = syaml.load(contents) - found = [] - for ci_key in yaml_contents.keys(): - ci_obj = yaml_contents[ci_key] - if "dependency-install" in ci_key: - assert "stage" in ci_obj - assert ci_obj["stage"] == "stage-0" - found.append("dependency-install") - if "flatten-deps" in ci_key: - assert "stage" in ci_obj - assert ci_obj["stage"] == "stage-1" - found.append("flatten-deps") - - assert "flatten-deps" in found - assert "dependency-install" in found - - -def test_ci_generate_for_pr_pipeline( - tmpdir, - working_env, - mutable_mock_env_path, - install_mockery, - mock_packages, - monkeypatch, - 
ci_base_environment, -): + assert "flatten-deps" in found + assert "dependency-install" in found + + +def test_ci_generate_for_pr_pipeline(ci_generate_test, tmp_path, monkeypatch): """Test that PR pipelines do not include a final stage job for rebuilding the mirror index, even if that job is specifically - configured""" - os.environ.update( - {"SPACK_PIPELINE_TYPE": "spack_pull_request", "SPACK_PR_BRANCH": "fake-test-branch"} - ) - filename = str(tmpdir.join("spack.yaml")) - with open(filename, "w") as f: - f.write( - """\ + configured. + """ + monkeypatch.setenv("SPACK_PIPELINE_TYPE", "spack_pull_request") + monkeypatch.setenv("SPACK_PR_BRANCH", "fake-test-branch") + monkeypatch.setattr(spack.ci, "SHARED_PR_MIRROR_URL", f"{tmp_path / 'shared-pr-mirror'}") + + spack_yaml, outputfile, _ = ci_generate_test( + f"""\ spack: specs: - flatten-deps mirrors: - some-mirror: https://my.fake.mirror + some-mirror: {tmp_path / 'ci-mirror'} ci: enable-artifacts-buildcache: True pipeline-gen: @@ -529,49 +457,25 @@ spack: tags: [donotcare] rebuild-index: False """ - ) - - monkeypatch.setattr(spack.ci, "SHARED_PR_MIRROR_URL", "https://fake.shared.pr.mirror") - - with tmpdir.as_cwd(): - env_cmd("create", "test", "./spack.yaml") - outputfile = str(tmpdir.join(".gitlab-ci.yml")) - - with ev.read("test"): - ci_cmd("generate", "--output-file", outputfile) - - with open(outputfile) as f: - contents = f.read() - yaml_contents = syaml.load(contents) - - assert "rebuild-index" not in yaml_contents + ) + yaml_contents = syaml.load(outputfile.read_text()) - assert "variables" in yaml_contents - pipeline_vars = yaml_contents["variables"] - assert "SPACK_PIPELINE_TYPE" in pipeline_vars - assert pipeline_vars["SPACK_PIPELINE_TYPE"] == "spack_pull_request" + assert "rebuild-index" not in yaml_contents + assert "variables" in yaml_contents + assert "SPACK_PIPELINE_TYPE" in yaml_contents["variables"] + assert yaml_contents["variables"]["SPACK_PIPELINE_TYPE"] == "spack_pull_request" -def 
test_ci_generate_with_external_pkg( - tmpdir, - working_env, - mutable_mock_env_path, - install_mockery, - mock_packages, - monkeypatch, - ci_base_environment, -): +def test_ci_generate_with_external_pkg(ci_generate_test, tmp_path, monkeypatch): """Make sure we do not generate jobs for external pkgs""" - filename = str(tmpdir.join("spack.yaml")) - with open(filename, "w") as f: - f.write( - """\ + spack_yaml, outputfile, _ = ci_generate_test( + f"""\ spack: specs: - archive-files - externaltest mirrors: - some-mirror: https://my.fake.mirror + some-mirror: {tmp_path / "ci-mirror"} ci: pipeline-gen: - submapping: @@ -583,86 +487,84 @@ spack: - donotcare image: donotcare """ - ) - - with tmpdir.as_cwd(): - env_cmd("create", "test", "./spack.yaml") - outputfile = str(tmpdir.join(".gitlab-ci.yml")) - - with ev.read("test"): - ci_cmd("generate", "--output-file", outputfile) - - with open(outputfile) as f: - yaml_contents = syaml.load(f) - - # Check that the "externaltool" package was not erroneously staged - assert not any("externaltool" in key for key in yaml_contents) - - -def test_ci_rebuild_missing_config(tmpdir, working_env, mutable_mock_env_path): - spack_yaml_contents = """ -spack: - specs: - - archive-files -""" - - filename = str(tmpdir.join("spack.yaml")) - with open(filename, "w") as f: - f.write(spack_yaml_contents) - - with tmpdir.as_cwd(): - env_cmd("create", "test", "./spack.yaml") - env_cmd("activate", "--without-view", "--sh", "test") - out = ci_cmd("rebuild", fail_on_error=False) - assert "env containing ci" in out + ) + yaml_contents = syaml.load(outputfile.read_text()) + # Check that the "externaltool" package was not erroneously staged + assert all("externaltool" not in key for key in yaml_contents) + + +def test_ci_rebuild_missing_config(tmp_path, working_env, mutable_mock_env_path): + spack_yaml = tmp_path / "spack.yaml" + spack_yaml.write_text( + """ + spack: + specs: + - archive-files + """ + ) - env_cmd("deactivate") + env_cmd("create", "test", 
str(spack_yaml)) + env_cmd("activate", "--without-view", "--sh", "test") + out = ci_cmd("rebuild", fail_on_error=False) + assert "env containing ci" in out + env_cmd("deactivate") def _signing_key(): - signing_key_dir = spack_paths.mock_gpg_keys_path - signing_key_path = os.path.join(signing_key_dir, "package-signing-key") - with open(signing_key_path) as fd: - key = fd.read() - return key - - -def create_rebuild_env(tmpdir, pkg_name, broken_tests=False): - working_dir = tmpdir.join("working_dir") - - log_dir = os.path.join(working_dir.strpath, "logs") - repro_dir = os.path.join(working_dir.strpath, "repro") - test_dir = os.path.join(working_dir.strpath, "test") - env_dir = working_dir.join("concrete_env") - - mirror_dir = working_dir.join("mirror") - mirror_url = url_util.path_to_file_url(mirror_dir.strpath) - - broken_specs_path = os.path.join(working_dir.strpath, "naughty-list") - broken_specs_url = url_util.path_to_file_url(broken_specs_path) - temp_storage_url = "file:///path/to/per/pipeline/storage" - - broken_tests_packages = [pkg_name] if broken_tests else [] + signing_key_path = pathlib.Path(spack_paths.mock_gpg_keys_path) / "package-signing-key" + return signing_key_path.read_text() + + +class RebuildEnv(NamedTuple): + broken_spec_file: pathlib.Path + ci_job_url: str + ci_pipeline_url: str + env_dir: pathlib.Path + log_dir: pathlib.Path + mirror_dir: pathlib.Path + mirror_url: str + repro_dir: pathlib.Path + root_spec_dag_hash: str + test_dir: pathlib.Path + working_dir: pathlib.Path + + +def create_rebuild_env( + tmp_path: pathlib.Path, pkg_name: str, broken_tests: bool = False +) -> RebuildEnv: + scratch = tmp_path / "working_dir" + log_dir = scratch / "logs" + repro_dir = scratch / "repro" + test_dir = scratch / "test" + env_dir = scratch / "concrete_env" + mirror_dir = scratch / "mirror" + broken_specs_path = scratch / "naughty-list" + + mirror_url = mirror_dir.as_uri() + temp_storage_url = (tmp_path / "temp-storage").as_uri() ci_job_url = 
"https://some.domain/group/project/-/jobs/42" ci_pipeline_url = "https://some.domain/group/project/-/pipelines/7" - spack_yaml_contents = """ + env_dir.mkdir(parents=True) + with open(env_dir / "spack.yaml", "w") as f: + f.write( + f""" spack: definitions: - - packages: [{0}] + - packages: [{pkg_name}] specs: - $packages mirrors: - test-mirror: {1} + test-mirror: {mirror_dir} ci: - broken-specs-url: {2} - broken-tests-packages: {3} - temporary-storage-url-prefix: {4} + broken-specs-url: {broken_specs_path.as_uri()} + broken-tests-packages: {json.dumps([pkg_name] if broken_tests else [])} + temporary-storage-url-prefix: {temp_storage_url} pipeline-gen: - submapping: - match: - - {0} + - {pkg_name} build-job: tags: - donotcare @@ -672,37 +574,19 @@ spack: url: https://my.fake.cdash project: Not used site: Nothing -""".format( - pkg_name, mirror_url, broken_specs_url, broken_tests_packages, temp_storage_url - ) - - filename = str(tmpdir.join("spack.yaml")) - with open(filename, "w") as f: - f.write(spack_yaml_contents) - - with tmpdir.as_cwd(): - env_cmd("create", "test", "./spack.yaml") - with ev.read("test") as env: - with env.write_transaction(): - env.concretize() - env.write() - - if not os.path.exists(env_dir.strpath): - os.makedirs(env_dir.strpath) - - shutil.copyfile(env.manifest_path, os.path.join(env_dir.strpath, "spack.yaml")) - shutil.copyfile(env.lock_path, os.path.join(env_dir.strpath, "spack.lock")) +""" + ) - root_spec_dag_hash = None + with ev.Environment(env_dir) as env: + env.concretize() + env.write() - for h, s in env.specs_by_hash.items(): - if s.name == pkg_name: - root_spec_dag_hash = h + shutil.copy(env_dir / "spack.yaml", tmp_path / "spack.yaml") - assert root_spec_dag_hash + root_spec_dag_hash = env.concrete_roots()[0].dag_hash() - return Bunch( - broken_spec_file=os.path.join(broken_specs_path, root_spec_dag_hash), + return RebuildEnv( + broken_spec_file=broken_specs_path / root_spec_dag_hash, ci_job_url=ci_job_url, 
ci_pipeline_url=ci_pipeline_url, env_dir=env_dir, @@ -712,22 +596,22 @@ spack: repro_dir=repro_dir, root_spec_dag_hash=root_spec_dag_hash, test_dir=test_dir, - working_dir=working_dir, + working_dir=scratch, ) -def activate_rebuild_env(tmpdir, pkg_name, rebuild_env): +def activate_rebuild_env(tmp_path: pathlib.Path, pkg_name: str, rebuild_env: RebuildEnv): env_cmd("activate", "--without-view", "--sh", "-d", ".") # Create environment variables as gitlab would do it os.environ.update( { - "SPACK_ARTIFACTS_ROOT": rebuild_env.working_dir.strpath, - "SPACK_JOB_LOG_DIR": rebuild_env.log_dir, - "SPACK_JOB_REPRO_DIR": rebuild_env.repro_dir, - "SPACK_JOB_TEST_DIR": rebuild_env.test_dir, - "SPACK_LOCAL_MIRROR_DIR": rebuild_env.mirror_dir.strpath, - "SPACK_CONCRETE_ENV_DIR": rebuild_env.env_dir.strpath, + "SPACK_ARTIFACTS_ROOT": str(rebuild_env.working_dir), + "SPACK_JOB_LOG_DIR": str(rebuild_env.log_dir), + "SPACK_JOB_REPRO_DIR": str(rebuild_env.repro_dir), + "SPACK_JOB_TEST_DIR": str(rebuild_env.test_dir), + "SPACK_LOCAL_MIRROR_DIR": str(rebuild_env.mirror_dir), + "SPACK_CONCRETE_ENV_DIR": str(rebuild_env.env_dir), "CI_PIPELINE_ID": "7192", "SPACK_SIGNING_KEY": _signing_key(), "SPACK_JOB_SPEC_DAG_HASH": rebuild_env.root_spec_dag_hash, @@ -738,31 +622,30 @@ def activate_rebuild_env(tmpdir, pkg_name, rebuild_env): "SPACK_PIPELINE_TYPE": "spack_protected_branch", "CI_JOB_URL": rebuild_env.ci_job_url, "CI_PIPELINE_URL": rebuild_env.ci_pipeline_url, - "CI_PROJECT_DIR": tmpdir.join("ci-project").strpath, + "CI_PROJECT_DIR": str(tmp_path / "ci-project"), } ) @pytest.mark.parametrize("broken_tests", [True, False]) def test_ci_rebuild_mock_success( - tmpdir, + tmp_path: pathlib.Path, working_env, mutable_mock_env_path, install_mockery, mock_gnupghome, - mock_stage, mock_fetch, mock_binary_index, monkeypatch, broken_tests, ): pkg_name = "archive-files" - rebuild_env = create_rebuild_env(tmpdir, pkg_name, broken_tests) + rebuild_env = create_rebuild_env(tmp_path, pkg_name, 
broken_tests) monkeypatch.setattr(spack.cmd.ci, "SPACK_COMMAND", "echo") - with rebuild_env.env_dir.as_cwd(): - activate_rebuild_env(tmpdir, pkg_name, rebuild_env) + with working_dir(rebuild_env.env_dir): + activate_rebuild_env(tmp_path, pkg_name, rebuild_env) out = ci_cmd("rebuild", "--tests", fail_on_error=False) @@ -779,19 +662,18 @@ def test_ci_rebuild_mock_success( def test_ci_rebuild_mock_failure_to_push( - tmpdir, + tmp_path: pathlib.Path, working_env, mutable_mock_env_path, install_mockery, mock_gnupghome, - mock_stage, mock_fetch, mock_binary_index, ci_base_environment, monkeypatch, ): pkg_name = "trivial-install-test-package" - rebuild_env = create_rebuild_env(tmpdir, pkg_name) + rebuild_env = create_rebuild_env(tmp_path, pkg_name) # Mock the install script succuess def mock_success(*args, **kwargs): @@ -807,103 +689,30 @@ def test_ci_rebuild_mock_failure_to_push( monkeypatch.setattr(spack.binary_distribution, "push_or_raise", mock_push_or_raise) - with rebuild_env.env_dir.as_cwd(): - activate_rebuild_env(tmpdir, pkg_name, rebuild_env) + with working_dir(rebuild_env.env_dir): + activate_rebuild_env(tmp_path, pkg_name, rebuild_env) expect = f"Command exited with code {FAILED_CREATE_BUILDCACHE_CODE}" with pytest.raises(spack.main.SpackCommandError, match=expect): ci_cmd("rebuild", fail_on_error=True) -@pytest.mark.skip(reason="fails intermittently and covered by gitlab ci") -def test_ci_rebuild( - tmpdir, +def test_ci_require_signing( + tmp_path: pathlib.Path, working_env, mutable_mock_env_path, - install_mockery, - mock_packages, - monkeypatch, mock_gnupghome, - mock_fetch, ci_base_environment, - mock_binary_index, -): - pkg_name = "archive-files" - rebuild_env = create_rebuild_env(tmpdir, pkg_name) - - # Create job directories to be removed before processing (for coverage) - os.makedirs(rebuild_env.log_dir) - os.makedirs(rebuild_env.repro_dir) - os.makedirs(rebuild_env.test_dir) - - with rebuild_env.env_dir.as_cwd(): - activate_rebuild_env(tmpdir, 
pkg_name, rebuild_env) - - ci_cmd("rebuild", "--tests", fail_on_error=False) - - monkeypatch.setattr(spack.cmd.ci, "SPACK_COMMAND", "notcommand") - monkeypatch.setattr(spack.cmd.ci, "INSTALL_FAIL_CODE", 127) - - with rebuild_env.env_dir.as_cwd(): - activate_rebuild_env(tmpdir, pkg_name, rebuild_env) - - expected_repro_files = [ - "install.sh", - "root.json", - "archive-files.json", - "spack.yaml", - "spack.lock", - ] - repro_files = os.listdir(rebuild_env.repro_dir) - assert all([f in repro_files for f in expected_repro_files]) - - install_script_path = os.path.join(rebuild_env.repro_dir, "install.sh") - install_line = None - with open(install_script_path) as fd: - for line in fd: - if line.startswith('"notcommand"'): - install_line = line - - assert install_line - - def mystrip(s): - return s.strip('"').rstrip("\n").rstrip('"') - - install_parts = [mystrip(s) for s in install_line.split(" ")] - - assert "--keep-stage" in install_parts - assert "--no-check-signature" not in install_parts - assert "-f" in install_parts - flag_index = install_parts.index("-f") - assert "archive-files.json" in install_parts[flag_index + 1] - - with open(rebuild_env.broken_spec_file) as fd: - broken_spec_content = fd.read() - assert rebuild_env.ci_job_url in broken_spec_content - assert rebuild_env.ci_pipeline_url in broken_spec_content - - # Ensure also produce CDash output for skipped (or notrun) tests - test_files = os.listdir(rebuild_env.test_dir) - with open(os.path.join(rebuild_env.test_dir, test_files[0]), "r") as f: - have = False - for line in f: - if "notrun" in line: - have = True - break - assert have - - env_cmd("deactivate") - - -def test_ci_require_signing( - tmpdir, working_env, mutable_mock_env_path, mock_gnupghome, ci_base_environment + monkeypatch, ): - spack_yaml_contents = """ + spack_yaml = tmp_path / "spack.yaml" + spack_yaml.write_text( + f""" spack: specs: - archive-files mirrors: - test-mirror: file:///no-such-mirror + test-mirror: {tmp_path / "ci-mirror"} ci: 
pipeline-gen: - submapping: @@ -914,50 +723,46 @@ spack: - donotcare image: donotcare """ - filename = str(tmpdir.join("spack.yaml")) - with open(filename, "w") as f: - f.write(spack_yaml_contents) - - with tmpdir.as_cwd(): - env_cmd("activate", "--without-view", "--sh", "-d", ".") - - # Run without the variable to make sure we don't accidentally require signing - output = ci_cmd("rebuild", output=str, fail_on_error=False) - assert "spack must have exactly one signing key" not in output + ) + env_cmd("activate", "--without-view", "--sh", "-d", str(spack_yaml.parent)) - # Now run with the variable to make sure it works - os.environ.update({"SPACK_REQUIRE_SIGNING": "True"}) - output = ci_cmd("rebuild", output=str, fail_on_error=False) + # Run without the variable to make sure we don't accidentally require signing + output = ci_cmd("rebuild", output=str, fail_on_error=False) + assert "spack must have exactly one signing key" not in output - assert "spack must have exactly one signing key" in output + # Now run with the variable to make sure it works + monkeypatch.setenv("SPACK_REQUIRE_SIGNING", "True") + output = ci_cmd("rebuild", output=str, fail_on_error=False) + assert "spack must have exactly one signing key" in output + env_cmd("deactivate") def test_ci_nothing_to_rebuild( - tmpdir, + tmp_path: pathlib.Path, working_env, mutable_mock_env_path, install_mockery, - mock_packages, monkeypatch, mock_fetch, ci_base_environment, mock_binary_index, ): - working_dir = tmpdir.join("working_dir") - - mirror_dir = working_dir.join("mirror") - mirror_url = "file://{0}".format(mirror_dir.strpath) + scratch = tmp_path / "working_dir" + mirror_dir = scratch / "mirror" + mirror_url = mirror_dir.as_uri() - spack_yaml_contents = """ + with open(tmp_path / "spack.yaml", "w") as f: + f.write( + f""" spack: definitions: - packages: [archive-files] specs: - $packages mirrors: - test-mirror: {0} + test-mirror: {mirror_url} ci: - enable-artifacts-buildcache: True + 
enable-artifacts-buildcache: true pipeline-gen: - submapping: - match: @@ -966,37 +771,27 @@ spack: tags: - donotcare image: donotcare -""".format( - mirror_url - ) +""" + ) install_cmd("archive-files") buildcache_cmd("push", "-f", "-u", mirror_url, "archive-files") - filename = str(tmpdir.join("spack.yaml")) - with open(filename, "w") as f: - f.write(spack_yaml_contents) - - with tmpdir.as_cwd(): + with working_dir(tmp_path): env_cmd("create", "test", "./spack.yaml") with ev.read("test") as env: env.concretize() - root_spec_dag_hash = None - - for h, s in env.specs_by_hash.items(): - if s.name == "archive-files": - root_spec_dag_hash = h # Create environment variables as gitlab would do it os.environ.update( { - "SPACK_ARTIFACTS_ROOT": working_dir.strpath, + "SPACK_ARTIFACTS_ROOT": str(scratch), "SPACK_JOB_LOG_DIR": "log_dir", "SPACK_JOB_REPRO_DIR": "repro_dir", "SPACK_JOB_TEST_DIR": "test_dir", - "SPACK_LOCAL_MIRROR_DIR": mirror_dir.strpath, - "SPACK_CONCRETE_ENV_DIR": tmpdir.strpath, - "SPACK_JOB_SPEC_DAG_HASH": root_spec_dag_hash, + "SPACK_LOCAL_MIRROR_DIR": str(mirror_dir), + "SPACK_CONCRETE_ENV_DIR": str(tmp_path), + "SPACK_JOB_SPEC_DAG_HASH": env.concrete_roots()[0].dag_hash(), "SPACK_JOB_SPEC_PKG_NAME": "archive-files", "SPACK_COMPILER_ACTION": "NONE", "SPACK_REMOTE_MIRROR_URL": mirror_url, @@ -1017,12 +812,10 @@ spack: def test_ci_generate_mirror_override( - tmpdir, + tmp_path: pathlib.Path, mutable_mock_env_path, install_mockery, - mock_packages, mock_fetch, - mock_stage, mock_binary_index, ci_base_environment, ): @@ -1030,20 +823,18 @@ def test_ci_generate_mirror_override( skip building specs that are not in the override mirror when they are found in the main mirror.""" os.environ.update({"SPACK_PIPELINE_TYPE": "spack_protected_branch"}) + mirror_url = (tmp_path / "mirror").as_uri() - working_dir = tmpdir.join("working_dir") - - mirror_dir = working_dir.join("mirror") - mirror_url = "file://{0}".format(mirror_dir.strpath) - - spack_yaml_contents = """ + 
with open(tmp_path / "spack.yaml", "w") as f: + f.write( + f""" spack: definitions: - packages: [patchelf] specs: - $packages mirrors: - test-mirror: {0} + test-mirror: {mirror_url} ci: pipeline-gen: - submapping: @@ -1057,18 +848,13 @@ spack: tags: - nonbuildtag image: basicimage -""".format( - mirror_url - ) - - filename = str(tmpdir.join("spack.yaml")) - with open(filename, "w") as f: - f.write(spack_yaml_contents) +""" + ) - with tmpdir.as_cwd(): + with working_dir(tmp_path): env_cmd("create", "test", "./spack.yaml") - first_ci_yaml = str(tmpdir.join(".gitlab-ci-1.yml")) - second_ci_yaml = str(tmpdir.join(".gitlab-ci-2.yml")) + first_ci_yaml = str(tmp_path / ".gitlab-ci-1.yml") + second_ci_yaml = str(tmp_path / ".gitlab-ci-2.yml") with ev.read("test"): install_cmd() buildcache_cmd("push", "-u", mirror_url, "patchelf") @@ -1088,7 +874,7 @@ spack: "--output-file", second_ci_yaml, "--buildcache-destination", - "file:///mirror/not/exist", + (tmp_path / "does-not-exist").as_uri(), ) with open(first_ci_yaml) as fd1: @@ -1102,24 +888,21 @@ spack: @pytest.mark.disable_clean_stage_check def test_push_to_build_cache( - tmpdir, + tmp_path: pathlib.Path, mutable_mock_env_path, install_mockery, - mock_packages, mock_fetch, - mock_stage, mock_gnupghome, ci_base_environment, mock_binary_index, ): - working_dir = tmpdir.join("working_dir") - - mirror_dir = working_dir.join("mirror") - mirror_url = url_util.path_to_file_url(mirror_dir.strpath) + scratch = tmp_path / "working_dir" + mirror_dir = scratch / "mirror" + mirror_url = mirror_dir.as_uri() ci.import_signing_key(_signing_key()) - with tmpdir.as_cwd(): + with working_dir(tmp_path): with open("spack.yaml", "w") as f: f.write( f"""\ @@ -1155,7 +938,7 @@ spack: with ev.read("test"): concrete_spec = Spec("patchelf").concretized() spec_json = concrete_spec.to_json(hash=ht.dag_hash) - json_path = str(tmpdir.join("spec.json")) + json_path = str(tmp_path / "spec.json") with open(json_path, "w") as ypfd: ypfd.write(spec_json) @@ 
-1164,12 +947,10 @@ spack: for s in concrete_spec.traverse(): ci.push_to_build_cache(s, mirror_url, True) - buildcache_path = os.path.join(mirror_dir.strpath, "build_cache") - # Now test the --prune-dag (default) option of spack ci generate mirror_cmd("add", "test-ci", mirror_url) - outputfile_pruned = str(tmpdir.join("pruned_pipeline.yml")) + outputfile_pruned = str(tmp_path / "pruned_pipeline.yml") ci_cmd("generate", "--output-file", outputfile_pruned) with open(outputfile_pruned) as f: @@ -1188,7 +969,7 @@ spack: assert "rules" in yaml_contents["workflow"] assert yaml_contents["workflow"]["rules"] == [{"when": "always"}] - outputfile_not_pruned = str(tmpdir.join("unpruned_pipeline.yml")) + outputfile_not_pruned = str(tmp_path / "unpruned_pipeline.yml") ci_cmd("generate", "--no-prune-dag", "--output-file", outputfile_not_pruned) # Test the --no-prune-dag option of spack ci generate @@ -1214,45 +995,27 @@ spack: # Test generating buildcache index while we have bin mirror buildcache_cmd("update-index", mirror_url) - index_path = os.path.join(buildcache_path, "index.json") - with open(index_path) as idx_fd: + with open(mirror_dir / "build_cache" / "index.json") as idx_fd: index_object = json.load(idx_fd) jsonschema.validate(index_object, db_idx_schema) # Now that index is regenerated, validate "buildcache list" output - buildcache_list_output = buildcache_cmd("list", output=str) - assert "patchelf" in buildcache_list_output + assert "patchelf" in buildcache_cmd("list", output=str) # Also test buildcache_spec schema - bc_files_list = os.listdir(buildcache_path) - for file_name in bc_files_list: + for file_name in os.listdir(mirror_dir / "build_cache"): if file_name.endswith(".spec.json.sig"): - spec_json_path = os.path.join(buildcache_path, file_name) - with open(spec_json_path) as json_fd: - json_object = Spec.extract_json_from_clearsig(json_fd.read()) - jsonschema.validate(json_object, specfile_schema) - - logs_dir = working_dir.join("logs_dir") - if not 
os.path.exists(logs_dir.strpath): - os.makedirs(logs_dir.strpath) - - ci.copy_stage_logs_to_artifacts(concrete_spec, logs_dir.strpath) - - logs_dir_list = os.listdir(logs_dir.strpath) - - assert "spack-build-out.txt" in logs_dir_list + with open(mirror_dir / "build_cache" / file_name) as f: + spec_dict = Spec.extract_json_from_clearsig(f.read()) + jsonschema.validate(spec_dict, specfile_schema) - # Also just make sure that if something goes wrong with the - # stage logs copy, no exception is thrown - ci.copy_stage_logs_to_artifacts(concrete_spec, None) - ci.copy_stage_logs_to_artifacts(None, logs_dir.strpath) + logs_dir = scratch / "logs_dir" + logs_dir.mkdir() + ci.copy_stage_logs_to_artifacts(concrete_spec, str(logs_dir)) + assert "spack-build-out.txt" in os.listdir(logs_dir) - dl_dir = working_dir.join("download_dir") - if not os.path.exists(dl_dir.strpath): - os.makedirs(dl_dir.strpath) - buildcache_cmd("download", "--spec-file", json_path, "--path", dl_dir.strpath) - dl_dir_list = os.listdir(dl_dir.strpath) - - assert len(dl_dir_list) == 2 + dl_dir = scratch / "download_dir" + buildcache_cmd("download", "--spec-file", json_path, "--path", str(dl_dir)) + assert len(os.listdir(dl_dir)) == 2 def test_push_to_build_cache_exceptions(monkeypatch, tmp_path, capsys): @@ -1270,32 +1033,26 @@ def test_push_to_build_cache_exceptions(monkeypatch, tmp_path, capsys): @pytest.mark.parametrize("match_behavior", ["first", "merge"]) @pytest.mark.parametrize("git_version", ["big ol commit sha", None]) def test_ci_generate_override_runner_attrs( - tmpdir, - mutable_mock_env_path, - install_mockery, - mock_packages, - monkeypatch, - ci_base_environment, - match_behavior, - git_version, + ci_generate_test, tmp_path, monkeypatch, match_behavior, git_version ): """Test that we get the behavior we want with respect to the provision of runner attributes like tags, variables, and scripts, both when we inherit them from the top level, as well as when we override one or more at the runner 
level""" - filename = str(tmpdir.join("spack.yaml")) - with open(filename, "w") as f: - f.write( - """\ + monkeypatch.setattr(spack, "spack_version", "0.20.0.test0") + monkeypatch.setattr(spack.main, "get_version", lambda: "0.20.0.test0 (blah)") + monkeypatch.setattr(spack.main, "get_spack_commit", lambda: git_version) + spack_yaml, outputfile, _ = ci_generate_test( + f"""\ spack: specs: - flatten-deps - pkg-a mirrors: - some-mirror: https://my.fake.mirror + some-mirror: {tmp_path / "ci-mirror"} ci: pipeline-gen: - - match_behavior: {0} + - match_behavior: {match_behavior} submapping: - match: - flatten-deps @@ -1345,109 +1102,89 @@ spack: - cleanup-job: image: donotcare tags: [donotcare] -""".format( - match_behavior - ) - ) - - with tmpdir.as_cwd(): - env_cmd("create", "test", "./spack.yaml") - outputfile = str(tmpdir.join(".gitlab-ci.yml")) - - with ev.read("test"): - monkeypatch.setattr(spack, "spack_version", "0.20.0.test0") - monkeypatch.setattr(spack.main, "get_version", lambda: "0.20.0.test0 (blah)") - monkeypatch.setattr(spack.main, "get_spack_commit", lambda: git_version) - ci_cmd("generate", "--output-file", outputfile) - - with open(outputfile) as f: - contents = f.read() - yaml_contents = syaml.load(contents) +""" + ) - assert "variables" in yaml_contents - global_vars = yaml_contents["variables"] - assert "SPACK_VERSION" in global_vars - assert global_vars["SPACK_VERSION"] == "0.20.0.test0 (blah)" - assert "SPACK_CHECKOUT_VERSION" in global_vars - assert global_vars["SPACK_CHECKOUT_VERSION"] == git_version or "v0.20.0.test0" - - for ci_key in yaml_contents.keys(): - if ci_key.startswith("pkg-a"): - # Make sure pkg-a's attributes override variables, and all the - # scripts. 
Also, make sure the 'toplevel' tag doesn't - # appear twice, but that a's specific extra tag does appear - the_elt = yaml_contents[ci_key] - assert the_elt["variables"]["ONE"] == "specificvarone" - assert the_elt["variables"]["TWO"] == "specificvartwo" - assert "THREE" not in the_elt["variables"] - assert len(the_elt["tags"]) == (2 if match_behavior == "first" else 3) - assert "specific-a" in the_elt["tags"] - if match_behavior == "merge": - assert "specific-a-2" in the_elt["tags"] - assert "toplevel" in the_elt["tags"] - assert "toplevel2" not in the_elt["tags"] - assert len(the_elt["before_script"]) == 1 - assert the_elt["before_script"][0] == "custom pre step one" - assert len(the_elt["script"]) == 1 - assert the_elt["script"][0] == "custom main step" - assert len(the_elt["after_script"]) == 1 - assert the_elt["after_script"][0] == "custom post step one" - if "dependency-install" in ci_key: - # Since the dependency-install match omits any - # runner-attributes, make sure it inherited all the - # top-level attributes. - the_elt = yaml_contents[ci_key] - assert the_elt["variables"]["ONE"] == "toplevelvarone" - assert the_elt["variables"]["TWO"] == "toplevelvartwo" - assert "THREE" not in the_elt["variables"] - assert len(the_elt["tags"]) == 2 - assert "toplevel" in the_elt["tags"] - assert "toplevel2" in the_elt["tags"] - assert len(the_elt["before_script"]) == 2 - assert the_elt["before_script"][0] == "pre step one" - assert the_elt["before_script"][1] == "pre step two" - assert len(the_elt["script"]) == 1 - assert the_elt["script"][0] == "main step" - assert len(the_elt["after_script"]) == 1 - assert the_elt["after_script"][0] == "post step one" - if "flatten-deps" in ci_key: - # The flatten-deps match specifies that we keep the two - # top level variables, but add a third specifc one. It - # also adds a custom tag which should be combined with - # the top-level tag. 
- the_elt = yaml_contents[ci_key] - assert the_elt["variables"]["ONE"] == "toplevelvarone" - assert the_elt["variables"]["TWO"] == "toplevelvartwo" - assert the_elt["variables"]["THREE"] == "specificvarthree" - assert len(the_elt["tags"]) == 3 - assert "specific-one" in the_elt["tags"] - assert "toplevel" in the_elt["tags"] - assert "toplevel2" in the_elt["tags"] - assert len(the_elt["before_script"]) == 2 - assert the_elt["before_script"][0] == "pre step one" - assert the_elt["before_script"][1] == "pre step two" - assert len(the_elt["script"]) == 1 - assert the_elt["script"][0] == "main step" - assert len(the_elt["after_script"]) == 1 - assert the_elt["after_script"][0] == "post step one" + yaml_contents = syaml.load(outputfile.read_text()) + + assert "variables" in yaml_contents + global_vars = yaml_contents["variables"] + assert "SPACK_VERSION" in global_vars + assert global_vars["SPACK_VERSION"] == "0.20.0.test0 (blah)" + assert "SPACK_CHECKOUT_VERSION" in global_vars + assert global_vars["SPACK_CHECKOUT_VERSION"] == git_version or "v0.20.0.test0" + + for ci_key in yaml_contents.keys(): + if ci_key.startswith("pkg-a"): + # Make sure pkg-a's attributes override variables, and all the + # scripts. 
Also, make sure the 'toplevel' tag doesn't + # appear twice, but that a's specific extra tag does appear + the_elt = yaml_contents[ci_key] + assert the_elt["variables"]["ONE"] == "specificvarone" + assert the_elt["variables"]["TWO"] == "specificvartwo" + assert "THREE" not in the_elt["variables"] + assert len(the_elt["tags"]) == (2 if match_behavior == "first" else 3) + assert "specific-a" in the_elt["tags"] + if match_behavior == "merge": + assert "specific-a-2" in the_elt["tags"] + assert "toplevel" in the_elt["tags"] + assert "toplevel2" not in the_elt["tags"] + assert len(the_elt["before_script"]) == 1 + assert the_elt["before_script"][0] == "custom pre step one" + assert len(the_elt["script"]) == 1 + assert the_elt["script"][0] == "custom main step" + assert len(the_elt["after_script"]) == 1 + assert the_elt["after_script"][0] == "custom post step one" + if "dependency-install" in ci_key: + # Since the dependency-install match omits any + # runner-attributes, make sure it inherited all the + # top-level attributes. + the_elt = yaml_contents[ci_key] + assert the_elt["variables"]["ONE"] == "toplevelvarone" + assert the_elt["variables"]["TWO"] == "toplevelvartwo" + assert "THREE" not in the_elt["variables"] + assert len(the_elt["tags"]) == 2 + assert "toplevel" in the_elt["tags"] + assert "toplevel2" in the_elt["tags"] + assert len(the_elt["before_script"]) == 2 + assert the_elt["before_script"][0] == "pre step one" + assert the_elt["before_script"][1] == "pre step two" + assert len(the_elt["script"]) == 1 + assert the_elt["script"][0] == "main step" + assert len(the_elt["after_script"]) == 1 + assert the_elt["after_script"][0] == "post step one" + if "flatten-deps" in ci_key: + # The flatten-deps match specifies that we keep the two + # top level variables, but add a third specifc one. It + # also adds a custom tag which should be combined with + # the top-level tag. 
+ the_elt = yaml_contents[ci_key] + assert the_elt["variables"]["ONE"] == "toplevelvarone" + assert the_elt["variables"]["TWO"] == "toplevelvartwo" + assert the_elt["variables"]["THREE"] == "specificvarthree" + assert len(the_elt["tags"]) == 3 + assert "specific-one" in the_elt["tags"] + assert "toplevel" in the_elt["tags"] + assert "toplevel2" in the_elt["tags"] + assert len(the_elt["before_script"]) == 2 + assert the_elt["before_script"][0] == "pre step one" + assert the_elt["before_script"][1] == "pre step two" + assert len(the_elt["script"]) == 1 + assert the_elt["script"][0] == "main step" + assert len(the_elt["after_script"]) == 1 + assert the_elt["after_script"][0] == "post step one" -@pytest.mark.disable_clean_stage_check def test_ci_rebuild_index( - tmpdir, - working_env, - mutable_mock_env_path, - install_mockery, - mock_packages, - mock_fetch, - mock_stage, + tmp_path: pathlib.Path, working_env, mutable_mock_env_path, install_mockery, mock_fetch ): - working_dir = tmpdir.join("working_dir") - - mirror_dir = working_dir.join("mirror") - mirror_url = url_util.path_to_file_url(str(mirror_dir)) + scratch = tmp_path / "working_dir" + mirror_dir = scratch / "mirror" + mirror_url = mirror_dir.as_uri() - spack_yaml_contents = f""" + with open(tmp_path / "spack.yaml", "w") as f: + f.write( + f""" spack: specs: - callpath @@ -1463,29 +1200,21 @@ spack: - donotcare image: donotcare """ + ) - filename = str(tmpdir.join("spack.yaml")) - with open(filename, "w") as f: - f.write(spack_yaml_contents) - - with tmpdir.as_cwd(): + with working_dir(tmp_path): env_cmd("create", "test", "./spack.yaml") with ev.read("test"): concrete_spec = Spec("callpath").concretized() - spec_json = concrete_spec.to_json(hash=ht.dag_hash) - json_path = str(tmpdir.join("spec.json")) - with open(json_path, "w") as ypfd: - ypfd.write(spec_json) + with open(tmp_path / "spec.json", "w") as f: + f.write(concrete_spec.to_json(hash=ht.dag_hash)) - install_cmd("--add", "--keep-stage", "-f", 
json_path) + install_cmd("--add", "-f", str(tmp_path / "spec.json")) buildcache_cmd("push", "-u", "-f", mirror_url, "callpath") ci_cmd("rebuild-index") - buildcache_path = os.path.join(mirror_dir.strpath, "build_cache") - index_path = os.path.join(buildcache_path, "index.json") - with open(index_path) as idx_fd: - index_object = json.load(idx_fd) - jsonschema.validate(index_object, db_idx_schema) + with open(mirror_dir / "build_cache" / "index.json") as f: + jsonschema.validate(json.load(f), db_idx_schema) def test_ci_get_stack_changed(mock_git_repo, monkeypatch): @@ -1495,148 +1224,68 @@ def test_ci_get_stack_changed(mock_git_repo, monkeypatch): assert ci.get_stack_changed("/no/such/env/path") is True -def test_ci_generate_prune_untouched( - tmpdir, mutable_mock_env_path, install_mockery, mock_packages, ci_base_environment, monkeypatch -): +def test_ci_generate_prune_untouched(ci_generate_test, tmp_path, monkeypatch): """Test pipeline generation with pruning works to eliminate specs that were not affected by a change""" - os.environ.update({"SPACK_PRUNE_UNTOUCHED": "TRUE"}) # enables pruning of untouched specs - mirror_url = "https://my.fake.mirror" - filename = str(tmpdir.join("spack.yaml")) - with open(filename, "w") as f: - f.write( - """\ + monkeypatch.setenv("SPACK_PRUNE_UNTOUCHED", "TRUE") # enables pruning of untouched specs + + def fake_compute_affected(r1=None, r2=None): + return ["libdwarf"] + + def fake_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"): + return False + + monkeypatch.setattr(ci, "compute_affected_packages", fake_compute_affected) + monkeypatch.setattr(ci, "get_stack_changed", fake_stack_changed) + + spack_yaml, outputfile, _ = ci_generate_test( + f"""\ spack: specs: - archive-files - callpath mirrors: - some-mirror: {0} + some-mirror: {tmp_path / 'ci-mirror'} ci: pipeline-gen: - build-job: tags: - donotcare image: donotcare -""".format( - mirror_url - ) - ) +""" + ) # Dependency graph rooted at callpath # callpath -> dyninst -> libelf 
# -> libdwarf -> libelf # -> mpich + env_hashes = {} + with ev.read("test") as active_env: + active_env.concretize() + for s in active_env.all_specs(): + env_hashes[s.name] = s.dag_hash() - with tmpdir.as_cwd(): - env_cmd("create", "test", "./spack.yaml") - outputfile = str(tmpdir.join(".gitlab-ci.yml")) - - def fake_compute_affected(r1=None, r2=None): - return ["libdwarf"] - - def fake_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"): - return False - - env_hashes = {} - - with ev.read("test") as active_env: - monkeypatch.setattr(ci, "compute_affected_packages", fake_compute_affected) - monkeypatch.setattr(ci, "get_stack_changed", fake_stack_changed) + yaml_contents = syaml.load(outputfile.read_text()) - active_env.concretize() + generated_hashes = [] + for ci_key in yaml_contents.keys(): + if "variables" in yaml_contents[ci_key]: + generated_hashes.append(yaml_contents[ci_key]["variables"]["SPACK_JOB_SPEC_DAG_HASH"]) - for s in active_env.all_specs(): - env_hashes[s.name] = s.dag_hash() - - ci_cmd("generate", "--output-file", outputfile) - - with open(outputfile) as f: - contents = f.read() - print(contents) - yaml_contents = syaml.load(contents) - - generated_hashes = [] - - for ci_key in yaml_contents.keys(): - if "variables" in yaml_contents[ci_key]: - generated_hashes.append( - yaml_contents[ci_key]["variables"]["SPACK_JOB_SPEC_DAG_HASH"] - ) - - assert env_hashes["archive-files"] not in generated_hashes - for spec_name in ["callpath", "dyninst", "mpich", "libdwarf", "libelf"]: - assert env_hashes[spec_name] in generated_hashes - - -def test_ci_generate_prune_env_vars( - tmpdir, mutable_mock_env_path, install_mockery, mock_packages, ci_base_environment, monkeypatch -): - """Make sure environment variables controlling untouched spec - pruning behave as expected.""" - os.environ.update({"SPACK_PRUNE_UNTOUCHED": "TRUE"}) # enables pruning of untouched specs - filename = str(tmpdir.join("spack.yaml")) - with open(filename, "w") as f: - f.write( - """\ -spack: - 
specs: - - libelf - gitlab-ci: - mappings: - - match: - - arch=test-debian6-core2 - runner-attributes: - tags: - - donotcare - image: donotcare -""" - ) - - with tmpdir.as_cwd(): - env_cmd("create", "test", "./spack.yaml") - - def fake_compute_affected(r1=None, r2=None): - return ["libdwarf"] - - def fake_stack_changed(env_path, rev1="HEAD^", rev2="HEAD"): - return False - - expected_depth_param = None - - def check_get_spec_filter_list(env, affected_pkgs, dependent_traverse_depth=None): - assert dependent_traverse_depth == expected_depth_param - return set() - - monkeypatch.setattr(ci, "compute_affected_packages", fake_compute_affected) - monkeypatch.setattr(ci, "get_stack_changed", fake_stack_changed) - monkeypatch.setattr(ci, "get_spec_filter_list", check_get_spec_filter_list) - - expectations = {"-1": -1, "0": 0, "True": None} - - for key, val in expectations.items(): - with ev.read("test"): - os.environ.update({"SPACK_PRUNE_UNTOUCHED_DEPENDENT_DEPTH": key}) - expected_depth_param = val - # Leaving out the mirror in the spack.yaml above means the - # pipeline generation command will fail, pretty much immediately. - # But for this test, we only care how the environment variables - # for pruning are handled, the faster the better. So allow the - # spack command to fail. 
- ci_cmd("generate", fail_on_error=False) + assert env_hashes["archive-files"] not in generated_hashes + for spec_name in ["callpath", "dyninst", "mpich", "libdwarf", "libelf"]: + assert env_hashes[spec_name] in generated_hashes def test_ci_subcommands_without_mirror( - tmpdir, + tmp_path: pathlib.Path, mutable_mock_env_path, - mock_packages, install_mockery, ci_base_environment, mock_binary_index, ): """Make sure we catch if there is not a mirror and report an error""" - filename = str(tmpdir.join("spack.yaml")) - with open(filename, "w") as f: + with open(tmp_path / "spack.yaml", "w") as f: f.write( """\ spack: @@ -1654,22 +1303,23 @@ spack: """ ) - with tmpdir.as_cwd(): + with working_dir(tmp_path): env_cmd("create", "test", "./spack.yaml") - outputfile = str(tmpdir.join(".gitlab-ci.yml")) with ev.read("test"): # Check the 'generate' subcommand output = ci_cmd( - "generate", "--output-file", outputfile, output=str, fail_on_error=False + "generate", + "--output-file", + str(tmp_path / ".gitlab-ci.yml"), + output=str, + fail_on_error=False, ) - ex = "spack ci generate requires an env containing a mirror" - assert ex in output + assert "spack ci generate requires an env containing a mirror" in output # Also check the 'rebuild-index' subcommand output = ci_cmd("rebuild-index", output=str, fail_on_error=False) - ex = "spack ci rebuild-index requires an env containing a mirror" - assert ex in output + assert "spack ci rebuild-index requires an env containing a mirror" in output def test_ensure_only_one_temporary_storage(): @@ -1689,11 +1339,8 @@ def test_ensure_only_one_temporary_storage(): enable_artifacts = "enable-artifacts-buildcache: True" temp_storage = "temporary-storage-url-prefix: file:///temp/mirror" - specify_both = """{0} - {1} -""".format( - enable_artifacts, temp_storage - ) + specify_both = f"{enable_artifacts}\n {temp_storage}" + specify_neither = "" # User can specify "enable-artifacts-buildcache" (boolean) @@ -1715,27 +1362,17 @@ def 
test_ensure_only_one_temporary_storage(): jsonschema.validate(yaml_obj, ci_schema) -def test_ci_generate_temp_storage_url( - tmpdir, - mutable_mock_env_path, - install_mockery, - mock_packages, - monkeypatch, - ci_base_environment, - mock_binary_index, -): +def test_ci_generate_temp_storage_url(ci_generate_test, tmp_path, mock_binary_index): """Verify correct behavior when using temporary-storage-url-prefix""" - filename = str(tmpdir.join("spack.yaml")) - with open(filename, "w") as f: - f.write( - """\ + _, outputfile, _ = ci_generate_test( + f"""\ spack: specs: - archive-files mirrors: - some-mirror: https://my.fake.mirror + some-mirror: {(tmp_path / "ci-mirror").as_uri()} ci: - temporary-storage-url-prefix: file:///work/temp/mirror + temporary-storage-url-prefix: {(tmp_path / "temp-mirror").as_uri()} pipeline-gen: - submapping: - match: @@ -1747,38 +1384,32 @@ spack: - cleanup-job: custom_attribute: custom! """ - ) - - with tmpdir.as_cwd(): - env_cmd("create", "test", "./spack.yaml") - outputfile = str(tmpdir.join(".gitlab-ci.yml")) - - with ev.read("test"): - ci_cmd("generate", "--output-file", outputfile) - - with open(outputfile) as of: - pipeline_doc = syaml.load(of.read()) - - assert "cleanup" in pipeline_doc - cleanup_job = pipeline_doc["cleanup"] - - assert cleanup_job["custom_attribute"] == "custom!" + ) + yaml_contents = syaml.load(outputfile.read_text()) - assert "script" in cleanup_job - cleanup_task = cleanup_job["script"][0] + assert "cleanup" in yaml_contents - assert cleanup_task.startswith("spack -d mirror destroy") + cleanup_job = yaml_contents["cleanup"] + assert cleanup_job["custom_attribute"] == "custom!" 
+ assert "script" in cleanup_job - assert "stages" in pipeline_doc - stages = pipeline_doc["stages"] + cleanup_task = cleanup_job["script"][0] + assert cleanup_task.startswith("spack -d mirror destroy") - # Cleanup job should be 2nd to last, just before rebuild-index - assert "stage" in cleanup_job - assert cleanup_job["stage"] == stages[-2] + assert "stages" in yaml_contents + stages = yaml_contents["stages"] + # Cleanup job should be 2nd to last, just before rebuild-index + assert "stage" in cleanup_job + assert cleanup_job["stage"] == stages[-2] def test_ci_generate_read_broken_specs_url( - tmpdir, mutable_mock_env_path, install_mockery, mock_packages, monkeypatch, ci_base_environment + tmp_path: pathlib.Path, + mutable_mock_env_path, + install_mockery, + mock_packages, + monkeypatch, + ci_base_environment, ): """Verify that `broken-specs-url` works as intended""" spec_a = Spec("pkg-a") @@ -1789,7 +1420,7 @@ def test_ci_generate_read_broken_specs_url( spec_flattendeps.concretize() flattendeps_dag_hash = spec_flattendeps.dag_hash() - broken_specs_url = "file://{0}".format(tmpdir.strpath) + broken_specs_url = tmp_path.as_uri() # Mark 'a' as broken (but not 'flatten-deps') broken_spec_a_url = "{0}/{1}".format(broken_specs_url, a_dag_hash) @@ -1800,18 +1431,17 @@ def test_ci_generate_read_broken_specs_url( ) # Test that `spack ci generate` notices this broken spec and fails. 
- filename = str(tmpdir.join("spack.yaml")) - with open(filename, "w") as f: + with open(tmp_path / "spack.yaml", "w") as f: f.write( - """\ + f"""\ spack: specs: - flatten-deps - pkg-a mirrors: - some-mirror: https://my.fake.mirror + some-mirror: {(tmp_path / "ci-mirror").as_uri()} ci: - broken-specs-url: "{0}" + broken-specs-url: "{broken_specs_url}" pipeline-gen: - submapping: - match: @@ -1823,45 +1453,40 @@ spack: tags: - donotcare image: donotcare -""".format( - broken_specs_url - ) +""" ) - with tmpdir.as_cwd(): + with working_dir(tmp_path): env_cmd("create", "test", "./spack.yaml") with ev.read("test"): # Check output of the 'generate' subcommand output = ci_cmd("generate", output=str, fail_on_error=False) assert "known to be broken" in output - expected = "{0}/{1} (in stack {2}) was reported broken here: {3}".format( - spec_a.name, a_dag_hash[:7], job_stack, a_job_url + expected = ( + f"{spec_a.name}/{a_dag_hash[:7]} (in stack {job_stack}) was " + f"reported broken here: {a_job_url}" ) assert expected in output - not_expected = "flatten-deps/{0} (in stack".format(flattendeps_dag_hash[:7]) + not_expected = f"flatten-deps/{flattendeps_dag_hash[:7]} (in stack" assert not_expected not in output -def test_ci_generate_external_signing_job( - tmpdir, mutable_mock_env_path, install_mockery, mock_packages, monkeypatch, ci_base_environment -): +def test_ci_generate_external_signing_job(ci_generate_test, tmp_path, monkeypatch): """Verify that in external signing mode: 1) each rebuild jobs includes the location where the binary hash information is written and 2) we properly generate a final signing job in the pipeline.""" - os.environ.update({"SPACK_PIPELINE_TYPE": "spack_protected_branch"}) - filename = str(tmpdir.join("spack.yaml")) - with open(filename, "w") as f: - f.write( - """\ + monkeypatch.setenv("SPACK_PIPELINE_TYPE", "spack_protected_branch") + _, outputfile, _ = ci_generate_test( + f"""\ spack: specs: - archive-files mirrors: - some-mirror: 
https://my.fake.mirror + some-mirror: {(tmp_path / "ci-mirror").as_uri()} ci: - temporary-storage-url-prefix: file:///work/temp/mirror + temporary-storage-url-prefix: {(tmp_path / "temp-mirror").as_uri()} pipeline-gen: - submapping: - match: @@ -1883,48 +1508,40 @@ spack: - echo hello custom_attribute: custom! """ - ) - - with tmpdir.as_cwd(): - env_cmd("create", "test", "./spack.yaml") - outputfile = str(tmpdir.join(".gitlab-ci.yml")) - - with ev.read("test"): - ci_cmd("generate", "--output-file", outputfile) - - with open(outputfile) as of: - pipeline_doc = syaml.load(of.read()) + ) + yaml_contents = syaml.load(outputfile.read_text()) - assert "sign-pkgs" in pipeline_doc - signing_job = pipeline_doc["sign-pkgs"] - assert "tags" in signing_job - signing_job_tags = signing_job["tags"] - for expected_tag in ["notary", "protected", "aws"]: - assert expected_tag in signing_job_tags - assert signing_job["custom_attribute"] == "custom!" + assert "sign-pkgs" in yaml_contents + signing_job = yaml_contents["sign-pkgs"] + assert "tags" in signing_job + signing_job_tags = signing_job["tags"] + for expected_tag in ["notary", "protected", "aws"]: + assert expected_tag in signing_job_tags + assert signing_job["custom_attribute"] == "custom!" 
def test_ci_reproduce( - tmpdir, + tmp_path: pathlib.Path, mutable_mock_env_path, install_mockery, - mock_packages, monkeypatch, last_two_git_commits, ci_base_environment, mock_binary_index, ): - working_dir = tmpdir.join("repro_dir") + repro_dir = tmp_path / "repro_dir" image_name = "org/image:tag" - spack_yaml_contents = """ + with open(tmp_path / "spack.yaml", "w") as f: + f.write( + f""" spack: definitions: - packages: [archive-files] specs: - $packages mirrors: - test-mirror: file:///some/fake/mirror + test-mirror: {tmp_path / "ci-mirror"} ci: pipeline-gen: - submapping: @@ -1933,65 +1550,49 @@ spack: build-job: tags: - donotcare - image: {0} -""".format( - image_name - ) - - filename = str(tmpdir.join("spack.yaml")) - with open(filename, "w") as f: - f.write(spack_yaml_contents) - - with tmpdir.as_cwd(): - env_cmd("create", "test", "./spack.yaml") - with ev.read("test") as env: - with env.write_transaction(): - env.concretize() - env.write() - - if not os.path.exists(working_dir.strpath): - os.makedirs(working_dir.strpath) - - shutil.copyfile(env.manifest_path, os.path.join(working_dir.strpath, "spack.yaml")) - shutil.copyfile(env.lock_path, os.path.join(working_dir.strpath, "spack.lock")) + image: {image_name} +""" + ) - job_spec = None + with working_dir(tmp_path), ev.Environment(".") as env: + env.concretize() + env.write() - for h, s in env.specs_by_hash.items(): - if s.name == "archive-files": - job_spec = s + repro_dir.mkdir() - job_spec_json_path = os.path.join(working_dir.strpath, "archivefiles.json") - with open(job_spec_json_path, "w") as fd: - fd.write(job_spec.to_json(hash=ht.dag_hash)) + job_spec = env.concrete_roots()[0] + with open(repro_dir / "archivefiles.json", "w") as f: + f.write(job_spec.to_json(hash=ht.dag_hash)) - artifacts_root = os.path.join(working_dir.strpath, "scratch_dir") - pipeline_path = os.path.join(artifacts_root, "pipeline.yml") + artifacts_root = repro_dir / "scratch_dir" + pipeline_path = artifacts_root / "pipeline.yml" - 
ci_cmd("generate", "--output-file", pipeline_path, "--artifacts-root", artifacts_root) + ci_cmd( + "generate", + "--output-file", + str(pipeline_path), + "--artifacts-root", + str(artifacts_root), + ) - job_name = ci.get_job_name(job_spec) + job_name = ci.get_job_name(job_spec) - repro_file = os.path.join(working_dir.strpath, "repro.json") - repro_details = { - "job_name": job_name, - "job_spec_json": "archivefiles.json", - "ci_project_dir": working_dir.strpath, - } - with open(repro_file, "w") as fd: - fd.write(json.dumps(repro_details)) + with open(repro_dir / "repro.json", "w") as f: + f.write( + json.dumps( + { + "job_name": job_name, + "job_spec_json": "archivefiles.json", + "ci_project_dir": str(repro_dir), + } + ) + ) - install_script = os.path.join(working_dir.strpath, "install.sh") - with open(install_script, "w") as fd: - fd.write("#!/bin/sh\n\n#fake install\nspack install blah\n") + with open(repro_dir / "install.sh", "w") as f: + f.write("#!/bin/sh\n\n#fake install\nspack install blah\n") - spack_info_file = os.path.join(working_dir.strpath, "spack_info.txt") - with open(spack_info_file, "w") as fd: - fd.write( - "\nMerge {0} into {1}\n\n".format( - last_two_git_commits[1], last_two_git_commits[0] - ) - ) + with open(repro_dir / "spack_info.txt", "w") as f: + f.write(f"\nMerge {last_two_git_commits[1]} into {last_two_git_commits[0]}\n\n") def fake_download_and_extract_artifacts(url, work_dir): pass @@ -1999,16 +1600,16 @@ spack: monkeypatch.setattr(ci, "download_and_extract_artifacts", fake_download_and_extract_artifacts) rep_out = ci_cmd( "reproduce-build", - "https://some.domain/api/v1/projects/1/jobs/2/artifacts", + "https://example.com/api/v1/projects/1/jobs/2/artifacts", "--working-dir", - working_dir.strpath, + str(repro_dir), output=str, ) # Make sure the script was generated - assert os.path.exists(os.path.join(os.path.realpath(working_dir.strpath), "start.sh")) - # Make sure we tell the suer where it is when not in interactive mode - expect_out 
= "$ {0}/start.sh".format(os.path.realpath(working_dir.strpath)) - assert expect_out in rep_out + assert (repro_dir / "start.sh").exists() + + # Make sure we tell the user where it is when not in interactive mode + assert f"$ {repro_dir}/start.sh" in rep_out @pytest.mark.parametrize( @@ -2086,8 +1687,8 @@ spack: - matrix: - [$old-gcc-pkgs] mirrors: - test-mirror: file:///some/fake/mirror - {0}: + test-mirror: {mirror_url} + {key}: match_behavior: first mappings: - match: @@ -2115,29 +1716,23 @@ spack: @pytest.mark.regression("36409") def test_gitlab_ci_deprecated( - tmpdir, + tmp_path: pathlib.Path, mutable_mock_env_path, install_mockery, - mock_packages, monkeypatch, ci_base_environment, mock_binary_index, ): - mirror_url = "file:///some/fake/mirror" - filename = str(tmpdir.join("spack.yaml")) - with open(filename, "w") as f: - f.write(legacy_spack_yaml_contents.format("gitlab-ci")) + mirror_url = (tmp_path / "ci-mirror").as_uri() + with open(tmp_path / "spack.yaml", "w") as f: + f.write(legacy_spack_yaml_contents.format(mirror_url=mirror_url, key="gitlab-ci")) - with tmpdir.as_cwd(): - env_cmd("create", "test", "./spack.yaml") - outputfile = "generated-pipeline.yaml" - - with ev.read("test"): - ci_cmd("generate", "--output-file", outputfile) + with working_dir(tmp_path): + with ev.Environment("."): + ci_cmd("generate", "--output-file", "generated-pipeline.yaml") - with open(outputfile) as f: - contents = f.read() - yaml_contents = syaml.load(contents) + with open("generated-pipeline.yaml") as f: + yaml_contents = syaml.load(f) assert "stages" in yaml_contents assert len(yaml_contents["stages"]) == 5 @@ -2146,7 +1741,7 @@ def test_gitlab_ci_deprecated( assert "rebuild-index" in yaml_contents rebuild_job = yaml_contents["rebuild-index"] - expected = "spack buildcache update-index --keys {0}".format(mirror_url) + expected = f"spack buildcache update-index --keys {mirror_url}" assert rebuild_job["script"][0] == expected assert "variables" in yaml_contents @@ 
-2157,100 +1752,82 @@ def test_gitlab_ci_deprecated( @pytest.mark.regression("36045") def test_gitlab_ci_update( - tmpdir, + tmp_path: pathlib.Path, mutable_mock_env_path, install_mockery, - mock_packages, monkeypatch, ci_base_environment, mock_binary_index, ): - filename = str(tmpdir.join("spack.yaml")) - with open(filename, "w") as f: - f.write(legacy_spack_yaml_contents.format("ci")) - - with tmpdir.as_cwd(): - env_cmd("update", "-y", ".") - - with open("spack.yaml") as f: - contents = f.read() - yaml_contents = syaml.load(contents) + with open(tmp_path / "spack.yaml", "w") as f: + f.write( + legacy_spack_yaml_contents.format(mirror_url=(tmp_path / "mirror").as_uri(), key="ci") + ) - ci_root = yaml_contents["spack"]["ci"] + env_cmd("update", "-y", str(tmp_path)) - assert "pipeline-gen" in ci_root + with open(tmp_path / "spack.yaml") as f: + yaml_contents = syaml.load(f) + ci_root = yaml_contents["spack"]["ci"] + assert "pipeline-gen" in ci_root -def test_gitlab_config_scopes( - tmpdir, working_env, mutable_mock_env_path, mock_packages, ci_base_environment -): +def test_gitlab_config_scopes(ci_generate_test, tmp_path): """Test pipeline generation with real configs included""" configs_path = os.path.join(spack_paths.share_path, "gitlab", "cloud_pipelines", "configs") - filename = str(tmpdir.join("spack.yaml")) - with open(filename, "w") as f: - f.write( - """\ + _, outputfile, _ = ci_generate_test( + f"""\ spack: config: - install_tree: {0} - include: [{1}] + install_tree: {tmp_path / "opt"} + include: [{configs_path}] view: false specs: - flatten-deps mirrors: - some-mirror: https://my.fake.mirror + some-mirror: {tmp_path / "ci-mirror"} ci: pipeline-gen: - build-job: image: "ecpe4s/ubuntu20.04-runner-x86_64:2023-01-01" tags: ["some_tag"] -""".format( - tmpdir.strpath, configs_path - ) - ) +""" + ) - with tmpdir.as_cwd(): - env_cmd("create", "test", "./spack.yaml") - outputfile = str(tmpdir.join(".gitlab-ci.yml")) + yaml_contents = 
syaml.load(outputfile.read_text()) - with ev.read("test"): - ci_cmd("generate", "--output-file", outputfile) + assert "rebuild-index" in yaml_contents - with open(outputfile) as f: - contents = f.read() - yaml_contents = syaml.load(contents) + rebuild_job = yaml_contents["rebuild-index"] + assert "tags" in rebuild_job + assert "variables" in rebuild_job - assert "rebuild-index" in yaml_contents - rebuild_job = yaml_contents["rebuild-index"] - assert "tags" in rebuild_job - assert "variables" in rebuild_job - rebuild_tags = rebuild_job["tags"] - rebuild_vars = rebuild_job["variables"] - assert all([t in rebuild_tags for t in ["spack", "service"]]) - expected_vars = ["CI_JOB_SIZE", "KUBERNETES_CPU_REQUEST", "KUBERNETES_MEMORY_REQUEST"] - assert all([v in rebuild_vars for v in expected_vars]) + rebuild_tags = rebuild_job["tags"] + rebuild_vars = rebuild_job["variables"] + assert all([t in rebuild_tags for t in ["spack", "service"]]) + expected_vars = ["CI_JOB_SIZE", "KUBERNETES_CPU_REQUEST", "KUBERNETES_MEMORY_REQUEST"] + assert all([v in rebuild_vars for v in expected_vars]) def test_ci_generate_mirror_config( - tmpdir, + tmp_path: pathlib.Path, mutable_mock_env_path, install_mockery, - mock_packages, monkeypatch, ci_base_environment, mock_binary_index, ): """Make sure the correct mirror gets used as the buildcache destination""" - filename = str(tmpdir.join("spack.yaml")) - with open(filename, "w") as f: + fst, snd = (tmp_path / "first").as_uri(), (tmp_path / "second").as_uri() + with open(tmp_path / "spack.yaml", "w") as f: f.write( - """\ + f"""\ spack: specs: - archive-files mirrors: - some-mirror: file:///this/is/a/source/mirror - buildcache-destination: file:///push/binaries/here + some-mirror: {fst} + buildcache-destination: {snd} ci: pipeline-gen: - submapping: @@ -2263,16 +1840,10 @@ spack: """ ) - with tmpdir.as_cwd(): - env_cmd("create", "test", "./spack.yaml") - outputfile = str(tmpdir.join(".gitlab-ci.yml")) + with ev.Environment(tmp_path): + 
ci_cmd("generate", "--output-file", str(tmp_path / ".gitlab-ci.yml")) - with ev.read("test"): - ci_cmd("generate", "--output-file", outputfile) - with open(outputfile) as of: - pipeline_doc = syaml.load(of.read()) - assert "rebuild-index" in pipeline_doc - reindex_job = pipeline_doc["rebuild-index"] - assert "script" in reindex_job - reindex_step = reindex_job["script"][0] - assert "file:///push/binaries/here" in reindex_step + with open(tmp_path / ".gitlab-ci.yml") as f: + pipeline_doc = syaml.load(f) + assert fst not in pipeline_doc["rebuild-index"]["script"][0] + assert snd in pipeline_doc["rebuild-index"]["script"][0] |