author     Scott Wittenburg <scott.wittenburg@kitware.com>    2019-07-16 08:36:31 -0600
committer  Todd Gamblin <tgamblin@llnl.gov>                   2019-09-13 22:57:15 -0700
commit     6d745a56fd1358667047d22ba183140a8ec0f5f5 (patch)
tree       7f9bce20f0c48c43ecbcb678262a8900d93bec6d
parent     5323a5cff90fdd647dd9150e64561b26db417d94 (diff)
download   spack-6d745a56fd1358667047d22ba183140a8ec0f5f5.tar.gz (also available as .tar.bz2, .tar.xz, .zip)
Implement an optional compiler bootstrapping phase
-rw-r--r--  .gitlab-ci.yml                             |   4
-rwxr-xr-x  bin/generate-gitlab-ci-yml.sh              |  18
-rwxr-xr-x  bin/rebuild-package.sh                     | 291
-rw-r--r--  lib/spack/docs/example_files/spack.yaml    | 110
-rw-r--r--  lib/spack/spack/binary_distribution.py     |   2
-rw-r--r--  lib/spack/spack/cmd/buildcache.py          | 103
-rw-r--r--  lib/spack/spack/cmd/release_jobs.py        | 405
-rw-r--r--  lib/spack/spack/schema/gitlab_ci.py        |  95
-rw-r--r--  lib/spack/spack/schema/specs_deps.py       |  48
-rw-r--r--  lib/spack/spack/test/cmd/release_jobs.py   |   3
10 files changed, 711 insertions, 368 deletions
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 4f08bee923..11dcf1c0a2 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,9 +1,11 @@
 generate ci jobs:
+  variables:
+    git_strategy: clone
   script:
     - "./bin/generate-gitlab-ci-yml.sh"
   tags:
     - "spack-k8s"
-  image: "spack/ubuntu:18.04"
+  image: "scottwittenburg/spack_ci_generator_alpine"
   artifacts:
     paths:
       - ci-generation
diff --git a/bin/generate-gitlab-ci-yml.sh b/bin/generate-gitlab-ci-yml.sh
index 1a021785d3..b414d6c404 100755
--- a/bin/generate-gitlab-ci-yml.sh
+++ b/bin/generate-gitlab-ci-yml.sh
@@ -19,8 +19,10 @@ if [ -z "${SPACK_RELEASE_ENVIRONMENT_PATH}" ] ; then
 fi
 
 if [ -z "${CDASH_AUTH_TOKEN}" ] ; then
-    echo "ERROR: missing variable: CDASH_AUTH_TOKEN" >&2
-    exit 1
+    echo "WARNING: missing variable: CDASH_AUTH_TOKEN" >&2
+else
+    token_file="${temp_dir}/cdash_auth_token"
+    echo ${CDASH_AUTH_TOKEN} > ${token_file}
 fi
 
 if [ -z "${SPACK_RELEASE_ENVIRONMENT_REPO}" ] ; then
@@ -51,11 +53,14 @@ fi
 
 cd $env_dir
 
-token_file="${temp_dir}/cdash_auth_token"
-echo ${CDASH_AUTH_TOKEN} > ${token_file}
+# The next command generates the .gitlab-ci.yml (and optionally creates a
+# buildgroup in cdash)
+RELEASE_JOBS_ARGS=("--output-file" "${gen_ci_file}")
+if [ ! -z "${token_file}" ]; then
+    RELEASE_JOBS_ARGS+=("--cdash-credentials" "${token_file}")
+fi
 
-# This commands generates the .gitlab-ci.yml and creates buildgroup in cdash
-spack release-jobs --force --output-file ${gen_ci_file} --cdash-credentials ${token_file}
+spack release-jobs "${RELEASE_JOBS_ARGS[@]}"
 
 if [[ $? -ne 0 ]]; then
     echo "spack release-jobs command failed"
@@ -64,6 +69,7 @@ fi
 
 cp ${gen_ci_file} "${original_directory}/.gitlab-ci.yml"
 
+# Remove global from here, it's clobbering people's git identity config
 git config --global user.email "robot@spack.io"
 git config --global user.name "Build Robot"
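The change above makes the CDash token optional: the credentials file is only written when CDASH_AUTH_TOKEN is set, and --cdash-credentials is only appended when that file exists. A minimal Python sketch of the consuming side, mirroring the release_jobs.py change further down (the path here is illustrative):

```python
import os

def read_cdash_token(credentials_path):
    """Return the stripped token, or None when no credentials were given."""
    if not credentials_path or not os.path.exists(credentials_path):
        return None
    with open(credentials_path) as fd:
        return fd.read().strip()

# Usage: the token is simply absent when CDash reporting is disabled
token = read_cdash_token('/tmp/cdash_auth_token')
```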
diff --git a/bin/rebuild-package.sh b/bin/rebuild-package.sh
index 3813a92a1f..657e431052 100755
--- a/bin/rebuild-package.sh
+++ b/bin/rebuild-package.sh
@@ -12,29 +12,44 @@
 ### not (i.e. the source code has changed in a way that caused a change in the
 ### full_hash of the spec), this script will build the package, create a
 ### binary cache for it, and then push all related files to the remote binary
-### mirror. This script also communicates with a remote CDash instance to
-### share status on the package build process.
+### mirror. This script also optionally communicates with a remote CDash
+### instance to share status on the package build process.
 ###
-### The following environment variables are expected to be set in order for
-### the various elements in this script to function properly. Listed first
-### are two defaults we rely on from gitlab, then three we set up in the
-### variables section of gitlab ourselves, and finally four variables
-### written into the .gitlab-ci.yml file.
+### The following environment variables are (possibly) used within this script
+### in order for the various elements to function properly.
+###
+### First are two defaults we rely on from gitlab:
 ###
 ### CI_PROJECT_DIR
 ### CI_JOB_NAME
 ###
+### The following must be set up in the variables section of gitlab:
+###
 ### AWS_ACCESS_KEY_ID
 ### AWS_SECRET_ACCESS_KEY
 ### SPACK_SIGNING_KEY
 ###
-### CDASH_BASE_URL
-### CDASH_PROJECT
-### CDASH_PROJECT_ENC
-### CDASH_BUILD_NAME
-### ROOT_SPEC
-### DEPENDENCIES
-### MIRROR_URL
+### SPACK_S3_UPLOAD_MIRROR_URL // only required in the short term for the cloud case
+###
+### The following variables are defined by the ci generation process and are
+### required:
+###
+### SPACK_ENABLE_CDASH
+### SPACK_ROOT_SPEC
+### SPACK_MIRROR_URL
+### SPACK_JOB_SPEC_PKG_NAME
+### SPACK_COMPILER_ACTION
+###
+### Finally, these variables are optionally defined by the ci generation
+### process, and may or may not be present:
+###
+### SPACK_CDASH_BASE_URL
+### SPACK_CDASH_PROJECT
+### SPACK_CDASH_PROJECT_ENC
+### SPACK_CDASH_BUILD_NAME
+### SPACK_CDASH_SITE
+### SPACK_RELATED_BUILDS
+### SPACK_JOB_SPEC_BUILDGROUP
 ###
 
 shopt -s expand_aliases
@@ -48,14 +63,19 @@ SPEC_DIR="${TEMP_DIR}/specs"
 LOCAL_MIRROR="${CI_PROJECT_DIR}/local_mirror"
 BUILD_CACHE_DIR="${LOCAL_MIRROR}/build_cache"
 SPACK_BIN_DIR="${CI_PROJECT_DIR}/bin"
-CDASH_UPLOAD_URL="${CDASH_BASE_URL}/submit.php?project=${CDASH_PROJECT_ENC}"
-DEP_JOB_RELATEBUILDS_URL="${CDASH_BASE_URL}/api/v1/relateBuilds.php"
-declare -a JOB_DEPS_PKG_NAMES
+
+if [ "${SPACK_ENABLE_CDASH}" == "True" ] ; then
+    CDASH_UPLOAD_URL="${SPACK_CDASH_BASE_URL}/submit.php?project=${SPACK_CDASH_PROJECT_ENC}"
+    DEP_JOB_RELATEBUILDS_URL="${SPACK_CDASH_BASE_URL}/api/v1/relateBuilds.php"
+    declare -a JOB_DEPS_PKG_NAMES
+fi
 
 export SPACK_ROOT=${CI_PROJECT_DIR}
-export PATH="${SPACK_BIN_DIR}:${PATH}"
+# export PATH="${SPACK_BIN_DIR}:${PATH}"
 export GNUPGHOME="${CI_PROJECT_DIR}/opt/spack/gpg"
 
+. "${CI_PROJECT_DIR}/share/spack/setup-env.sh"
+
 mkdir -p ${JOB_LOG_DIR}
 mkdir -p ${SPEC_DIR}
@@ -160,41 +180,89 @@ EOF
 }
 
 gen_full_specs_for_job_and_deps() {
+    SPEC_YAML_PATH="${SPEC_DIR}/${SPACK_JOB_SPEC_PKG_NAME}.yaml"
+    local spec_names_to_save="${SPACK_JOB_SPEC_PKG_NAME}"
+
+    if [ "${SPACK_ENABLE_CDASH}" == "True" ] ; then
+        IFS=';' read -ra DEPS <<< "${SPACK_RELATED_BUILDS}"
+        for i in "${DEPS[@]}"; do
+            depPkgName="${i}"
+            spec_names_to_save="${spec_names_to_save} ${depPkgName}"
+            JOB_DEPS_PKG_NAMES+=("${depPkgName}")
+        done
+    fi
 
-    read -ra PARTSARRAY <<< "${CI_JOB_NAME}"
-    local pkgName="${PARTSARRAY[0]}"
-    local pkgVersion="${PARTSARRAY[1]}"
-    local compiler="${PARTSARRAY[2]}"
-    local osarch="${PARTSARRAY[3]}"
-    local buildGroup="${PARTSARRAY[@]:4}" # get everything after osarch
-
-    JOB_GROUP="${buildGroup}"
-    JOB_PKG_NAME="${pkgName}"
-    SPEC_YAML_PATH="${SPEC_DIR}/${pkgName}.yaml"
-    local root_spec_name="${ROOT_SPEC}"
-    local spec_names_to_save="${pkgName}"
-
-    IFS=';' read -ra DEPS <<< "${DEPENDENCIES}"
-    for i in "${DEPS[@]}"; do
-        read -ra PARTSARRAY <<< "${i}"
-        pkgName="${PARTSARRAY[0]}"
-        spec_names_to_save="${spec_names_to_save} ${pkgName}"
-        JOB_DEPS_PKG_NAMES+=("${pkgName}")
-    done
-
-    spack -d buildcache save-yaml --specs "${spec_names_to_save}" --root-spec "${root_spec_name}" --yaml-dir "${SPEC_DIR}"
+    if [ "${SPACK_COMPILER_ACTION}" == "FIND_ANY" ]; then
+        # This corresponds to a bootstrapping phase where we need to
+        # rely on any available compiler to build the package (i.e. the
+        # compiler needed to be stripped from the spec), and thus we need
+        # to concretize the root spec again.
+        spack -d buildcache save-yaml --specs "${spec_names_to_save}" --root-spec "${SPACK_ROOT_SPEC}" --yaml-dir "${SPEC_DIR}"
+    else
+        # In this case, either we're relying on Spack to install a missing
+        # compiler bootstrapped in a previous phase, or else we only had one
+        # phase (like a site which already knows what compilers are available
+        # on its runners), so we don't want to concretize that root spec
+        # again. The reason we need this in the first case (bootstrapped
+        # compiler) is that we can't concretize a spec at this point if we're
+        # going to ask spack to "install_missing_compilers".
+        tmp_dir=$(mktemp -d)
+        TMP_YAML_PATH="${tmp_dir}/root.yaml"
+        ROOT_SPEC_YAML=$(spack python -c "import base64 ; import zlib ; print(str(zlib.decompress(base64.b64decode('${SPACK_ROOT_SPEC}')).decode('utf-8')))")
+        echo "${ROOT_SPEC_YAML}" > "${TMP_YAML_PATH}"
+        spack -d buildcache save-yaml --specs "${spec_names_to_save}" --root-spec-yaml "${TMP_YAML_PATH}" --yaml-dir "${SPEC_DIR}"
+        rm -rf ${tmp_dir}
+    fi
 }
 
 begin_logging
 
-gen_full_specs_for_job_and_deps
+echo "Running job for spec: ${CI_JOB_NAME}"
+
+# This should create the directory we referred to as GNUPGHOME earlier
+spack gpg list
 
-echo "Building package ${CDASH_BUILD_NAME}, ${HASH}, ${MIRROR_URL}"
+# Importing the secret key using gpg2 directly should allow to
+# sign and verify both
+set +x
+KEY_IMPORT_RESULT=`echo ${SPACK_SIGNING_KEY} | base64 --decode | gpg2 --import`
+check_error $? "gpg2 --import"
+set -x
+
+spack gpg list --trusted
+spack gpg list --signing
+
+# To have spack install missing compilers, we need to add a custom
+# configuration scope, then we pass that to the package installation
+# command
+CUSTOM_CONFIG_SCOPE_DIR="${TEMP_DIR}/config_scope"
+mkdir -p "${CUSTOM_CONFIG_SCOPE_DIR}"
+CUSTOM_CONFIG_SCOPE_ARG=""
+
+if [ "${SPACK_COMPILER_ACTION}" == "INSTALL_MISSING" ]; then
+    echo "Make sure bootstrapped compiler will be installed"
+    custom_config_file_path="${CUSTOM_CONFIG_SCOPE_DIR}/config.yaml"
+    cat <<CONFIG_STUFF > "${custom_config_file_path}"
+config:
+  install_missing_compilers: true
+CONFIG_STUFF
+    CUSTOM_CONFIG_SCOPE_ARG="-C ${CUSTOM_CONFIG_SCOPE_DIR}"
+    # Configure the binary mirror where, if needed, this job's compiler
+    # was installed in binary package form, then tell spack to
+    # install_missing_compilers.
+elif [ "${SPACK_COMPILER_ACTION}" == "FIND_ANY" ]; then
+    echo "Just find any available compiler"
+    spack compiler find
+else
+    echo "No compiler action to be taken"
+fi
 
 # Finally, list the compilers spack knows about
 echo "Compiler Configurations:"
 spack config get compilers
 
+# Write full-deps yamls for this job spec and its dependencies
+gen_full_specs_for_job_and_deps
+
 # Make the build_cache directory if it doesn't exist
 mkdir -p "${BUILD_CACHE_DIR}"
@@ -204,41 +272,41 @@ mkdir -p "${BUILD_CACHE_DIR}"
 # to fail.
 JOB_BUILD_CACHE_ENTRY_NAME=`spack -d buildcache get-buildcache-name --spec-yaml "${SPEC_YAML_PATH}"`
 if [[ $? -ne 0 ]]; then
-    echo "ERROR, unable to get buildcache entry name for job ${CI_JOB_NAME} (spec: ${CDASH_BUILD_NAME})"
+    echo "ERROR, unable to get buildcache entry name for job ${CI_JOB_NAME}"
     exit 1
 fi
 
-# This should create the directory we referred to as GNUPGHOME earlier
-spack gpg list
-
-# Importing the secret key using gpg2 directly should allow to
-# sign and verify both
-set +x
-KEY_IMPORT_RESULT=`echo ${SPACK_SIGNING_KEY} | base64 --decode | gpg2 --import`
-check_error $? "gpg2 --import"
-set -x
-
-spack gpg list --trusted
-spack gpg list --signing
-
-# Whether we have to build the spec or download it pre-built, we expect to find
-# the cdash build id file sitting in this location afterwards.
-JOB_CDASH_ID_FILE="${BUILD_CACHE_DIR}/${JOB_BUILD_CACHE_ENTRY_NAME}.cdashid"
+if [ "${SPACK_ENABLE_CDASH}" == "True" ] ; then
+    # Whether we have to build the spec or download it pre-built, we expect to find
+    # the cdash build id file sitting in this location afterwards.
+    JOB_CDASH_ID_FILE="${BUILD_CACHE_DIR}/${JOB_BUILD_CACHE_ENTRY_NAME}.cdashid"
+fi
 
 # Finally, we can check the spec we have been tasked with build against
 # the built binary on the remote mirror to see if it needs to be rebuilt
-spack -d buildcache check --spec-yaml "${SPEC_YAML_PATH}" --mirror-url "${MIRROR_URL}" --rebuild-on-error
+spack -d buildcache check --spec-yaml "${SPEC_YAML_PATH}" --mirror-url "${SPACK_MIRROR_URL}" --rebuild-on-error
 
 if [[ $? -ne 0 ]]; then
     # Configure mirror
     spack mirror add local_artifact_mirror "file://${LOCAL_MIRROR}"
 
-    JOB_CDASH_ID="NONE"
+    if [ "${SPACK_ENABLE_CDASH}" == "True" ] ; then
+        JOB_CDASH_ID="NONE"
 
-    # Install package, using the buildcache from the local mirror to
-    # satisfy dependencies.
-    BUILD_ID_LINE=`spack -d -k -v install --use-cache --keep-stage --cdash-upload-url "${CDASH_UPLOAD_URL}" --cdash-build "${CDASH_BUILD_NAME}" --cdash-site "Spack AWS Gitlab Instance" --cdash-track "${JOB_GROUP}" -f "${SPEC_YAML_PATH}" | grep "buildSummary\\.php"`
-    check_error $? "spack install"
+        # Install package, using the buildcache from the local mirror to
+        # satisfy dependencies.
+        BUILD_ID_LINE=`spack -d -k -v "${CUSTOM_CONFIG_SCOPE_ARG}" install --keep-stage --cdash-upload-url "${CDASH_UPLOAD_URL}" --cdash-build "${SPACK_CDASH_BUILD_NAME}" --cdash-site "${SPACK_CDASH_SITE}" --cdash-track "${SPACK_JOB_SPEC_BUILDGROUP}" -f "${SPEC_YAML_PATH}" | grep "buildSummary\\.php"`
+        check_error $? "spack install"
+
+        # By parsing the output of the "spack install" command, we can get the
+        # buildid generated for us by CDash
+        JOB_CDASH_ID=$(extract_build_id "${BUILD_ID_LINE}")
+
+        # Write the .cdashid file to the buildcache as well
+        echo "${JOB_CDASH_ID}" >> ${JOB_CDASH_ID_FILE}
+    else
+        spack -d -k -v "${CUSTOM_CONFIG_SCOPE_ARG}" install --keep-stage -f "${SPEC_YAML_PATH}"
+    fi
 
     # Copy some log files into an artifact location, once we have a way
     # to provide a spec.yaml file to more spack commands (e.g. "location")
@@ -248,73 +316,76 @@ if [[ $? -ne 0 ]]; then
     # cp "${build_log_file}" "${JOB_LOG_DIR}/"
     # cp "${config_log_file}" "${JOB_LOG_DIR}/"
 
-    # By parsing the output of the "spack install" command, we can get the
-    # buildid generated for us by CDash
-    JOB_CDASH_ID=$(extract_build_id "${BUILD_ID_LINE}")
-
     # Create buildcache entry for this package, reading the spec from the yaml
     # file.
     spack -d buildcache create --spec-yaml "${SPEC_YAML_PATH}" -a -f -d "${LOCAL_MIRROR}" --no-rebuild-index
     check_error $? "spack buildcache create"
 
-    # Write the .cdashid file to the buildcache as well
-    echo "${JOB_CDASH_ID}" >> ${JOB_CDASH_ID_FILE}
-
     # TODO: The upload-s3 command should eventually be replaced with something
     # like: "spack buildcache put <mirror> <spec>", when that subcommand is
    # properly implemented.
-    spack -d upload-s3 spec --base-dir "${LOCAL_MIRROR}" --spec-yaml "${SPEC_YAML_PATH}"
-    check_error $? "spack upload-s3 spec"
+    if [ ! -z "${SPACK_S3_UPLOAD_MIRROR_URL}" ] ; then
+        spack -d upload-s3 spec --base-dir "${LOCAL_MIRROR}" --spec-yaml "${SPEC_YAML_PATH}" --endpoint-url "${SPACK_S3_UPLOAD_MIRROR_URL}"
+        check_error $? "spack upload-s3 spec"
+    else
+        spack -d buildcache copy --base-dir "${LOCAL_MIRROR}" --spec-yaml "${SPEC_YAML_PATH}" --destination-url "${SPACK_MIRROR_URL}"
+    fi
 else
-    echo "spec ${CDASH_BUILD_NAME} is already up to date on remote mirror, downloading it"
+    echo "spec ${CI_JOB_NAME} is already up to date on remote mirror, downloading it"
 
     # Configure remote mirror so we can download buildcache entry
-    spack mirror add remote_binary_mirror ${MIRROR_URL}
+    spack mirror add remote_binary_mirror ${SPACK_MIRROR_URL}
 
     # Now download it
-    spack -d buildcache download --spec-yaml "${SPEC_YAML_PATH}" --path "${BUILD_CACHE_DIR}/" --require-cdashid
+    BUILDCACHE_DL_ARGS=("--spec-yaml" "${SPEC_YAML_PATH}" "--path" "${BUILD_CACHE_DIR}/" )
+    if [ "${SPACK_ENABLE_CDASH}" == "True" ] ; then
+        BUILDCACHE_DL_ARGS+=( "--require-cdashid" )
+    fi
+    spack -d buildcache download "${BUILDCACHE_DL_ARGS[@]}"
    check_error $? "spack buildcache download"
 fi
 
 # The next step is to relate this job to the jobs it depends on
-if [ -f "${JOB_CDASH_ID_FILE}" ]; then
-    JOB_CDASH_BUILD_ID=$(<${JOB_CDASH_ID_FILE})
+if [ "${SPACK_ENABLE_CDASH}" == "True" ] ; then
+    if [ -f "${JOB_CDASH_ID_FILE}" ]; then
+        JOB_CDASH_BUILD_ID=$(<${JOB_CDASH_ID_FILE})
 
-    if [ "${JOB_CDASH_BUILD_ID}" == "NONE" ]; then
-        echo "ERROR: unable to read this jobs id from ${JOB_CDASH_ID_FILE}"
-        exit 1
-    fi
+        if [ "${JOB_CDASH_BUILD_ID}" == "NONE" ]; then
+            echo "ERROR: unable to read this job's id from ${JOB_CDASH_ID_FILE}"
+            exit 1
+        fi
 
-    # Now get CDash ids for dependencies and "relate" each dependency build
-    # with this jobs build
-    for DEP_PKG_NAME in "${JOB_DEPS_PKG_NAMES[@]}"; do
-        echo "Getting cdash id for dependency --> ${DEP_PKG_NAME} <--"
-        DEP_SPEC_YAML_PATH="${SPEC_DIR}/${DEP_PKG_NAME}.yaml"
-        DEP_JOB_BUILDCACHE_NAME=`spack -d buildcache get-buildcache-name --spec-yaml "${DEP_SPEC_YAML_PATH}"`
-
-        if [[ $? -eq 0 ]]; then
-            DEP_JOB_ID_FILE="${BUILD_CACHE_DIR}/${DEP_JOB_BUILDCACHE_NAME}.cdashid"
-            echo "DEP_JOB_ID_FILE path = ${DEP_JOB_ID_FILE}"
-
-            if [ -f "${DEP_JOB_ID_FILE}" ]; then
-                DEP_JOB_CDASH_BUILD_ID=$(<${DEP_JOB_ID_FILE})
-                echo "File ${DEP_JOB_ID_FILE} contained value ${DEP_JOB_CDASH_BUILD_ID}"
-                echo "Relating builds -> ${CDASH_BUILD_NAME} (buildid=${JOB_CDASH_BUILD_ID}) depends on ${DEP_PKG_NAME} (buildid=${DEP_JOB_CDASH_BUILD_ID})"
-                relateBuildsPostBody="$(get_relate_builds_post_data "${CDASH_PROJECT}" ${JOB_CDASH_BUILD_ID} ${DEP_JOB_CDASH_BUILD_ID})"
-                relateBuildsResult=`curl "${DEP_JOB_RELATEBUILDS_URL}" -H "Content-Type: application/json" -H "Accept: application/json" -d "${relateBuildsPostBody}"`
-                echo "Result of curl request: ${relateBuildsResult}"
+    # Now get CDash ids for dependencies and "relate" each dependency build
+    # with this job's build
+    for DEP_PKG_NAME in "${JOB_DEPS_PKG_NAMES[@]}"; do
+        echo "Getting cdash id for dependency --> ${DEP_PKG_NAME} <--"
+        DEP_SPEC_YAML_PATH="${SPEC_DIR}/${DEP_PKG_NAME}.yaml"
+        DEP_JOB_BUILDCACHE_NAME=`spack -d buildcache get-buildcache-name --spec-yaml "${DEP_SPEC_YAML_PATH}"`
+
+        if [[ $? -eq 0 ]]; then
+            DEP_JOB_ID_FILE="${BUILD_CACHE_DIR}/${DEP_JOB_BUILDCACHE_NAME}.cdashid"
+            echo "DEP_JOB_ID_FILE path = ${DEP_JOB_ID_FILE}"
+
+            if [ -f "${DEP_JOB_ID_FILE}" ]; then
+                DEP_JOB_CDASH_BUILD_ID=$(<${DEP_JOB_ID_FILE})
+                echo "File ${DEP_JOB_ID_FILE} contained value ${DEP_JOB_CDASH_BUILD_ID}"
+                echo "Relating builds -> ${SPACK_CDASH_BUILD_NAME} (buildid=${JOB_CDASH_BUILD_ID}) depends on ${DEP_PKG_NAME} (buildid=${DEP_JOB_CDASH_BUILD_ID})"
+                relateBuildsPostBody="$(get_relate_builds_post_data "${SPACK_CDASH_PROJECT}" ${JOB_CDASH_BUILD_ID} ${DEP_JOB_CDASH_BUILD_ID})"
+                relateBuildsResult=`curl "${DEP_JOB_RELATEBUILDS_URL}" -H "Content-Type: application/json" -H "Accept: application/json" -d "${relateBuildsPostBody}"`
+                echo "Result of curl request: ${relateBuildsResult}"
+            else
+                echo "ERROR: Did not find expected .cdashid file for dependency: ${DEP_JOB_ID_FILE}"
+                exit 1
+            fi
             else
-                echo "ERROR: Did not find expected .cdashid file for dependency: ${DEP_JOB_ID_FILE}"
+                echo "ERROR: Unable to get buildcache entry name for ${DEP_SPEC_NAME}"
                 exit 1
             fi
-        else
-            echo "ERROR: Unable to get buildcache entry name for ${DEP_SPEC_NAME}"
-            exit 1
-        fi
-    done
-else
-    echo "ERROR: Did not find expected .cdashid file ${JOB_CDASH_ID_FILE}"
-    exit 1
+        done
+    else
+        echo "ERROR: Did not find expected .cdashid file ${JOB_CDASH_ID_FILE}"
+        exit 1
+    fi
 fi
 
 # Show the size of the buildcache and a list of what's in it, directly
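The SPACK_ROOT_SPEC handling above depends on a compact encoding: the generator (see format_root_spec in release_jobs.py below) zlib-compresses the concrete root spec yaml and base64-encodes it so it survives as a CI variable, and this script reverses that via "spack python". A self-contained sketch of the round trip; the yaml string here is a stand-in, not a real spec:

```python
import base64
import zlib

spec_yaml = 'spec:\n- readline:\n    version: 7.0\n'  # stand-in yaml

# Encoding side (what the ci generation step does)
encoded = base64.b64encode(
    zlib.compress(spec_yaml.encode('utf-8'))).decode('utf-8')

# Decoding side (what rebuild-package.sh runs via "spack python")
decoded = zlib.decompress(base64.b64decode(encoded)).decode('utf-8')

assert decoded == spec_yaml
```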
diff --git a/lib/spack/docs/example_files/spack.yaml b/lib/spack/docs/example_files/spack.yaml
index 7af7aebd75..01756f8ba4 100644
--- a/lib/spack/docs/example_files/spack.yaml
+++ b/lib/spack/docs/example_files/spack.yaml
@@ -1,11 +1,19 @@
 spack:
   definitions:
+    - compiler-pkgs:
+      - 'llvm+clang@6.0.1 os=centos7'
+      - 'gcc@6.5.0 os=centos7'
+      - 'llvm+clang@6.0.1 os=ubuntu18.04'
+      - 'gcc@6.5.0 os=ubuntu18.04'
     - pkgs:
       - readline@7.0
+      # - xsdk@0.4.0
     - compilers:
       - '%gcc@5.5.0'
+      - '%gcc@6.5.0'
       - '%gcc@7.3.0'
       - '%clang@6.0.0'
+      - '%clang@6.0.1'
     - oses:
       - os=ubuntu18.04
       - os=centos7
@@ -17,15 +25,15 @@ spack:
       - [$oses]
     exclude:
       - '%gcc@7.3.0 os=centos7'
+      - '%gcc@5.5.0 os=ubuntu18.04'
 
   mirrors:
     cloud_gitlab: https://mirror.spack.io
 
   compilers:
-    # The .gitlab-ci.yml for this project picks a Docker container which is
-    # based on ubuntu18.04 and which already has some compilers configured.
-    # Here we just add some of the ones which are defined on a different
-    # builder image.
+    # The .gitlab-ci.yml for this project picks a Docker container which does
+    # not have any compilers pre-built and ready to use, so we need to fake the
+    # existence of those here.
     - compiler:
         operating_system: centos7
         modules: []
@@ -44,13 +52,76 @@ spack:
           cxx: /not/used
           f77: /not/used
           fc: /not/used
+        spec: gcc@6.5.0
+        target: x86_64
+    - compiler:
+        operating_system: centos7
+        modules: []
+        paths:
+          cc: /not/used
+          cxx: /not/used
+          f77: /not/used
+          fc: /not/used
         spec: clang@6.0.0
         target: x86_64
+    - compiler:
+        operating_system: centos7
+        modules: []
+        paths:
+          cc: /not/used
+          cxx: /not/used
+          f77: /not/used
+          fc: /not/used
+        spec: clang@6.0.1
+        target: x86_64
+    - compiler:
+        operating_system: ubuntu18.04
+        modules: []
+        paths:
+          cc: /not/used
+          cxx: /not/used
+          f77: /not/used
+          fc: /not/used
+        spec: clang@6.0.0
+        target: x86_64
+    - compiler:
+        operating_system: ubuntu18.04
+        modules: []
+        paths:
+          cc: /not/used
+          cxx: /not/used
+          f77: /not/used
+          fc: /not/used
+        spec: clang@6.0.1
+        target: x86_64
+    - compiler:
+        operating_system: ubuntu18.04
+        modules: []
+        paths:
+          cc: /not/used
+          cxx: /not/used
+          f77: /not/used
+          fc: /not/used
+        spec: gcc@6.5.0
+        target: x86_64
+    - compiler:
+        operating_system: ubuntu18.04
+        modules: []
+        paths:
+          cc: /not/used
+          cxx: /not/used
+          f77: /not/used
+          fc: /not/used
+        spec: gcc@7.3.0
+        target: x86_64
 
   gitlab-ci:
+    bootstrap:
+      - name: compiler-pkgs
+        compiler-agnostic: true
     mappings:
-      - spack-cloud-ubuntu:
+      - # spack-cloud-ubuntu
        match:
          # these are specs, if *any* match the spec under consideration, this
          # 'mapping' will be used to generate the CI job
@@ -61,8 +132,10 @@ spack:
         # a part of the CI workflow
         tags:
           - spack-k8s
-        image: scottwittenburg/spack_builder_ubuntu_18.04
-      - spack-cloud-centos:
+        image:
+          name: scottwittenburg/spack_builder_ubuntu_18.04
+          entrypoint: [""]
+      - # spack-cloud-centos
        match:
          # these are specs, if *any* match the spec under consideration, this
          # 'mapping' will be used to generate the CI job
@@ -70,28 +143,15 @@ spack:
       runner-attributes:
         tags:
           - spack-k8s
-        image: spack/centos:7
-      - summit:
-        match:
-          - os=rhel7
-          - target=power9
-          - platform=secret-sauce
-        runner-attributes:
-          tags:
-            # this is a set of tags
-            - summit
-            - '{os}-{target}'
-            - rhel7
-            - centos7
-            - x86_64
-          variables:
-            SCHEDULER_ARGS: "arg2 arg2"
+        image:
+          name: scottwittenburg/spack_builder_centos_7
+          entrypoint: [""]
 
   cdash:
     build-group: Release Testing
-    url: https://cdash.spack.io
+    url: http://cdash
     project: Spack Testing
-    site: Spack AWS Gitlab Instance
+    site: Spack Docker-Compose Workflow
 
   repos: []
   upstreams: {}
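Each entry under the new bootstrap key is either a bare definition-list name or a mapping carrying a compiler-agnostic flag. A small sketch of how the generator below distinguishes the two forms (it simply lets plain strings raise AttributeError on .get):

```python
def parse_bootstrap_phase(phase):
    """Return (name, strip_compilers) for either entry form."""
    try:
        return phase.get('name'), bool(phase.get('compiler-agnostic'))
    except AttributeError:
        # A plain string names the spec list and keeps compilers in job names
        return phase, False

assert parse_bootstrap_phase('compiler-pkgs') == ('compiler-pkgs', False)
assert parse_bootstrap_phase(
    {'name': 'compiler-pkgs', 'compiler-agnostic': True}
) == ('compiler-pkgs', True)
```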
diff --git a/lib/spack/spack/binary_distribution.py b/lib/spack/spack/binary_distribution.py
index bae8edd884..8dcacb6df3 100644
--- a/lib/spack/spack/binary_distribution.py
+++ b/lib/spack/spack/binary_distribution.py
@@ -801,7 +801,7 @@ def _download_buildcache_entry(mirror_root, descriptions):
     for description in descriptions:
         url = os.path.join(mirror_root, description['url'])
         path = description['path']
-        fail_if_missing = not description['required']
+        fail_if_missing = description['required']
 
         mkdirp(path)
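The one-line change above fixes an inverted boolean: a missing download should fail the job only when the file was required (e.g. the tarball), not when it was optional (e.g. a .cdashid file). The descriptions here are illustrative:

```python
descriptions = [
    {'url': 'pkg.spack', 'path': '/tmp/bc', 'required': True},
    {'url': 'pkg.cdashid', 'path': '/tmp/bc', 'required': False},
]

for description in descriptions:
    # Previously this was "not description['required']", which made
    # optional files fatal and required files ignorable.
    fail_if_missing = description['required']
```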
diff --git a/lib/spack/spack/cmd/buildcache.py b/lib/spack/spack/cmd/buildcache.py
index 6a9f038d07..b3d73053e4 100644
--- a/lib/spack/spack/cmd/buildcache.py
+++ b/lib/spack/spack/cmd/buildcache.py
@@ -5,6 +5,7 @@
 
 import argparse
 import os
+import shutil
 import sys
 
 import llnl.util.tty as tty
@@ -176,9 +177,12 @@ def setup_parser(subparser):
     saveyaml = subparsers.add_parser('save-yaml',
                                      help=save_spec_yamls.__doc__)
     saveyaml.add_argument(
-        '-r', '--root-spec', default=None,
+        '--root-spec', default=None,
         help='Root spec of dependent spec')
     saveyaml.add_argument(
+        '--root-spec-yaml', default=None,
+        help='Path to yaml file containing root spec of dependent spec')
+    saveyaml.add_argument(
         '-s', '--specs', default=None,
         help='List of dependent specs for which saved yaml is desired')
     saveyaml.add_argument(
@@ -186,6 +190,19 @@ def setup_parser(subparser):
         help='Path to directory where spec yamls should be saved')
     saveyaml.set_defaults(func=save_spec_yamls)
 
+    # Copy buildcache from some directory to another mirror url
+    copy = subparsers.add_parser('copy', help=buildcache_copy.__doc__)
+    copy.add_argument(
+        '--base-dir', default=None,
+        help='Path to mirror directory (root of existing buildcache)')
+    copy.add_argument(
+        '--spec-yaml', default=None,
+        help='Path to spec yaml file representing buildcache entry to copy')
+    copy.add_argument(
+        '--destination-url', default=None,
+        help='Destination mirror url')
+    copy.set_defaults(func=buildcache_copy)
+
 
 def find_matching_specs(pkgs, allow_multiple_matches=False, env=None):
     """Returns a list of specs matching the not necessarily
@@ -526,7 +543,7 @@ def save_spec_yamls(args):
     successful. If any errors or exceptions are encountered, or if expected
     command-line arguments are not provided, then the exit code will be
     non-zero."""
-    if not args.root_spec:
+    if not args.root_spec and not args.root_spec_yaml:
         tty.msg('No root spec provided, exiting.')
         sys.exit(1)
 
@@ -538,9 +555,13 @@ def save_spec_yamls(args):
         tty.msg('No yaml directory provided, exiting.')
         sys.exit(1)
 
-    root_spec = Spec(args.root_spec)
-    root_spec.concretize()
-    root_spec_as_yaml = root_spec.to_yaml(hash=ht.build_hash)
+    if args.root_spec_yaml:
+        with open(args.root_spec_yaml) as fd:
+            root_spec_as_yaml = fd.read()
+    else:
+        root_spec = Spec(args.root_spec)
+        root_spec.concretize()
+        root_spec_as_yaml = root_spec.to_yaml(hash=ht.build_hash)
 
     save_dependency_spec_yamls(
         root_spec_as_yaml, args.yaml_dir, args.specs.split())
@@ -548,6 +569,78 @@ def save_spec_yamls(args):
     sys.exit(0)
 
 
+def buildcache_copy(args):
+    """Copy a buildcache entry and all its files from one mirror, given as
+    '--base-dir', to some other mirror, specified as '--destination-url'.
+    The specific buildcache entry to be copied from one location to the
+    other is identified using the '--spec-yaml' argument."""
+    # TODO: This sub-command should go away once #11117 is merged
+
+    if not args.spec_yaml:
+        tty.msg('No spec yaml provided, exiting.')
+        sys.exit(1)
+
+    if not args.base_dir:
+        tty.msg('No base directory provided, exiting.')
+        sys.exit(1)
+
+    if not args.destination_url:
+        tty.msg('No destination mirror url provided, exiting.')
+        sys.exit(1)
+
+    dest_url = args.destination_url
+
+    if dest_url[0:7] != 'file://' and dest_url[0] != '/':
+        tty.msg('Only urls beginning with "file://" or "/" are supported ' +
+                'by buildcache copy.')
+        sys.exit(1)
+
+    try:
+        with open(args.spec_yaml, 'r') as fd:
+            spec = Spec.from_yaml(fd.read())
+    except Exception as e:
+        tty.debug(e)
+        tty.error('Unable to concretize spec from yaml {0}'.format(
+            args.spec_yaml))
+        sys.exit(1)
+
+    dest_root_path = dest_url
+    if dest_url[0:7] == 'file://':
+        dest_root_path = dest_url[7:]
+
+    build_cache_dir = bindist.build_cache_relative_path()
+
+    tarball_rel_path = os.path.join(
+        build_cache_dir, bindist.tarball_path_name(spec, '.spack'))
+    tarball_src_path = os.path.join(args.base_dir, tarball_rel_path)
+    tarball_dest_path = os.path.join(dest_root_path, tarball_rel_path)
+
+    specfile_rel_path = os.path.join(
+        build_cache_dir, bindist.tarball_name(spec, '.spec.yaml'))
+    specfile_src_path = os.path.join(args.base_dir, specfile_rel_path)
+    specfile_dest_path = os.path.join(dest_root_path, specfile_rel_path)
+
+    cdashidfile_rel_path = os.path.join(
+        build_cache_dir, bindist.tarball_name(spec, '.cdashid'))
+    cdashid_src_path = os.path.join(args.base_dir, cdashidfile_rel_path)
+    cdashid_dest_path = os.path.join(dest_root_path, cdashidfile_rel_path)
+
+    # Make sure directory structure exists before attempting to copy
+    os.makedirs(os.path.dirname(tarball_dest_path))
+
+    # Now copy the specfile and tarball files to the destination mirror
+    tty.msg('Copying {0}'.format(tarball_rel_path))
+    shutil.copyfile(tarball_src_path, tarball_dest_path)
+
+    tty.msg('Copying {0}'.format(specfile_rel_path))
+    shutil.copyfile(specfile_src_path, specfile_dest_path)
+
+    # Copy the cdashid file (if exists) to the destination mirror
+    if os.path.exists(cdashid_src_path):
+        tty.msg('Copying {0}'.format(cdashidfile_rel_path))
+        shutil.copyfile(cdashid_src_path, cdashid_dest_path)
+
+
 def buildcache(parser, args):
     if args.func:
         args.func(args)
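The new "buildcache copy" subcommand only supports local destinations, stripping a leading file:// scheme to obtain a filesystem path. A sketch of just that normalization logic:

```python
def dest_root_from_url(dest_url):
    """Return a filesystem path for a file:// or absolute-path mirror url."""
    if not dest_url.startswith('file://') and not dest_url.startswith('/'):
        raise ValueError(
            'Only urls beginning with "file://" or "/" are supported')
    return dest_url[7:] if dest_url.startswith('file://') else dest_url

assert dest_root_from_url('file:///mirrors/local') == '/mirrors/local'
assert dest_root_from_url('/mirrors/local') == '/mirrors/local'
```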
diff --git a/lib/spack/spack/cmd/release_jobs.py b/lib/spack/spack/cmd/release_jobs.py
index 3ec85ea960..837065824a 100644
--- a/lib/spack/spack/cmd/release_jobs.py
+++ b/lib/spack/spack/cmd/release_jobs.py
@@ -3,9 +3,10 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 
+import base64
 import json
+import zlib
 
-from jsonschema import validate, ValidationError
 from six import iteritems
 from six.moves.urllib.error import HTTPError, URLError
 from six.moves.urllib.parse import urlencode
@@ -14,10 +15,11 @@ from six.moves.urllib.request import build_opener, HTTPHandler, Request
 import llnl.util.tty as tty
 
 import spack.environment as ev
+import spack.compilers as compilers
 from spack.dependency import all_deptypes
 from spack.error import SpackError
+import spack.hash_types as ht
 from spack.spec import Spec
-from spack.schema.specs_deps import schema as specs_deps_schema
 import spack.util.spack_yaml as syaml
 
 description = "generate release build set as .gitlab-ci.yml"
@@ -27,18 +29,10 @@ level = "long"
 
 def setup_parser(subparser):
     subparser.add_argument(
-        '-f', '--force', action='store_true', default=False,
-        help="Force re-concretization of environment first")
-
-    subparser.add_argument(
         '-o', '--output-file', default=".gitlab-ci.yml",
         help="path to output file to write")
 
     subparser.add_argument(
-        '-k', '--signing-key', default=None,
-        help="hash of gpg key to use for package signing")
-
-    subparser.add_argument(
         '-p', '--print-summary', action='store_true', default=False,
         help="Print summary of staged jobs to standard output")
 
@@ -54,7 +48,9 @@ def _create_buildgroup(opener, headers, url, project, group_name, group_type):
         "type": group_type
     }
 
-    request = Request(url, data=json.dumps(data), headers=headers)
+    enc_data = json.dumps(data).encode('utf-8')
+
+    request = Request(url, data=enc_data, headers=headers)
 
     response = opener.open(request)
     response_code = response.getcode()
@@ -103,7 +99,9 @@ def populate_buildgroup(job_names, group_name, project, site,
         } for name in job_names]
     }
 
-    request = Request(url, data=json.dumps(data), headers=headers)
+    enc_data = json.dumps(data).encode('utf-8')
+
+    request = Request(url, data=enc_data, headers=headers)
     request.get_method = lambda: 'PUT'
 
     response = opener.open(request)
@@ -115,9 +113,43 @@ def populate_buildgroup(job_names, group_name, project, site,
         raise SpackError(msg)
 
 
-def get_job_name(spec, osarch, build_group):
-    return '{0} {1} {2} {3} {4}'.format(
-        spec.name, spec.version, spec.compiler, osarch, build_group)
+def is_main_phase(phase_name):
+    return True if phase_name == 'specs' else False
+
+
+def get_job_name(phase, strip_compiler, spec, osarch, build_group):
+    item_idx = 0
+    format_str = ''
+    format_args = []
+
+    if phase:
+        format_str += '({{{0}}})'.format(item_idx)
+        format_args.append(phase)
+        item_idx += 1
+
+    format_str += ' {{{0}}}'.format(item_idx)
+    format_args.append(spec.name)
+    item_idx += 1
+
+    format_str += ' {{{0}}}'.format(item_idx)
+    format_args.append(spec.version)
+    item_idx += 1
+
+    if is_main_phase(phase) is True or strip_compiler is False:
+        format_str += ' {{{0}}}'.format(item_idx)
+        format_args.append(spec.compiler)
+        item_idx += 1
+
+    format_str += ' {{{0}}}'.format(item_idx)
+    format_args.append(osarch)
+    item_idx += 1
+
+    if build_group:
+        format_str += ' {{{0}}}'.format(item_idx)
+        format_args.append(build_group)
+        item_idx += 1
+
+    return format_str.format(*format_args)
 
 
 def get_cdash_build_name(spec, build_group):
@@ -137,6 +169,17 @@ def get_spec_string(spec):
     return spec.format(''.join(format_elements))
 
 
+def format_root_spec(spec, main_phase, strip_compiler):
+    if main_phase is False and strip_compiler is True:
+        return '{0}@{1} arch={2}'.format(
+            spec.name, spec.version, spec.architecture)
+    else:
+        spec_yaml = spec.to_yaml(hash=ht.build_hash).encode('utf-8')
+        return str(base64.b64encode(zlib.compress(spec_yaml)).decode('utf-8'))
+        # return '{0}@{1}%{2} arch={3}'.format(
+        #     spec.name, spec.version, spec.compiler, spec.architecture)
+
+
 def spec_deps_key_label(s):
     return s.dag_hash(), "%s/%s" % (s.name, s.dag_hash(7))
@@ -152,14 +195,6 @@ def _add_dependency(spec_label, dep_label, deps):
 def get_spec_dependencies(specs, deps, spec_labels):
     spec_deps_obj = compute_spec_deps(specs)
 
-    try:
-        validate(spec_deps_obj, specs_deps_schema)
-    except ValidationError as val_err:
-        tty.error('Ill-formed specs dependencies JSON object')
-        tty.error(spec_deps_obj)
-        tty.debug(val_err)
-        return
-
     if spec_deps_obj:
         dependencies = spec_deps_obj['dependencies']
         specs = spec_deps_obj['specs']
@@ -247,19 +282,19 @@ def print_staging_summary(spec_labels, dependencies, stages):
     if not stages:
         return
 
-    tty.msg('Staging summary:')
+    tty.msg('  Staging summary:')
     stage_index = 0
     for stage in stages:
-        tty.msg('  stage {0} ({1} jobs):'.format(stage_index, len(stage)))
+        tty.msg('    stage {0} ({1} jobs):'.format(stage_index, len(stage)))
 
         for job in sorted(stage):
             s = spec_labels[job]['spec']
-            tty.msg('    {0} -> {1}'.format(job, get_spec_string(s)))
+            tty.msg('      {0} -> {1}'.format(job, get_spec_string(s)))
 
         stage_index += 1
 
 
-def compute_spec_deps(spec_list, stream_like=None):
+def compute_spec_deps(spec_list):
     """
     Computes all the dependencies for the spec(s) and generates a JSON
     object which provides both a list of unique spec names as well as a
@@ -311,10 +346,6 @@ def compute_spec_deps(spec_list, stream_like=None):
         ]
     }
 
-    The object can be optionally written out to some stream.  This is
-    useful, for example, when we need to concretize and generate the
-    dependencies of a spec in a specific docker container.
-
     """
     deptype = all_deptypes
     spec_labels = {}
@@ -331,7 +362,8 @@ def compute_spec_deps(spec_list, stream_like=None):
 
     for spec in spec_list:
         spec.concretize()
-        root_spec = get_spec_string(spec)
+        # root_spec = get_spec_string(spec)
+        root_spec = spec
 
         rkey, rlabel = spec_deps_key_label(spec)
 
@@ -359,9 +391,6 @@ def compute_spec_deps(spec_list, stream_like=None):
         'dependencies': dependencies,
     }
 
-    if stream_like:
-        stream_like.write(json.dumps(deps_json_obj))
-
     return deps_json_obj
 
 
@@ -379,7 +408,6 @@ def find_matching_config(spec, ci_mappings):
 
 def release_jobs(parser, args):
     env = ev.get_env(args, 'release-jobs', required=True)
-    env.concretize(force=args.force)
 
     # FIXME: What's the difference between one that opens with 'spack'
     # and one that opens with 'env'?  This will only handle the former.
@@ -390,122 +418,219 @@ def release_jobs(parser, args):
 
     ci_mappings = yaml_root['gitlab-ci']['mappings']
 
-    ci_cdash = yaml_root['cdash']
-    build_group = ci_cdash['build-group']
-    cdash_url = ci_cdash['url']
-    cdash_project = ci_cdash['project']
-    proj_enc = urlencode({'project': cdash_project})
-    eq_idx = proj_enc.find('=') + 1
-    cdash_project_enc = proj_enc[eq_idx:]
-    cdash_site = ci_cdash['site']
+    build_group = None
+    enable_cdash_reporting = False
     cdash_auth_token = None
 
-    if args.cdash_credentials:
-        with open(args.cdash_credentials) as fd:
-            cdash_auth_token = fd.read()
-            cdash_auth_token = cdash_auth_token.strip()
+    if 'cdash' in yaml_root:
+        enable_cdash_reporting = True
+        ci_cdash = yaml_root['cdash']
+        build_group = ci_cdash['build-group']
+        cdash_url = ci_cdash['url']
+        cdash_project = ci_cdash['project']
+        proj_enc = urlencode({'project': cdash_project})
+        eq_idx = proj_enc.find('=') + 1
+        cdash_project_enc = proj_enc[eq_idx:]
+        cdash_site = ci_cdash['site']
+
+        if args.cdash_credentials:
+            with open(args.cdash_credentials) as fd:
+                cdash_auth_token = fd.read()
+                cdash_auth_token = cdash_auth_token.strip()
 
     ci_mirrors = yaml_root['mirrors']
-    mirror_urls = ci_mirrors.values()
-
-    spec_labels, dependencies, stages = stage_spec_jobs(env.all_specs())
-
-    if not stages:
-        tty.msg('No jobs staged, exiting.')
-        return
+    mirror_urls = [url for url in ci_mirrors.values()]
+
+    bootstrap_specs = []
+    phases = []
+    if 'bootstrap' in yaml_root['gitlab-ci']:
+        for phase in yaml_root['gitlab-ci']['bootstrap']:
+            try:
+                phase_name = phase.get('name')
+                strip_compilers = phase.get('compiler-agnostic')
+            except AttributeError:
+                phase_name = phase
+                strip_compilers = False
+            phases.append({
+                'name': phase_name,
+                'strip-compilers': strip_compilers,
+            })
+
+            for bs in env.spec_lists[phase_name]:
+                bootstrap_specs.append({
+                    'spec': bs,
+                    'phase-name': phase_name,
+                    'strip-compilers': strip_compilers,
+                })
+
+    phases.append({
+        'name': 'specs',
+        'strip-compilers': False,
+    })
+
+    staged_phases = {}
+    for phase in phases:
+        phase_name = phase['name']
+        staged_phases[phase_name] = stage_spec_jobs(env.spec_lists[phase_name])
 
     if args.print_summary:
-        print_staging_summary(spec_labels, dependencies, stages)
+        for phase in phases:
+            phase_name = phase['name']
+            tty.msg('Stages for phase "{0}"'.format(phase_name))
+            phase_stages = staged_phases[phase_name]
+            print_staging_summary(*phase_stages)
 
     all_job_names = []
     output_object = {}
-    job_count = 0
-
-    stage_names = ['stage-{0}'.format(i) for i in range(len(stages))]
-    stage = 0
-
-    for stage_jobs in stages:
-        stage_name = stage_names[stage]
-
-        for spec_label in stage_jobs:
-            release_spec = spec_labels[spec_label]['spec']
-            root_spec = spec_labels[spec_label]['rootSpec']
-
-            runner_attribs = find_matching_config(release_spec, ci_mappings)
-
-            if not runner_attribs:
-                tty.warn('No match found for {0}, skipping it'.format(
-                    release_spec))
-                continue
-
-            tags = [tag for tag in runner_attribs['tags']]
-
-            variables = {}
-            if 'variables' in runner_attribs:
-                variables.update(runner_attribs['variables'])
-
-            build_image = None
-            if 'image' in runner_attribs:
-                build_image = runner_attribs['image']
-
-            osname = str(release_spec.architecture)
-            job_name = get_job_name(release_spec, osname, build_group)
-            cdash_build_name = get_cdash_build_name(release_spec, build_group)
-
-            all_job_names.append(cdash_build_name)
-
-            job_scripts = ['./bin/rebuild-package.sh']
-
-            job_dependencies = []
-            if spec_label in dependencies:
-                job_dependencies = (
-                    [get_job_name(spec_labels[d]['spec'], osname, build_group)
-                     for d in dependencies[spec_label]])
-
-            job_variables = {
-                'MIRROR_URL': mirror_urls[0],
-                'CDASH_BASE_URL': cdash_url,
-                'CDASH_PROJECT': cdash_project,
-                'CDASH_PROJECT_ENC': cdash_project_enc,
-                'CDASH_BUILD_NAME': cdash_build_name,
-                'DEPENDENCIES': ';'.join(job_dependencies),
-                'ROOT_SPEC': str(root_spec),
-            }
-
-            if args.signing_key:
-                job_variables['SIGN_KEY_HASH'] = args.signing_key
-
-            variables.update(job_variables)
-
-            job_object = {
-                'stage': stage_name,
-                'variables': variables,
-                'script': job_scripts,
-                'artifacts': {
-                    'paths': [
-                        'local_mirror/build_cache',
-                        'jobs_scratch_dir',
-                        'cdash_report',
-                    ],
-                    'when': 'always',
-                },
-                'dependencies': job_dependencies,
-                'tags': tags,
-            }
-
-            if build_image:
-                job_object['image'] = build_image
-
-            output_object[job_name] = job_object
-            job_count += 1
-
-        stage += 1
+    job_id = 0
+    stage_id = 0
+
+    stage_names = []
+
+    for phase in phases:
+        phase_name = phase['name']
+        strip_compilers = phase['strip-compilers']
+
+        main_phase = is_main_phase(phase_name)
+        spec_labels, dependencies, stages = staged_phases[phase_name]
+
+        for stage_jobs in stages:
+            stage_name = 'stage-{0}'.format(stage_id)
+            stage_names.append(stage_name)
+            stage_id += 1
+
+            for spec_label in stage_jobs:
+                release_spec = spec_labels[spec_label]['spec']
+                root_spec = spec_labels[spec_label]['rootSpec']
+
+                runner_attribs = find_matching_config(root_spec, ci_mappings)
+
+                if not runner_attribs:
+                    tty.warn('No match found for {0}, skipping it'.format(
+                        release_spec))
+                    continue
+
+                tags = [tag for tag in runner_attribs['tags']]
+
+                variables = {}
+                if 'variables' in runner_attribs:
+                    variables.update(runner_attribs['variables'])
+
+                image_name = None
+                image_entry = None
+                if 'image' in runner_attribs:
+                    build_image = runner_attribs['image']
+                    try:
+                        image_name = build_image.get('name')
+                        entrypoint = build_image.get('entrypoint')
+                        image_entry = [p for p in entrypoint]
+                    except AttributeError:
+                        image_name = build_image
+
+                osname = str(release_spec.architecture)
+                job_name = get_job_name(phase_name, strip_compilers,
+                                        release_spec, osname, build_group)
+
+                job_scripts = ['./bin/rebuild-package.sh']
+
+                compiler_action = 'NONE'
+                if len(phases) > 1:
+                    compiler_action = 'FIND_ANY'
+                    if is_main_phase(phase_name):
+                        compiler_action = 'INSTALL_MISSING'
+
+                job_vars = {
+                    'SPACK_MIRROR_URL': mirror_urls[0],
+                    'SPACK_ROOT_SPEC': format_root_spec(
+                        root_spec, main_phase, strip_compilers),
+                    'SPACK_JOB_SPEC_PKG_NAME': release_spec.name,
+                    'SPACK_COMPILER_ACTION': compiler_action,
+                }
+
+                job_dependencies = []
+                if spec_label in dependencies:
+                    job_dependencies = (
+                        [get_job_name(phase_name, strip_compilers,
+                                      spec_labels[dep_label]['spec'],
+                                      osname, build_group)
+                         for dep_label in dependencies[spec_label]])
+
+                # This next section helps gitlab make sure the right
+                # bootstrapped compiler exists in the artifacts buildcache by
+                # creating an artificial dependency between this spec and its
+                # compiler.  So, if we are in the main phase, and if the
+                # compiler we are supposed to use is listed in any of the
+                # bootstrap spec lists, then we will add one more dependency to
+                # "job_dependencies" (that compiler).
+                if is_main_phase(phase_name):
+                    compiler_pkg_spec = compilers.pkg_spec_for_compiler(
+                        release_spec.compiler)
+                    for bs in bootstrap_specs:
+                        bs_arch = bs['spec'].architecture
+                        if (bs['spec'].satisfies(compiler_pkg_spec) and
+                            bs_arch == release_spec.architecture):
+                            c_job_name = get_job_name(bs['phase-name'],
+                                                      bs['strip-compilers'],
+                                                      bs['spec'],
+                                                      str(bs_arch),
+                                                      build_group)
+                            job_dependencies.append(c_job_name)
+
+                if enable_cdash_reporting:
+                    cdash_build_name = get_cdash_build_name(
+                        release_spec, build_group)
+                    all_job_names.append(cdash_build_name)
+
+                    related_builds = []  # Used for relating CDash builds
+                    if spec_label in dependencies:
+                        related_builds = (
+                            [spec_labels[d]['spec'].name
+                             for d in dependencies[spec_label]])
+
+                    job_vars['SPACK_CDASH_BASE_URL'] = cdash_url
+                    job_vars['SPACK_CDASH_PROJECT'] = cdash_project
+                    job_vars['SPACK_CDASH_PROJECT_ENC'] = cdash_project_enc
+                    job_vars['SPACK_CDASH_BUILD_NAME'] = cdash_build_name
+                    job_vars['SPACK_CDASH_SITE'] = cdash_site
+                    job_vars['SPACK_RELATED_BUILDS'] = ';'.join(related_builds)
+                    job_vars['SPACK_JOB_SPEC_BUILDGROUP'] = build_group
+
+                job_vars['SPACK_ENABLE_CDASH'] = str(enable_cdash_reporting)
+
+                variables.update(job_vars)
+
+                job_object = {
+                    'stage': stage_name,
+                    'variables': variables,
+                    'script': job_scripts,
+                    'tags': tags,
+                    'artifacts': {
+                        'paths': [
+                            'jobs_scratch_dir',
+                            'cdash_report',
+                            'local_mirror/build_cache',
+                        ],
+                        'when': 'always',
+                    },
+                    'dependencies': job_dependencies,
+                }
+
+                if image_name:
+                    job_object['image'] = image_name
+                    if image_entry is not None:
+                        job_object['image'] = {
+                            'name': image_name,
+                            'entrypoint': image_entry,
+                        }
+
+                output_object[job_name] = job_object
+                job_id += 1
 
     tty.msg('{0} build jobs generated in {1} stages'.format(
-        job_count, len(stages)))
+        job_id, stage_id))
 
     # Use "all_job_names" to populate the build group for this set
-    if cdash_auth_token:
+    if enable_cdash_reporting and cdash_auth_token:
         try:
             populate_buildgroup(all_job_names, build_group, cdash_project,
                                 cdash_site, cdash_auth_token, cdash_url)
@@ -521,7 +646,7 @@ def release_jobs(parser, args):
             'variables': {
                 'MIRROR_URL': mirror_urls[0],
             },
-            'image': 'scottwittenburg/spack_ci_generator_alpine',  # just needs some basic python image
+            'image': 'scottwittenburg/spack_ci_generator_alpine',
             'script': './bin/rebuild-index.sh',
             'tags': ['spack-k8s']  # may want a runner to handle this
         }
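The rewritten get_job_name assembles job names piecewise so that compiler-agnostic bootstrap jobs omit the compiler. An equivalent, simplified sketch (the argument values here are illustrative):

```python
def job_name(phase, strip_compiler, name, version, compiler, osarch, group):
    parts = []
    if phase:
        parts.append('({0})'.format(phase))
    parts.extend([name, version])
    # The compiler appears for main-phase jobs, or whenever it wasn't stripped
    if phase == 'specs' or not strip_compiler:
        parts.append(compiler)
    parts.append(osarch)
    if group:
        parts.append(group)
    return ' '.join(parts)

print(job_name('compiler-pkgs', True, 'gcc', '6.5.0', 'gcc@4.8',
               'linux-centos7-x86_64', 'Release Testing'))
# (compiler-pkgs) gcc 6.5.0 linux-centos7-x86_64 Release Testing
```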
diff --git a/lib/spack/spack/schema/gitlab_ci.py b/lib/spack/spack/schema/gitlab_ci.py
index 0765d311f4..dc7b16fb21 100644
--- a/lib/spack/spack/schema/gitlab_ci.py
+++ b/lib/spack/spack/schema/gitlab_ci.py
@@ -17,42 +17,77 @@ properties = {
         'additionalProperties': False,
         'required': ['mappings'],
         'patternProperties': {
-            r'mappings': {
+            'bootstrap': {
                 'type': 'array',
-                'default': {},
-                'additionalProperties': False,
-                'patternProperties': {
-                    r'[\w\d\-_\.]+': {
-                        'type': 'object',
-                        'additionalProperties': False,
-                        'required': ['match', 'runner-attributes'],
-                        'properties': {
-                            'match': {
-                                'type': 'array',
-                                'default': [],
-                                'items': {
+                'items': {
+                    'anyOf': [
+                        {
+                            'type': 'string',
+                        }, {
+                            'type': 'object',
+                            'additionalProperties': False,
+                            'required': ['name'],
+                            'properties': {
+                                'name': {
                                     'type': 'string',
                                 },
+                                'compiler-agnostic': {
+                                    'type': 'boolean',
+                                    'default': False,
+                                },
                             },
-                            'runner-attributes': {
-                                'type': 'object',
-                                'additionalProperties': True,
-                                'required': ['tags'],
-                                'properties': {
-                                    'image': {'type': 'string'},
-                                    'tags': {
-                                        'type': 'array',
-                                        'default': [],
-                                        'items': {'type': 'string'}
-                                    },
-                                    'variables': {
-                                        'type': 'object',
-                                        'default': {},
-                                        'patternProperties': {
-                                            r'[\w\d\-_\.]+': {
-                                                'type': 'string',
+                        },
+                    ],
+                },
+            },
+            'mappings': {
+                'type': 'array',
+                'items': {
+                    'type': 'object',
+                    'additionalProperties': False,
+                    'required': ['match', 'runner-attributes'],
+                    'properties': {
+                        'match': {
+                            'type': 'array',
+                            'items': {
+                                'type': 'string',
+                            },
+                        },
+                        'runner-attributes': {
+                            'type': 'object',
+                            'additionalProperties': True,
+                            'required': ['tags'],
+                            'properties': {
+                                'image': {
+                                    'oneOf': [
+                                        {
+                                            'type': 'string'
+                                        }, {
+                                            'type': 'object',
+                                            'properties': {
+                                                'name': {'type': 'string'},
+                                                'entrypoint': {
+                                                    'type': 'array',
+                                                    'items': {
+                                                        'type': 'string',
+                                                    },
+                                                },
                                             },
                                         },
+                                    ],
+                                },
+                                'tags': {
+                                    'type': 'array',
+                                    'default': [],
+                                    'items': {'type': 'string'}
+                                },
+                                'variables': {
+                                    'type': 'object',
+                                    'default': {},
+                                    'patternProperties': {
+                                        r'[\w\d\-_\.]+': {
+                                            'type': 'string',
+                                        },
                                     },
                                 },
                             },
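The bootstrap fragment above accepts either strings or {name, compiler-agnostic} mappings. A self-contained check of that shape with jsonschema, mirroring the anyOf fragment rather than importing spack's full schema:

```python
from jsonschema import validate

bootstrap_schema = {
    'type': 'array',
    'items': {
        'anyOf': [
            {'type': 'string'},
            {
                'type': 'object',
                'additionalProperties': False,
                'required': ['name'],
                'properties': {
                    'name': {'type': 'string'},
                    'compiler-agnostic': {'type': 'boolean'},
                },
            },
        ],
    },
}

# Both entry forms used in the example spack.yaml validate cleanly
validate(['compiler-pkgs'], bootstrap_schema)
validate([{'name': 'compiler-pkgs', 'compiler-agnostic': True}],
         bootstrap_schema)
```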
diff --git a/lib/spack/spack/schema/specs_deps.py b/lib/spack/spack/schema/specs_deps.py
deleted file mode 100644
index 0ac029c25a..0000000000
--- a/lib/spack/spack/schema/specs_deps.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
-# Spack Project Developers. See the top-level COPYRIGHT file for details.
-#
-# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
-"""Schema for expressing dependencies of a set of specs in a JSON file
-
-.. literalinclude:: _spack_root/lib/spack/spack/schema/specs_deps.py
-   :lines: 32-
-"""
-
-
-schema = {
-    '$schema': 'http://json-schema.org/schema#',
-    'title': 'Spack schema for the dependencies of a set of specs',
-    'type': 'object',
-    'additionalProperties': False,
-    'required': ['specs'],
-    'properties': {
-        r'dependencies': {
-            'type': 'array',
-            'default': [],
-            'items': {
-                'type': 'object',
-                'additionalProperties': False,
-                'required': ['depends', 'spec'],
-                'properties': {
-                    r'depends': {'type': 'string'},
-                    r'spec': {'type': 'string'},
-                },
-            },
-        },
-        r'specs': {
-            'type': 'array',
-            'default': [],
-            'items': {
-                'type': 'object',
-                'additionalProperties': False,
-                'required': ['root_spec', 'spec', 'label'],
-                'properties': {
-                    r'root_spec': {'type': 'string'},
-                    r'spec': {'type': 'string'},
-                    r'label': {'type': 'string'},
-                }
-            },
-        },
-    },
-}
diff --git a/lib/spack/spack/test/cmd/release_jobs.py b/lib/spack/spack/test/cmd/release_jobs.py
index 5197bfef22..c4f44308d9 100644
--- a/lib/spack/spack/test/cmd/release_jobs.py
+++ b/lib/spack/spack/test/cmd/release_jobs.py
@@ -102,8 +102,7 @@ spack:
     some-mirror: https://my.fake.mirror
   gitlab-ci:
     mappings:
-      - some-runner-mapping:
-          match:
+      - match:
           - archive-files
         runner-attributes:
           tags: