stages:
  - build
  - deploy
  - sync

# start an OpenShift Job that will build the Spack environment
.deploy-build-environment:
  stage: deploy
  script:
    - oc login "$OPENSHIFT_SERVER" --token="$OPENSHIFT_TOKEN"
    - tar czf ${SPACK_ENV_TAR_FILE} packages/ repo.yaml spack.yaml create_JupyterLab_kernel.sh
    - mkdir copy_folder
    - mv ${SPACK_ENV_TAR_FILE} copy_folder
    # create job description file
    - chmod a+x create_job.sh
    - ./create_job.sh $INSTALLATION_ROOT $SPACKIFIED_ENV $OP $SPACK_ENV_TAR_FILE $CI_PIPELINE_ID $BUILD_ENV_DOCKER_IMAGE $LAB_KERNEL_PATH $OKD_CLUSTER_UID
    - cat simplejob.yml
    # select the project in openshift
    - oc project $OC_PROJECT
    # start the deploy job
    - oc create -f simplejob.yml
    ## wait for job to finish https://stackoverflow.com/questions/5073453/wait-for-kubernetes-job-to-complete-on-either-failure-success-using-command-line
    - oc get job/simplejob${CI_PIPELINE_ID} -o=jsonpath='{.status}' -w && oc get job/simplejob${CI_PIPELINE_ID} -o=jsonpath='{.status.conditions[*].type}' | grep -i -E 'failed|complete' || echo 'Failed'
    # wait for job's pod to become available so as to copy from the gitlab runner to the OpenShift pod
    # the necessary files that define the environment that spack needs to build
    # NOTE: $x is quoted so the test is valid while the pod is not yet listed (empty $x)
    - while true; do x=$(oc get pods |grep simplejob${CI_PIPELINE_ID}|awk '{ print $3}'); if [ "$x" = "Running" ]; then break; fi; sleep 10; done
    - pod=$(oc get pods |grep simplejob${CI_PIPELINE_ID}|awk '{ print $1}')
    - oc rsync copy_folder $pod:/opt/app-root/src
    # when build job finishes get the logs
    - while true; do sleep 300; x=$(oc get pods |grep $pod|awk '{ print $3}'); if [ "$x" != "Running" ]; then break; fi; done
    - oc logs jobs/simplejob${CI_PIPELINE_ID} | tee log.txt
    # if spack install has failed, fail the pipeline
    - if grep -q "No module available for package" log.txt; then exit 1; fi
    # delete the job from OpenShift as we have the logs here
    - oc delete job simplejob${CI_PIPELINE_ID} || true
  tags:
    - shell-runner

# Deploy in the lab-int environment the version of the tools to be
# tested before released to production (push pipeline)
# deploy on the dev environment of the okd dev cluster at CSCS
# runs on protected branches only as the token variable is protected
deploy-int-release-dev-cscs:
  extends: .deploy-build-environment
  variables:
    OPENSHIFT_SERVER: $CSCS_OPENSHIFT_DEV_SERVER
    OPENSHIFT_TOKEN: $CSCS_OPENSHIFT_DEV_TOKEN
    INSTALLATION_ROOT: $CSCS_INSTALLATION_ROOT_DEV
    SPACKIFIED_ENV: $CSCS_SPACKIFIED_ENV_DEV
    OP: $CSCS_OPERATION_DEV
    BUILD_ENV_DOCKER_IMAGE: $CSCS_BUILD_ENV_DOCKER_IMAGE_DEV
    LAB_KERNEL_PATH: $CSCS_LAB_KERNEL_PATH_DEV
    OKD_CLUSTER_UID: $CSCS_OKD_DEV_UID
    #SPACK_ENV_TAR_FILE: ebrains-spack-builds${CI_PIPELINE_ID}.tar.gz
    SPACK_ENV_TAR_FILE: ebrains-spack-builds.tar.gz
    OC_PROJECT: jupyterhub-int
  resource_group: shared-NFS-mount-dev-cscs
  only:
    - master
  except:
    variables:
      - $CI_PIPELINE_SOURCE == "schedule"

# Deploy the production release of tools (manual pipeline)
# deploy on the production environment of the okd prod cluster at CSCS
# runs on protected branches only as the token variable is protected
deploy-prod-release-prod-cscs:
  extends: .deploy-build-environment
  variables:
    OPENSHIFT_SERVER: $CSCS_OPENSHIFT_PROD_SERVER
    OPENSHIFT_TOKEN: $CSCS_OPENSHIFT_PROD_TOKEN
    INSTALLATION_ROOT: $CSCS_INSTALLATION_ROOT_PROD
    SPACKIFIED_ENV: $CSCS_SPACKIFIED_ENV_PROD
    OP: $CSCS_OPERATION_PROD
    BUILD_ENV_DOCKER_IMAGE: $CSCS_BUILD_ENV_DOCKER_IMAGE_PROD
    LAB_KERNEL_PATH: $CSCS_LAB_KERNEL_PATH_PROD
    OKD_CLUSTER_UID: $CSCS_OKD_PROD_UID
    #SPACK_ENV_TAR_FILE: ebrains-spack-builds${CI_PIPELINE_ID}.tar.gz
    SPACK_ENV_TAR_FILE: ebrains-spack-builds.tar.gz
    OC_PROJECT: jupyterhub
  resource_group: shared-NFS-mount-prod-cscs
  rules:
    - if: '$CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH && $CI_COMMIT_BRANCH =~ /ebrains/'
      when: manual
      allow_failure: false

# Deploy the production release of tools (manual pipeline)
# deploy on the production environment of the okd prod cluster at JSC
# runs on protected branches only as the token variable is protected
deploy-prod-release-prod-jsc:
  extends: .deploy-build-environment
  variables:
    OPENSHIFT_SERVER: $JSC_OPENSHIFT_PROD_SERVER
    OPENSHIFT_TOKEN: $JSC_OPENSHIFT_PROD_TOKEN
    INSTALLATION_ROOT: $JSC_INSTALLATION_ROOT_PROD
    SPACKIFIED_ENV: $JSC_SPACKIFIED_ENV_PROD
    OP: $JSC_OPERATION_PROD
    BUILD_ENV_DOCKER_IMAGE: $JSC_BUILD_ENV_DOCKER_IMAGE_PROD
    LAB_KERNEL_PATH: $JSC_LAB_KERNEL_PATH_PROD
    OKD_CLUSTER_UID: $JSC_OKD_PROD_UID
    #SPACK_ENV_TAR_FILE: ebrains-spack-builds${CI_PIPELINE_ID}.tar.gz
    SPACK_ENV_TAR_FILE: ebrains-spack-builds.tar.gz
    OC_PROJECT: jupyterhub
  resource_group: shared-NFS-mount-prod-jsc
  rules:
    - if: '$CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH && $CI_COMMIT_BRANCH =~ /ebrains/'
      when: manual
      allow_failure: false

# Deploy the experimental release of tools (scheduled pipeline)
# once a week from latest working version of integration release
# (branch=experimental_release) to an experimental JupyterLab kernel
# deploy on the dev environment of the okd dev cluster at CSCS
# runs on protected branches only as the token variable is protected
deploy-exp-release-dev-cscs:
  extends: .deploy-build-environment
  variables:
    OPENSHIFT_SERVER: $CSCS_OPENSHIFT_DEV_SERVER
    OPENSHIFT_TOKEN: $CSCS_OPENSHIFT_DEV_TOKEN
    INSTALLATION_ROOT: $CSCS_INSTALLATION_ROOT_DEV
    SPACKIFIED_ENV: experimental
    OP: update
    BUILD_ENV_DOCKER_IMAGE: $CSCS_BUILD_ENV_DOCKER_IMAGE_DEV
    LAB_KERNEL_PATH: /srv/jupyterlab_kernels/int/experimental
    OKD_CLUSTER_UID: $CSCS_OKD_DEV_UID
    #SPACK_ENV_TAR_FILE: ebrains-spack-builds${CI_PIPELINE_ID}.tar.gz
    SPACK_ENV_TAR_FILE: ebrains-spack-builds.tar.gz
    OC_PROJECT: jupyterhub-int
  # rewrite the tail of the kernel-creation script so the build lands in an
  # "experimental" kernel instead of the default one; backslash-escaped $ and
  # nested EOF/EOS heredocs keep the expansion for the inner script, not the runner
  before_script:
    - |
      head -n -9 create_JupyterLab_kernel.sh > tmp.txt && mv tmp.txt create_JupyterLab_kernel.sh
      cat << EOS >> create_JupyterLab_kernel.sh
      mkdir \$LAB_KERNEL_PATH/spack_experimental_release
      cat <<EOF >\$LAB_KERNEL_PATH/spack_experimental_release/kernel.json
      {
       "argv": ["\$LAB_KERNEL_PATH/bin/env.sh", "{connection_file}", "--profile=default"],
       "display_name": "EBRAINS_experimental_release",
       "name": "spack_experimental_release",
       "language": "python",
       "env": {
        "LAB_KERNEL_NAME": "EBRAINS_experimental_release",
        "LAB_KERNEL_RELEASE_DATE": "\$(date +"%y.%m.%d")"
       }
      }
      EOF
      EOS
  resource_group: shared-NFS-mount-dev-cscs
  only:
    refs:
      - schedules
    variables:
      - $RELEASE == "experimental-dev"
  allow_failure: false

# Deploy the experimental release of tools (scheduled pipeline)
# once a week from latest working version of integration release
# (branch=experimental_release) to an experimental JupyterLab kernel
# deploy on the prod environment of the okd prod cluster at CSCS
# runs on protected branches only as the token variable is protected
deploy-exp-release-prod-cscs:
  extends: .deploy-build-environment
  variables:
    OPENSHIFT_SERVER: $CSCS_OPENSHIFT_PROD_SERVER
    OPENSHIFT_TOKEN: $CSCS_OPENSHIFT_PROD_TOKEN
    INSTALLATION_ROOT: $CSCS_INSTALLATION_ROOT_PROD
    SPACKIFIED_ENV: experimental
    OP: update
    BUILD_ENV_DOCKER_IMAGE: $CSCS_BUILD_ENV_DOCKER_IMAGE_PROD
    LAB_KERNEL_PATH: /srv/jupyterlab_kernels/prod/experimental
    OKD_CLUSTER_UID: $CSCS_OKD_PROD_UID
    #SPACK_ENV_TAR_FILE: ebrains-spack-builds${CI_PIPELINE_ID}.tar.gz
    SPACK_ENV_TAR_FILE: ebrains-spack-builds.tar.gz
    OC_PROJECT: jupyterhub
  before_script:
    - |
      head -n -9 create_JupyterLab_kernel.sh > tmp.txt && mv tmp.txt create_JupyterLab_kernel.sh
      cat << EOS >> create_JupyterLab_kernel.sh
      mkdir \$LAB_KERNEL_PATH/spack_experimental_release
      cat <<EOF >\$LAB_KERNEL_PATH/spack_experimental_release/kernel.json
      {
       "argv": ["\$LAB_KERNEL_PATH/bin/env.sh", "{connection_file}", "--profile=default"],
       "display_name": "EBRAINS_experimental_release",
       "name": "spack_experimental_release",
       "language": "python",
       "env": {
        "LAB_KERNEL_NAME": "EBRAINS_experimental_release",
        "LAB_KERNEL_RELEASE_DATE": "\$(date +"%y.%m.%d")"
       }
      }
      EOF
      EOS
  resource_group: shared-NFS-mount-prod-cscs
  only:
    refs:
      - schedules
    variables:
      - $RELEASE == "experimental"
  allow_failure: false

# Deploy the experimental release of tools (scheduled pipeline)
# once a week from latest working version of integration release
# (branch=experimental_release) to an experimental JupyterLab kernel
# deploy on the prod environment of the okd prod cluster at JSC
# runs on protected branches only as the token variable is protected
deploy-exp-release-prod-jsc:
  extends: .deploy-build-environment
  variables:
    OPENSHIFT_SERVER: $JSC_OPENSHIFT_PROD_SERVER
    OPENSHIFT_TOKEN: $JSC_OPENSHIFT_PROD_TOKEN
    INSTALLATION_ROOT: $JSC_INSTALLATION_ROOT_PROD
    SPACKIFIED_ENV: experimental
    OP: update
    BUILD_ENV_DOCKER_IMAGE: $JSC_BUILD_ENV_DOCKER_IMAGE_PROD
    LAB_KERNEL_PATH: /srv/jupyterlab_kernels/prod/experimental
    OKD_CLUSTER_UID: $JSC_OKD_PROD_UID
    #SPACK_ENV_TAR_FILE: ebrains-spack-builds${CI_PIPELINE_ID}.tar.gz
    SPACK_ENV_TAR_FILE: ebrains-spack-builds.tar.gz
    OC_PROJECT: jupyterhub
  before_script:
    - |
      head -n -9 create_JupyterLab_kernel.sh > tmp.txt && mv tmp.txt create_JupyterLab_kernel.sh
      cat << EOS >> create_JupyterLab_kernel.sh
      mkdir \$LAB_KERNEL_PATH/spack_experimental_release
      cat <<EOF >\$LAB_KERNEL_PATH/spack_experimental_release/kernel.json
      {
       "argv": ["\$LAB_KERNEL_PATH/bin/env.sh", "{connection_file}", "--profile=default"],
       "display_name": "EBRAINS_experimental_release",
       "name": "spack_experimental_release",
       "language": "python",
       "env": {
        "LAB_KERNEL_NAME": "EBRAINS_experimental_release",
        "LAB_KERNEL_RELEASE_DATE": "\$(date +"%y.%m.%d")"
       }
      }
      EOF
      EOS
  resource_group: shared-NFS-mount-prod-jsc
  only:
    refs:
      - schedules
    variables:
      - $RELEASE == "experimental"
  allow_failure: false

# Build the Spack environment on a read-only docker runner (merge-request check)
build-spack-env-on-runner:
  stage: build
  rules:
    - if: $CI_PIPELINE_SOURCE == 'merge_request_event' && $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == 'master'
  tags:
    - docker-runner
    - read-only
  image: docker-registry.ebrains.eu/tc/ebrains-spack-build-env:gitlab_runners_latest
  variables:
    SPACK_DEV_ENV: ebrains-dev
    SPACK_DEV_PATH: $CI_PROJECT_DIR/spack
    SPACK_USER_CACHE_PATH: $CI_PROJECT_DIR/.spack
    SPACK_USER_CONFIG_PATH: $CI_PROJECT_DIR/.spack
    TMP: $CI_PROJECT_DIR/.spack-tmp
  script:
    - git clone -c feature.manyFiles=true https://github.com/spack/spack $SPACK_DEV_PATH
    - cd $SPACK_DEV_PATH
    - git checkout -b ebrains_spack_commit a8d440d3ababcdec20d665ad938ab880cd9b9d17
    - cd ../
    - mkdir $TMP
    - |
      cat <<EOF > $SPACK_DEV_PATH/etc/spack/packages.yaml
      packages:
        all:
          target: [x86_64]
      EOF
      cat <<EOF > $SPACK_DEV_PATH/etc/spack/defaults/upstreams.yaml
      upstreams:
        ebrains-gitlab-spack-instance:
          install_tree: /mnt/spack/opt/spack
      EOF
    - . $SPACK_DEV_PATH/share/spack/setup-env.sh
    - spack find
    - spack load gcc@10.3.0
    - spack compiler find
    - spack repo add .
    - spack repo list
    - spack env create $SPACK_DEV_ENV spack.yaml
    - spack env activate $SPACK_DEV_ENV
    - spack concretize --reuse -f
    - spack install --reuse --no-check-signature
#  cache:
#    key: spack-cache-$CI_COMMIT_REF_SLUG
#    paths:
#      - "$SPACK_DEV_PATH"
#      - "$SPACK_USER_CONFIG_PATH"
#    when: always
  artifacts:
    paths:
      - .spack-tmp/spack/spack-stage
    when: on_failure

# Keep the shared (NFS-mounted) Spack instance in sync (scheduled pipeline)
sync-gitlab-spack-instance:
  stage: sync
  tags:
    - docker-runner
    - read-write
  image: docker-registry.ebrains.eu/tc/ebrains-spack-build-env:gitlab_runners_nfs_latest
  variables:
    SPACK_NFS_ENV: ebrains-runner-build
    SPACK_PATH: /mnt/spack
    SPACK_USER_CACHE_PATH: $SPACK_PATH/.spack
    SPACK_USER_CONFIG_PATH: $SPACK_PATH/.spack
  script:
    - rm -rf /mnt/build_logs/
    - |
      if [ ! -d $SPACK_PATH ]; then
        # git clone --depth 1 -c advice.detachedHead=false -c feature.manyFiles=true --branch $SPACK_VERSION https://github.com/spack/spack $SPACK_PATH
        git clone -c feature.manyFiles=true https://github.com/spack/spack $SPACK_PATH
        cd $SPACK_PATH
        git checkout -b ebrains_spack_commit a8d440d3ababcdec20d665ad938ab880cd9b9d17
        cd ../
        cat <<EOF > $SPACK_PATH/etc/spack/packages.yaml
      packages:
        all:
          target: [x86_64]
      EOF
      fi
    - . $SPACK_PATH/share/spack/setup-env.sh
    # - spack bootstrap untrust github-actions
    - spack compiler find
    - spack compiler list
    - spack load gcc@10.3.0 || spack install gcc@10.3.0
    - spack load gcc@10.3.0
    - spack compiler find
    - spack compiler list
    - spack install python@3.8.11 %gcc@10.3.0 || (cp -r /tmp/spack/spack-stage /mnt/build_logs; exit 1)
    # - for section in $(spack config list); do spack config blame $section; done
    - spack repo list | grep -q ebrains-spack-builds && echo "Repository registered already" || spack repo add $CI_PROJECT_DIR
    - spack repo list
    - spack env list | grep -q $SPACK_NFS_ENV && echo "Environment created already" || spack env create $SPACK_NFS_ENV $CI_PROJECT_DIR/spack.yaml
    - spack env activate $SPACK_NFS_ENV
    - cp $CI_PROJECT_DIR/spack.yaml $SPACK_ROOT/var/spack/environments/$SPACK_NFS_ENV/spack.yaml
    - spack concretize -f
    - spack install --no-check-signature || (cp -r /tmp/spack/spack-stage /mnt/build_logs; exit 1)
  timeout: 2 days
  only:
    refs:
      - schedules
    variables:
      - $SYNC_BRANCH == "experimental"
  allow_failure: false