# start an OpenShift Job that will build the Spack environment
.deploy-build-environment:
  stage: deploy
  before_script:
    - oc login "$OPENSHIFT_SERVER" --token="$OPENSHIFT_TOKEN"
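    # OPENSHIFT_SERVER and OPENSHIFT_TOKEN are provided per target cluster by the concrete deploy jobs below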
    - tar czf ${SPACK_ENV_TAR_FILE} packages/ repo.yaml spack.yaml create_JupyterLab_kernel.sh
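    # (the tarball bundles the files that define the Spack environment so they can be copied to the build pod later)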
    - mkdir copy_folder
    - mv ${SPACK_ENV_TAR_FILE} copy_folder
  script:
    # create job description file
    - chmod a+x create_job.sh
    - ./create_job.sh $INSTALLATION_ROOT $SPACKIFIED_ENV $OP $SPACK_ENV_TAR_FILE $CI_PIPELINE_ID $BUILD_ENV_DOCKER_IMAGE $LAB_KERNEL_PATH $OKD_CLUSTER_UID
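    # (create_job.sh presumably renders the OpenShift Job description from these positional arguments;
    #  their order must match what the script expects)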
    - oc project $OC_PROJECT
    ## wait for job to finish https://stackoverflow.com/questions/5073453/wait-for-kubernetes-job-to-complete-on-either-failure-success-using-command-line
    - oc get job/simplejob${CI_PIPELINE_ID} -o=jsonpath='{.status}' -w && oc get job/simplejob${CI_PIPELINE_ID} -o=jsonpath='{.status.conditions[*].type}' | grep -i -E 'failed|complete' || echo 'Failed'
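    # (the grep checks whether the job status reports a Failed or Complete condition; the trailing
    #  `|| echo 'Failed'` keeps this step itself from failing the pipeline)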
    # wait for the job's pod to become available, so that the files that define the environment
    # Spack needs to build can be copied from the GitLab runner to the OpenShift pod
    - while true; do x=$(oc get pods | grep simplejob${CI_PIPELINE_ID} | awk '{ print $3 }'); if [ "$x" == "Running" ]; then break; fi; sleep 10; done
    - pod=$(oc get pods | grep simplejob${CI_PIPELINE_ID} | awk '{ print $1 }')
    - oc rsync copy_folder $pod:/opt/app-root/src
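    # (this places copy_folder, and with it the tarball, under /opt/app-root/src inside the build pod)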
    # when the build job finishes, get the logs
    - while true; do sleep 300; x=$(oc get pods | grep $pod | awk '{ print $3 }'); if [ "$x" != "Running" ]; then break; fi; done
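    # (checks every 5 minutes whether the build pod has stopped running)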
    - oc logs jobs/simplejob${CI_PIPELINE_ID} | tee log.txt
    # if spack install has failed, fail the pipeline
    # - if [ $(cat log.txt |grep "Error:"|wc -l) -gt 0 ]; then exit 1;fi;
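    # (currently commented out, so a failed spack install does not fail the pipeline)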
    # delete the job from OpenShift as we have the logs here
    - oc delete job simplejob${CI_PIPELINE_ID} || true
# deploy on the dev environment of the OKD dev cluster at CSCS
# runs on protected branches only as the token variable is protected
deploy-dev-environment-cscs:
  extends: .deploy-build-environment
  variables:
    OPENSHIFT_SERVER: $CSCS_OPENSHIFT_DEV_SERVER
    OPENSHIFT_TOKEN: $CSCS_OPENSHIFT_DEV_TOKEN
    INSTALLATION_ROOT: $CSCS_INSTALLATION_ROOT_DEV
    SPACKIFIED_ENV: $CSCS_SPACKIFIED_ENV_DEV
    OP: $CSCS_OPERATION_DEV
    BUILD_ENV_DOCKER_IMAGE: $CSCS_BUILD_ENV_DOCKER_IMAGE_DEV
    LAB_KERNEL_PATH: $CSCS_LAB_KERNEL_PATH_DEV
    OKD_CLUSTER_UID: $CSCS_OKD_DEV_UID
    #SPACK_ENV_TAR_FILE: ebrains-spack-builds${CI_PIPELINE_ID}.tar.gz
    SPACK_ENV_TAR_FILE: ebrains-spack-builds.tar.gz
    OC_PROJECT: jupyterhub-int
  resource_group: shared-NFS-mount-dev-cscs
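  # resource_group ensures only one pipeline at a time deploys to the shared NFS mount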
  only:
    - master
# deploy on the production environment of the OKD prod cluster at CSCS
# runs on protected branches only as the token variable is protected
deploy-prod-environment-cscs:
  extends: .deploy-build-environment
  variables:
    OPENSHIFT_SERVER: $CSCS_OPENSHIFT_PROD_SERVER
    OPENSHIFT_TOKEN: $CSCS_OPENSHIFT_PROD_TOKEN
    INSTALLATION_ROOT: $CSCS_INSTALLATION_ROOT_PROD
    SPACKIFIED_ENV: $CSCS_SPACKIFIED_ENV_PROD
    OP: $CSCS_OPERATION_PROD
    BUILD_ENV_DOCKER_IMAGE: $CSCS_BUILD_ENV_DOCKER_IMAGE_PROD
    LAB_KERNEL_PATH: $CSCS_LAB_KERNEL_PATH_PROD
    OKD_CLUSTER_UID: $CSCS_OKD_PROD_UID
    #SPACK_ENV_TAR_FILE: ebrains-spack-builds${CI_PIPELINE_ID}.tar.gz
    SPACK_ENV_TAR_FILE: ebrains-spack-builds.tar.gz
    OC_PROJECT: jupyterhub
  resource_group: shared-NFS-mount-prod-cscs
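  # manual job, offered only on non-default branches whose name contains 'release';
  # with allow_failure: false the pipeline waits (shows as blocked) until the job is run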
  rules:
    - if: '$CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH && $CI_COMMIT_BRANCH =~ /release/'
      when: manual
      allow_failure: false
# deploy on the production environment of the OKD prod cluster at JSC
# runs on protected branches only as the token variable is protected
deploy-prod-environment-jsc:
  extends: .deploy-build-environment
  variables:
    OPENSHIFT_SERVER: $JSC_OPENSHIFT_PROD_SERVER
    OPENSHIFT_TOKEN: $JSC_OPENSHIFT_PROD_TOKEN
    INSTALLATION_ROOT: $JSC_INSTALLATION_ROOT_PROD
    SPACKIFIED_ENV: $JSC_SPACKIFIED_ENV_PROD
    OP: $JSC_OPERATION_PROD
    BUILD_ENV_DOCKER_IMAGE: $JSC_BUILD_ENV_DOCKER_IMAGE_PROD
    LAB_KERNEL_PATH: $JSC_LAB_KERNEL_PATH_PROD
    OKD_CLUSTER_UID: $JSC_OKD_PROD_UID
    #SPACK_ENV_TAR_FILE: ebrains-spack-builds${CI_PIPELINE_ID}.tar.gz
    SPACK_ENV_TAR_FILE: ebrains-spack-builds.tar.gz
    OC_PROJECT: jupyterhub
  resource_group: shared-NFS-mount-prod-jsc
  rules:
    - if: '$CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH && $CI_COMMIT_BRANCH =~ /release/'
      when: manual
      allow_failure: false