---
# GitLab CI configuration: builds the EBRAINS Spack environment via an
# OpenShift Job and registers the resulting JupyterLab kernels.
# single-stage pipeline: every job deploys a Spack build environment
stages:
  - deploy

# Hidden job template: starts an OpenShift Job that builds the Spack
# environment, ships the build context to its pod, waits for completion,
# harvests the logs, and fails the pipeline on known Spack errors.
# Concrete jobs must supply OPENSHIFT_SERVER/TOKEN, INSTALLATION_ROOT,
# SPACKIFIED_ENV, OP, SPACK_ENV_TAR_FILE, BUILD_ENV_DOCKER_IMAGE,
# LAB_KERNEL_PATH, OKD_CLUSTER_UID and OC_PROJECT via `variables:`.
.deploy-build-environment:
  stage: deploy
  script:
    - oc login "$OPENSHIFT_SERVER" --token="$OPENSHIFT_TOKEN"
    # bundle everything the in-cluster build needs into one tarball
    - tar czf ${SPACK_ENV_TAR_FILE} packages/ repo.yaml spack.yaml create_JupyterLab_kernel.sh
    # create job description file
    - chmod a+x create_job.sh
    - ./create_job.sh $INSTALLATION_ROOT $SPACKIFIED_ENV $OP $SPACK_ENV_TAR_FILE $CI_PIPELINE_ID $BUILD_ENV_DOCKER_IMAGE $LAB_KERNEL_PATH $OKD_CLUSTER_UID
    - cat simplejob.yml
    # select the project in openshift
    # NOTE(review): this command was missing in the scraped source although the
    # comment and the per-job OC_PROJECT variable imply it — confirm against git history
    - oc project $OC_PROJECT
    # start the deploy job
    - oc create -f simplejob.yml
    ## wait for job to finish https://stackoverflow.com/questions/5073453/wait-for-kubernetes-job-to-complete-on-either-failure-success-using-command-line
    - oc get job/simplejob${CI_PIPELINE_ID} -o=jsonpath='{.status}' -w && oc get job/simplejob${CI_PIPELINE_ID} -o=jsonpath='{.status.conditions[*].type}' | grep -i -E 'failed|complete' || echo 'Failed'
    # wait for job's pod to become available so as to copy from the gitlab runner to the OpenShift pod
    # the necessary files that define the environment that spack needs to build
    - while true; do x=$(oc get pods | grep simplejob${CI_PIPELINE_ID} | awk '{ print $3 }'); if [ "$x" == "Running" ]; then break; fi; sleep 10; done
    - pod=$(oc get pods | grep simplejob${CI_PIPELINE_ID} | awk '{ print $1 }')
    - oc rsync copy_folder $pod:/opt/app-root/src
    # poll every 5 minutes until the build pod leaves the Running state
    - while true; do sleep 300; x=$(oc get pods | grep $pod | awk '{ print $3 }'); if [ "$x" != "Running" ]; then break; fi; done
    - oc logs jobs/simplejob${CI_PIPELINE_ID} | tee log.txt
    # if spack install has failed, fail the pipeline
    - if [ $(cat log.txt | grep -c "No module available for package") -gt 0 ]; then exit 1; fi
    # delete the job from OpenShift as we have the logs here
    - oc delete job simplejob${CI_PIPELINE_ID} || true
  tags:
    - shell-runner
# Deploy in the lab-int environment the version of the tools to be
# tested before released to production (push pipeline)
# deploy on the dev environment of the okd dev cluster at CSCS
# runs on protected branches only as the token variable is protected
deploy-int-release-dev-cscs:
  extends: .deploy-build-environment
  variables:
    OPENSHIFT_SERVER: $CSCS_OPENSHIFT_DEV_SERVER
    OPENSHIFT_TOKEN: $CSCS_OPENSHIFT_DEV_TOKEN
    INSTALLATION_ROOT: $CSCS_INSTALLATION_ROOT_DEV
    SPACKIFIED_ENV: $CSCS_SPACKIFIED_ENV_DEV
    OP: $CSCS_OPERATION_DEV
    BUILD_ENV_DOCKER_IMAGE: $CSCS_BUILD_ENV_DOCKER_IMAGE_DEV
    LAB_KERNEL_PATH: $CSCS_LAB_KERNEL_PATH_DEV
    OKD_CLUSTER_UID: $CSCS_OKD_DEV_UID
    #SPACK_ENV_TAR_FILE: ebrains-spack-builds${CI_PIPELINE_ID}.tar.gz
    SPACK_ENV_TAR_FILE: ebrains-spack-builds.tar.gz
    OC_PROJECT: jupyterhub-int
  # serialize jobs that share the dev NFS mount
  resource_group: shared-NFS-mount-dev-cscs
  # push pipelines only — scheduled runs are handled by the exp-release jobs
  except:
    variables:
      - $CI_PIPELINE_SOURCE == "schedule"
# Deploy the production release of tools (manual pipeline)
# deploy on the production environment of the okd prod cluster at CSCS
# runs on protected branches only as the token variable is protected
deploy-prod-release-prod-cscs:
  extends: .deploy-build-environment
  variables:
    OPENSHIFT_SERVER: $CSCS_OPENSHIFT_PROD_SERVER
    OPENSHIFT_TOKEN: $CSCS_OPENSHIFT_PROD_TOKEN
    INSTALLATION_ROOT: $CSCS_INSTALLATION_ROOT_PROD
    SPACKIFIED_ENV: $CSCS_SPACKIFIED_ENV_PROD
    OP: $CSCS_OPERATION_PROD
    BUILD_ENV_DOCKER_IMAGE: $CSCS_BUILD_ENV_DOCKER_IMAGE_PROD
    LAB_KERNEL_PATH: $CSCS_LAB_KERNEL_PATH_PROD
    OKD_CLUSTER_UID: $CSCS_OKD_PROD_UID
    #SPACK_ENV_TAR_FILE: ebrains-spack-builds${CI_PIPELINE_ID}.tar.gz
    SPACK_ENV_TAR_FILE: ebrains-spack-builds.tar.gz
    OC_PROJECT: jupyterhub
  # serialize jobs that share the prod NFS mount
  resource_group: shared-NFS-mount-prod-cscs
  # NOTE(review): the scraped source had this `- if:` item dangling without a
  # `rules:` key; restored to mirror deploy-prod-release-prod-jsc — confirm
  # against git history
  rules:
    - if: '$CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH && $CI_COMMIT_BRANCH =~ /ebrains/'
      when: manual
      allow_failure: false
# Deploy the production release of tools (manual pipeline)
# deploy on the production environment of the okd prod cluster at JSC
# runs on protected branches only as the token variable is protected
deploy-prod-release-prod-jsc:
  extends: .deploy-build-environment
  variables:
    OPENSHIFT_SERVER: $JSC_OPENSHIFT_PROD_SERVER
    OPENSHIFT_TOKEN: $JSC_OPENSHIFT_PROD_TOKEN
    INSTALLATION_ROOT: $JSC_INSTALLATION_ROOT_PROD
    SPACKIFIED_ENV: $JSC_SPACKIFIED_ENV_PROD
    OP: $JSC_OPERATION_PROD
    BUILD_ENV_DOCKER_IMAGE: $JSC_BUILD_ENV_DOCKER_IMAGE_PROD
    LAB_KERNEL_PATH: $JSC_LAB_KERNEL_PATH_PROD
    OKD_CLUSTER_UID: $JSC_OKD_PROD_UID
    #SPACK_ENV_TAR_FILE: ebrains-spack-builds${CI_PIPELINE_ID}.tar.gz
    SPACK_ENV_TAR_FILE: ebrains-spack-builds.tar.gz
    OC_PROJECT: jupyterhub
  # serialize jobs that share the prod NFS mount at JSC
  resource_group: shared-NFS-mount-prod-jsc
  # manual job, offered only on non-default branches matching /ebrains/;
  # allow_failure: false keeps the pipeline blocked until the job is run
  rules:
    - if: '$CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH && $CI_COMMIT_BRANCH =~ /ebrains/'
      when: manual
      allow_failure: false

# Deploy the experimental release of tools (scheduled pipeline)
# once a week from latest working version of integration release
# (branch=experimental_release) to an experimental JupyterLab kernel
# deploy on the dev environment of the okd dev cluster at CSCS
# runs on protected branches only as the token variable is protected
deploy-exp-release-dev-cscs:
  extends: .deploy-build-environment
  variables:
    OPENSHIFT_SERVER: $CSCS_OPENSHIFT_DEV_SERVER
    OPENSHIFT_TOKEN: $CSCS_OPENSHIFT_DEV_TOKEN
    INSTALLATION_ROOT: $CSCS_INSTALLATION_ROOT_DEV
    SPACKIFIED_ENV: experimental
    OP: update
    BUILD_ENV_DOCKER_IMAGE: $CSCS_BUILD_ENV_DOCKER_IMAGE_DEV
    LAB_KERNEL_PATH: /srv/jupyterlab_kernels/int/experimental
    OKD_CLUSTER_UID: $CSCS_OKD_DEV_UID
    #SPACK_ENV_TAR_FILE: ebrains-spack-builds${CI_PIPELINE_ID}.tar.gz
    SPACK_ENV_TAR_FILE: ebrains-spack-builds.tar.gz
    OC_PROJECT: jupyterhub-int
  # rewrite the tail of create_JupyterLab_kernel.sh so the kernel is
  # registered as "spack_experimental_release" (escapes are deliberate:
  # \$ defers expansion to when the generated script runs)
  before_script:
    - |
        head -n -9 create_JupyterLab_kernel.sh > tmp.txt && mv tmp.txt create_JupyterLab_kernel.sh
        cat << EOS >> create_JupyterLab_kernel.sh
        mkdir \$LAB_KERNEL_PATH/spack_experimental_release
        cat <<EOF >\$LAB_KERNEL_PATH/spack_experimental_release/kernel.json
        {
         "argv": ["\$LAB_KERNEL_PATH/bin/env.sh", "{connection_file}", "--profile=default"],
         "display_name": "EBRAINS_experimental_release",
         "name": "spack_experimental_release",
         "language": "python",
         "env": { "LAB_KERNEL_INFO": "spack_experimental_release_\$(date +"%d-%m-%y")" }
        }
        EOF
        EOS
  resource_group: shared-NFS-mount-dev-cscs
  # scheduled pipelines with RELEASE=experimental-dev only
  # (the scraped source had the $RELEASE condition under `refs:`, which is
  # invalid — moved under `variables:` to match the sibling exp jobs)
  only:
    refs:
      - schedules
    variables:
      - $RELEASE == "experimental-dev"

# Deploy the experimental release of tools (scheduled pipeline)
# once a week from latest working version of integration release
# (branch=experimental_release) to an experimental JupyterLab kernel
# deploy on the prod environment of the okd prod cluster at CSCS
# runs on protected branches only as the token variable is protected
deploy-exp-release-prod-cscs:
  extends: .deploy-build-environment
  variables:
    OPENSHIFT_SERVER: $CSCS_OPENSHIFT_PROD_SERVER
    OPENSHIFT_TOKEN: $CSCS_OPENSHIFT_PROD_TOKEN
    INSTALLATION_ROOT: $CSCS_INSTALLATION_ROOT_PROD
    SPACKIFIED_ENV: experimental
    OP: update
    BUILD_ENV_DOCKER_IMAGE: $CSCS_BUILD_ENV_DOCKER_IMAGE_PROD
    LAB_KERNEL_PATH: /srv/jupyterlab_kernels/prod/experimental
    OKD_CLUSTER_UID: $CSCS_OKD_PROD_UID
    #SPACK_ENV_TAR_FILE: ebrains-spack-builds${CI_PIPELINE_ID}.tar.gz
    SPACK_ENV_TAR_FILE: ebrains-spack-builds.tar.gz
    OC_PROJECT: jupyterhub
  # rewrite the tail of create_JupyterLab_kernel.sh so the kernel is
  # registered as "spack_experimental_release" (escapes are deliberate:
  # \$ defers expansion to when the generated script runs)
  before_script:
    - |
        head -n -9 create_JupyterLab_kernel.sh > tmp.txt && mv tmp.txt create_JupyterLab_kernel.sh
        cat << EOS >> create_JupyterLab_kernel.sh
        mkdir \$LAB_KERNEL_PATH/spack_experimental_release
        cat <<EOF >\$LAB_KERNEL_PATH/spack_experimental_release/kernel.json
        {
         "argv": ["\$LAB_KERNEL_PATH/bin/env.sh", "{connection_file}", "--profile=default"],
         "display_name": "EBRAINS_experimental_release",
         "name": "spack_experimental_release",
         "language": "python",
         "env": { "LAB_KERNEL_INFO": "spack_experimental_release_\$(date +"%d-%m-%y")" }
        }
        EOF
        EOS
  resource_group: shared-NFS-mount-prod-cscs
  # scheduled pipelines with RELEASE=experimental only
  only:
    refs:
      - schedules
    variables:
      - $RELEASE == "experimental"
  allow_failure: false


# Deploy the experimental release of tools (sheduled pipeline)
# once a week from latest working version of integration release 
# (branch=experimental_release) to an experimental JupyterLab kernel
# deploy on the prod environment of the okd prod cluster at JSC
# runs on protected branches only as the token variable is protected
deploy-exp-release-prod-jsc:
  extends: .deploy-build-environment
  variables:
    OPENSHIFT_SERVER: $JSC_OPENSHIFT_PROD_SERVER
    OPENSHIFT_TOKEN: $JSC_OPENSHIFT_PROD_TOKEN
    INSTALLATION_ROOT: $JSC_INSTALLATION_ROOT_PROD
    SPACKIFIED_ENV: experimental
    OP: update
    BUILD_ENV_DOCKER_IMAGE: $JSC_BUILD_ENV_DOCKER_IMAGE_PROD
    LAB_KERNEL_PATH: /srv/jupyterlab_kernels/prod/experimental
    OKD_CLUSTER_UID: $JSC_OKD_PROD_UID
    #SPACK_ENV_TAR_FILE: ebrains-spack-builds${CI_PIPELINE_ID}.tar.gz
    SPACK_ENV_TAR_FILE: ebrains-spack-builds.tar.gz
    OC_PROJECT: jupyterhub
  before_script:
    - | 
        head -n -9 create_JupyterLab_kernel.sh > tmp.txt && mv tmp.txt create_JupyterLab_kernel.sh
        cat << EOS >> create_JupyterLab_kernel.sh
        mkdir \$LAB_KERNEL_PATH/spack_experimental_release
        cat <<EOF >\$LAB_KERNEL_PATH/spack_experimental_release/kernel.json
        {
         "argv": ["\$LAB_KERNEL_PATH/bin/env.sh", "{connection_file}", "--profile=default"],
         "display_name": "EBRAINS_experimental_release",
         "name": "spack_experimental_release",
         "language": "python",
         "env": { "LAB_KERNEL_INFO": "spack_experimental_release_\$(date +"%d-%m-%y")" }
        }
        EOF
        EOS
  resource_group: shared-NFS-mount-prod-jsc
  only:
    refs:
      - schedules
    variables:
      - $RELEASE == "experimental"
  allow_failure: false