stages:
- build
- sync
# start an OpenShift Job that will build the Spack environment
.deploy-build-environment:
- oc login "$OPENSHIFT_SERVER" --token="$OPENSHIFT_TOKEN"
- tar czf ${SPACK_ENV_TAR_FILE} packages/ repo.yaml spack.yaml create_JupyterLab_kernel.sh
- mkdir copy_folder
- mv ${SPACK_ENV_TAR_FILE} copy_folder
# create job description file
- chmod a+x create_job.sh
- ./create_job.sh $INSTALLATION_ROOT $SPACKIFIED_ENV $OP $SPACK_ENV_TAR_FILE $CI_PIPELINE_ID $BUILD_ENV_DOCKER_IMAGE $LAB_KERNEL_PATH $OKD_CLUSTER_UID
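# create_job.sh itself is not part of this file; purely as an illustration (an assumption,
# not the actual template), the Job manifest it renders from the arguments above is
# expected to look roughly like:
#   apiVersion: batch/v1
#   kind: Job
#   metadata:
#     name: simplejob${CI_PIPELINE_ID}
#   spec:
#     template:
#       spec:
#         containers:
#           - name: build-env
#             image: ${BUILD_ENV_DOCKER_IMAGE}
#             securityContext:
#               runAsUser: ${OKD_CLUSTER_UID}
#         restartPolicy: Never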
- oc project $OC_PROJECT
## wait for the job to finish, see https://stackoverflow.com/questions/5073453/wait-for-kubernetes-job-to-complete-on-either-failure-success-using-command-line
- oc get job/simplejob${CI_PIPELINE_ID} -o=jsonpath='{.status}' -w && oc get job/simplejob${CI_PIPELINE_ID} -o=jsonpath='{.status.conditions[*].type}' | grep -i -E 'failed|complete' || echo 'Failed'
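# a possible simpler alternative, assuming the oc client is recent enough to provide
# `oc wait` (note: this only waits for success, a failed job would run into the timeout):
#   oc wait --for=condition=Complete --timeout=3600s job/simplejob${CI_PIPELINE_ID}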
# wait for the job's pod to become available, so that the files defining the environment
# that Spack needs to build can be copied from the GitLab runner to the OpenShift pod
- while true; do x=$(oc get pods | grep simplejob${CI_PIPELINE_ID} | awk '{ print $3 }'); if [ "$x" == "Running" ]; then break; fi; sleep 10; done
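# a roughly equivalent wait, relying on the job-name label that Kubernetes adds to a
# Job's pods (assumes `oc wait` is available and the pod has already been scheduled):
#   oc wait --for=condition=Ready --timeout=600s pod -l job-name=simplejob${CI_PIPELINE_ID}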
- pod=$(oc get pods | grep simplejob${CI_PIPELINE_ID} | awk '{ print $1 }')
- oc rsync copy_folder $pod:/opt/app-root/src
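# oc rsync copies the directory itself (no trailing slash), so inside the pod the tarball
# should end up at /opt/app-root/src/copy_folder/${SPACK_ENV_TAR_FILE}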
# when the build job finishes, get the logs
- while true; do sleep 300; x=$(oc get pods | grep "$pod" | awk '{ print $3 }'); if [ "$x" != "Running" ]; then break; fi; done
- oc logs jobs/simplejob${CI_PIPELINE_ID} | tee log.txt
# if spack install has failed, fail the pipeline
- if [ $(cat log.txt |grep "No module available for package"|wc -l) -gt 0 ]; then exit 1;fi;
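# a leaner equivalent of the check above, letting grep set the exit code directly:
#   if grep -q "No module available for package" log.txt; then exit 1; fi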
# delete the job from OpenShift as we have the logs here
- oc delete job simplejob${CI_PIPELINE_ID} || true
# Deploy to the lab-int environment the version of the tools to be
# tested before release to production (push pipeline)
# deploy on the dev environment of the okd dev cluster at CSCS
# runs on protected branches only as the token variable is protected
deploy-int-release-dev-cscs:
extends: .deploy-build-environment
variables:
OPENSHIFT_SERVER: $CSCS_OPENSHIFT_DEV_SERVER
OPENSHIFT_TOKEN: $CSCS_OPENSHIFT_DEV_TOKEN
INSTALLATION_ROOT: $CSCS_INSTALLATION_ROOT_DEV
SPACKIFIED_ENV: $CSCS_SPACKIFIED_ENV_DEV
OP: $CSCS_OPERATION_DEV
BUILD_ENV_DOCKER_IMAGE: $CSCS_BUILD_ENV_DOCKER_IMAGE_DEV
LAB_KERNEL_PATH: $CSCS_LAB_KERNEL_PATH_DEV
OKD_CLUSTER_UID: $CSCS_OKD_DEV_UID
#SPACK_ENV_TAR_FILE: ebrains-spack-builds${CI_PIPELINE_ID}.tar.gz
SPACK_ENV_TAR_FILE: ebrains-spack-builds.tar.gz
OC_PROJECT: jupyterhub-int
resource_group: shared-NFS-mount-dev-cscs
only:
- master
except:
variables:
- $CI_PIPELINE_SOURCE == "schedule"
# Deploy the production release of tools (manual pipeline)
# deploy on the production environment of the okd prod cluster at CSCS
# runs on protected branches only as the token variable is protected
deploy-prod-release-prod-cscs:
extends: .deploy-build-environment
variables:
OPENSHIFT_SERVER: $CSCS_OPENSHIFT_PROD_SERVER
OPENSHIFT_TOKEN: $CSCS_OPENSHIFT_PROD_TOKEN
INSTALLATION_ROOT: $CSCS_INSTALLATION_ROOT_PROD
SPACKIFIED_ENV: $CSCS_SPACKIFIED_ENV_PROD
OP: $CSCS_OPERATION_PROD
BUILD_ENV_DOCKER_IMAGE: $CSCS_BUILD_ENV_DOCKER_IMAGE_PROD
LAB_KERNEL_PATH: $CSCS_LAB_KERNEL_PATH_PROD
OKD_CLUSTER_UID: $CSCS_OKD_PROD_UID
#SPACK_ENV_TAR_FILE: ebrains-spack-builds${CI_PIPELINE_ID}.tar.gz
SPACK_ENV_TAR_FILE: ebrains-spack-builds.tar.gz
OC_PROJECT: jupyterhub
resource_group: shared-NFS-mount-prod-cscs
rules:
- if: '$CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH && $CI_COMMIT_BRANCH =~ /ebrains/'
when: manual
allow_failure: false
# Deploy the production release of tools (manual pipeline)
# deploy on the production environment of the okd prod cluster at JSC
# runs on protected branches only as the token variable is protected
deploy-prod-release-prod-jsc:
extends: .deploy-build-environment
variables:
OPENSHIFT_SERVER: $JSC_OPENSHIFT_PROD_SERVER
OPENSHIFT_TOKEN: $JSC_OPENSHIFT_PROD_TOKEN
INSTALLATION_ROOT: $JSC_INSTALLATION_ROOT_PROD
SPACKIFIED_ENV: $JSC_SPACKIFIED_ENV_PROD
OP: $JSC_OPERATION_PROD
BUILD_ENV_DOCKER_IMAGE: $JSC_BUILD_ENV_DOCKER_IMAGE_PROD
LAB_KERNEL_PATH: $JSC_LAB_KERNEL_PATH_PROD
OKD_CLUSTER_UID: $JSC_OKD_PROD_UID
#SPACK_ENV_TAR_FILE: ebrains-spack-builds${CI_PIPELINE_ID}.tar.gz
SPACK_ENV_TAR_FILE: ebrains-spack-builds.tar.gz
OC_PROJECT: jupyterhub
resource_group: shared-NFS-mount-prod-jsc
rules:
- if: '$CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH && $CI_COMMIT_BRANCH =~ /ebrains/'
when: manual
allow_failure: false
# Deploy the experimental release of tools (scheduled pipeline)
# once a week from latest working version of integration release
# (branch=experimental_release) to an experimental JupyterLab kernel
# deploy on the dev environment of the okd dev cluster at CSCS
# runs on protected branches only as the token variable is protected
deploy-exp-release-dev-cscs:
extends: .deploy-build-environment
variables:
OPENSHIFT_SERVER: $CSCS_OPENSHIFT_DEV_SERVER
OPENSHIFT_TOKEN: $CSCS_OPENSHIFT_DEV_TOKEN
INSTALLATION_ROOT: $CSCS_INSTALLATION_ROOT_DEV
SPACKIFIED_ENV: experimental
OP: update
BUILD_ENV_DOCKER_IMAGE: $CSCS_BUILD_ENV_DOCKER_IMAGE_DEV
LAB_KERNEL_PATH: /srv/jupyterlab_kernels/int/experimental
OKD_CLUSTER_UID: $CSCS_OKD_DEV_UID
#SPACK_ENV_TAR_FILE: ebrains-spack-builds${CI_PIPELINE_ID}.tar.gz
SPACK_ENV_TAR_FILE: ebrains-spack-builds.tar.gz
OC_PROJECT: jupyterhub-int
before_script:
- |
head -n -9 create_JupyterLab_kernel.sh > tmp.txt && mv tmp.txt create_JupyterLab_kernel.sh
cat << EOS >> create_JupyterLab_kernel.sh
mkdir \$LAB_KERNEL_PATH/spack_experimental_release
cat <<EOF >\$LAB_KERNEL_PATH/spack_experimental_release/kernel.json
{
"argv": ["\$LAB_KERNEL_PATH/bin/env.sh", "{connection_file}", "--profile=default"],
"display_name": "EBRAINS_experimental_release",
"env": { "LAB_KERNEL_NAME": "EBRAINS_experimental_release", "LAB_KERNEL_RELEASE_DATE": "\$(date +"%Y-%m-%d")" }
}
EOF
EOS
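# for reference, with LAB_KERNEL_PATH=/srv/jupyterlab_kernels/int/experimental the snippet
# appended above should produce a kernel.json roughly like this (illustrative only, the
# release date is filled in at build time):
#   {
#     "argv": ["/srv/jupyterlab_kernels/int/experimental/bin/env.sh", "{connection_file}", "--profile=default"],
#     "display_name": "EBRAINS_experimental_release",
#     "env": { "LAB_KERNEL_NAME": "EBRAINS_experimental_release", "LAB_KERNEL_RELEASE_DATE": "<build date>" }
#   }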
resource_group: shared-NFS-mount-dev-cscs
only:
refs:
- schedules
- $RELEASE == "experimental-dev"
allow_failure: false
# Deploy the experimental release of tools (scheduled pipeline)
# once a week from latest working version of integration release
# (branch=experimental_release) to an experimental JupyterLab kernel
# deploy on the prod environment of the okd prod cluster at CSCS
# runs on protected branches only as the token variable is protected
deploy-exp-release-prod-cscs:
extends: .deploy-build-environment
variables:
OPENSHIFT_SERVER: $CSCS_OPENSHIFT_PROD_SERVER
OPENSHIFT_TOKEN: $CSCS_OPENSHIFT_PROD_TOKEN
INSTALLATION_ROOT: $CSCS_INSTALLATION_ROOT_PROD
SPACKIFIED_ENV: experimental
OP: update
BUILD_ENV_DOCKER_IMAGE: $CSCS_BUILD_ENV_DOCKER_IMAGE_PROD
LAB_KERNEL_PATH: /srv/jupyterlab_kernels/prod/experimental
OKD_CLUSTER_UID: $CSCS_OKD_PROD_UID
#SPACK_ENV_TAR_FILE: ebrains-spack-builds${CI_PIPELINE_ID}.tar.gz
SPACK_ENV_TAR_FILE: ebrains-spack-builds.tar.gz
OC_PROJECT: jupyterhub
before_script:
- |
head -n -9 create_JupyterLab_kernel.sh > tmp.txt && mv tmp.txt create_JupyterLab_kernel.sh
cat << EOS >> create_JupyterLab_kernel.sh
mkdir \$LAB_KERNEL_PATH/spack_experimental_release
cat <<EOF >\$LAB_KERNEL_PATH/spack_experimental_release/kernel.json
{
"argv": ["\$LAB_KERNEL_PATH/bin/env.sh", "{connection_file}", "--profile=default"],
"display_name": "EBRAINS_experimental_release",
"env": { "LAB_KERNEL_NAME": "EBRAINS_experimental_release", "LAB_KERNEL_RELEASE_DATE": "\$(date +"%Y-%m-%d")" }
}
EOF
EOS
resource_group: shared-NFS-mount-prod-cscs
only:
refs:
- schedules
variables:
- $RELEASE == "experimental"
allow_failure: false
# Deploy the experimental release of tools (scheduled pipeline)
# once a week from latest working version of integration release
# (branch=experimental_release) to an experimental JupyterLab kernel
# deploy on the prod environment of the okd prod cluster at JSC
# runs on protected branches only as the token variable is protected
deploy-exp-release-prod-jsc:
extends: .deploy-build-environment
variables:
OPENSHIFT_SERVER: $JSC_OPENSHIFT_PROD_SERVER
OPENSHIFT_TOKEN: $JSC_OPENSHIFT_PROD_TOKEN
INSTALLATION_ROOT: $JSC_INSTALLATION_ROOT_PROD
SPACKIFIED_ENV: experimental
OP: update
BUILD_ENV_DOCKER_IMAGE: $JSC_BUILD_ENV_DOCKER_IMAGE_PROD
LAB_KERNEL_PATH: /srv/jupyterlab_kernels/prod/experimental
OKD_CLUSTER_UID: $JSC_OKD_PROD_UID
#SPACK_ENV_TAR_FILE: ebrains-spack-builds${CI_PIPELINE_ID}.tar.gz
SPACK_ENV_TAR_FILE: ebrains-spack-builds.tar.gz
OC_PROJECT: jupyterhub
before_script:
- |
head -n -9 create_JupyterLab_kernel.sh > tmp.txt && mv tmp.txt create_JupyterLab_kernel.sh
cat << EOS >> create_JupyterLab_kernel.sh
mkdir \$LAB_KERNEL_PATH/spack_experimental_release
cat <<EOF >\$LAB_KERNEL_PATH/spack_experimental_release/kernel.json
{
"argv": ["\$LAB_KERNEL_PATH/bin/env.sh", "{connection_file}", "--profile=default"],
"display_name": "EBRAINS_experimental_release",
"env": { "LAB_KERNEL_NAME": "EBRAINS_experimental_release", "LAB_KERNEL_RELEASE_DATE": "\$(date +"%Y-%m-%d")" }
}
EOF
EOS
resource_group: shared-NFS-mount-prod-jsc
only:
refs:
- schedules
variables:
- $RELEASE == "experimental"
allow_failure: false
build-spack-env-on-runner:
stage: build
rules:
- if: $CI_PIPELINE_SOURCE == 'merge_request_event' && $CI_MERGE_REQUEST_TARGET_BRANCH_NAME == 'test-spack-0.18.1'
tags:
- docker-runner
- read-only
image: docker-registry.ebrains.eu/tc/ebrains-spack-build-env:gitlab_runners_latest
variables:
SPACK_DEV_ENV: ebrains-dev
SPACK_DEV_PATH: $CI_PROJECT_DIR/spack
SPACK_USER_CACHE_PATH: $CI_PROJECT_DIR/.spack
SPACK_USER_CONFIG_PATH: $CI_PROJECT_DIR/.spack
TMP: $CI_PROJECT_DIR/.spack-tmp
script:
- git clone --depth 1 -c advice.detachedHead=false -c feature.manyFiles=true --branch $SPACK_VERSION https://github.com/spack/spack $SPACK_DEV_PATH
- mkdir $TMP
- cp packages.yaml $SPACK_DEV_PATH/etc/spack/packages.yaml
- |
cat <<EOF > $SPACK_DEV_PATH/etc/spack/defaults/upstreams.yaml
upstreams:
ebrains-gitlab-spack-instance:
install_tree: /mnt/spack_v0.18.1/opt/spack
EOF
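# one optional way to check that the upstream written above is actually picked up
# (not required for the build):
#   spack config get upstreams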
- . $SPACK_DEV_PATH/share/spack/setup-env.sh
- spack find
- spack load gcc@10.3.0
- spack compiler find
- spack repo add .
- spack repo list
- spack env create $SPACK_DEV_ENV spack.yaml
- spack env activate $SPACK_DEV_ENV
- spack concretize -f --fresh
- spack install -y --fresh --no-check-signature
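# if a package fails to build here, its stage directory (including spack-build-out.txt)
# should land under $TMP and be exported by the on_failure artifacts below; it can be
# located with e.g.:
#   find .spack-tmp -name spack-build-out.txt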
# cache:
# key: spack-cache-$CI_COMMIT_REF_SLUG
# paths:
# - "$SPACK_DEV_PATH"
# - "$SPACK_USER_CONFIG_PATH"
# when: always
artifacts:
paths:
- .spack-tmp/root/spack-stage
when: on_failure
sync-gitlab-spack-instance:
stage: sync
tags:
- docker-runner
- read-write
image: docker-registry.ebrains.eu/tc/ebrains-spack-build-env:gitlab_runners_nfs_latest
variables:
SPACK_NFS_ENV: ebrains-runner-build
SPACK_VERSION: v0.18.1
SPACK_PATH: /mnt/spack_v0.18.1
SPACK_USER_CACHE_PATH: $SPACK_PATH/.spack
SPACK_USER_CONFIG_PATH: $SPACK_PATH/.spack
SPACK_REPO_PATH: $SPACK_PATH/ebrains-spack-builds
script:
- |
if [ ! -d $SPACK_PATH ]; then
git clone --depth 1 -c advice.detachedHead=false -c feature.manyFiles=true --branch $SPACK_VERSION https://github.com/spack/spack $SPACK_PATH
cp $CI_PROJECT_DIR/packages.yaml $SPACK_PATH/etc/spack/packages.yaml
fi
- . $SPACK_PATH/share/spack/setup-env.sh
# - spack bootstrap untrust github-actions
- spack compiler find
- spack compiler list
- spack load gcc@10.3.0 || spack install gcc@10.3.0
- spack load gcc@10.3.0
- spack compiler find
- spack compiler list
- spack install python@3.8.11 %gcc@10.3.0 || (cp -r /tmp/spack/spack-stage $CI_PROJECT_DIR/build_logs; exit 1)
# - for section in $(spack config list); do spack config blame $section; done
- mkdir -p $SPACK_REPO_PATH && cp -r -t $SPACK_REPO_PATH $CI_PROJECT_DIR/{packages,repo.yaml}
- spack repo list | grep -q ebrains-spack-builds && echo "Repository registered already" || spack repo add $SPACK_REPO_PATH
- spack repo list
- spack env list | grep -q $SPACK_NFS_ENV && echo "Environment created already" || spack env create $SPACK_NFS_ENV $CI_PROJECT_DIR/spack.yaml
- spack env activate $SPACK_NFS_ENV
- cp $CI_PROJECT_DIR/spack.yaml $SPACK_ROOT/var/spack/environments/$SPACK_NFS_ENV/spack.yaml
- spack concretize -f --fresh
- spack install -y --fresh --no-check-signature || (cp -r /tmp/spack/spack-stage $CI_PROJECT_DIR/build_logs; exit 1)
- spack module tcl refresh -y
- spack reindex
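# `spack module tcl refresh` regenerates the module files and `spack reindex` rebuilds
# the installation database, so both stay consistent with what was just installed;
# an optional post-run check:
#   spack find --long | tail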
artifacts:
paths:
- build_logs/
when: on_failure
rules:
- if: '$CI_COMMIT_BRANCH =~ /test-spack-0.18.1/'