Commit 0c9fa2cb authored by Athanasios Karmas

Updates for extended support of spack envs in the TC pipelines

parent da0b67cb
Pipeline #10834 failed in 14 minutes and 23 seconds
@@ -3,7 +3,8 @@ stages:
 variables:
   OPENSHIFT_SERVER: $OPENSHIFT_DEV_SERVER
-  BUILD_ENV: $BUILD_ENV
+  INSTALLATION_ROOT: $INSTALLATION_ROOT
+  SPACKIFIED_ENV: $SPACKIFIED_ENV
   OP: $OPERATION
   #SPACK_ENV_TAR_FILE: ebrains-spack-builds${CI_PIPELINE_ID}.tar.gz
   SPACK_ENV_TAR_FILE: ebrains-spack-builds.tar.gz
@@ -18,7 +19,7 @@ deploy-build-environment:
   script:
     # create job description file
    - chmod a+x create_job.sh
-    - ./create_job.sh $BUILD_ENV $OP $SPACK_ENV_TAR_FILE $CI_PIPELINE_ID
+    - ./create_job.sh $INSTALLATION_ROOT $SPACKIFIED_ENV $OP $SPACK_ENV_TAR_FILE $CI_PIPELINE_ID
    - cat simplejob.yml
     # select the project in openshift
    - oc project jupyterhub-int
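
The first two hunks are from the CI configuration: the single $BUILD_ENV variable is replaced by $INSTALLATION_ROOT (the directory under /srv that holds the spack installation) and $SPACKIFIED_ENV (the spack environment to use), and both are forwarded to create_job.sh together with $OP, the tarball name and the pipeline id. A minimal local dry run of the new call order might look like the sketch below; every value is a placeholder standing in for a CI/CD variable, not something taken from the pipeline settings.

# Dry-run sketch; all values are placeholders for CI/CD variables.
INSTALLATION_ROOT=test-build             # placeholder for $INSTALLATION_ROOT
SPACKIFIED_ENV=ebrains-spack-builds      # placeholder for $SPACKIFIED_ENV
OP=create                                # placeholder for $OPERATION
SPACK_ENV_TAR_FILE=ebrains-spack-builds.tar.gz
CI_PIPELINE_ID=10834                     # any id works for a local dry run
chmod a+x create_job.sh
./create_job.sh "$INSTALLATION_ROOT" "$SPACKIFIED_ENV" "$OP" "$SPACK_ENV_TAR_FILE" "$CI_PIPELINE_ID"
cat simplejob.yml                        # inspect the generated job description

The following hunks update create_JupyterLab_kernel.sh accordingly.
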
@@ -4,13 +4,14 @@
 #usage :./create_JupyterLab_kernel.sh $BUILD-ENV
 #==============================================================================
-ENV=$1
+INSTALLATION_ROOT=$1
+SPACKIFIED_ENV=$2
 CONF_PATH="/srv/jupyterlab_kernels/int/release20210930"
 # load spack and spack repos
-cp -r /srv/$ENV/spack/.spack ~
-source /srv/$ENV/spack/share/spack/setup-env.sh
-spack repo add /srv/$ENV/ebrains-spack-builds
+cp -r /srv/$INSTALLATION_ROOT/spack/.spack ~
+source /srv/$INSTALLATION_ROOT/spack/share/spack/setup-env.sh
+spack repo add /srv/$INSTALLATION_ROOT/ebrains-spack-builds
 # prepare the env file
 mkdir $CONF_PATH/bin
@@ -20,7 +21,8 @@ set -euxo pipefail
 EOF
 # load here all tools
-spack load --sh -r python@3.8.11 py-ipykernel py-pip py-numpy@1.21.0 py-scipy py-pandas py-seaborn py-matplotlib arbor nest@3.0 neuron py-pynn tvb-data tvb-library %gcc@10.3.0 >> $CONF_PATH/bin/env.sh
+#spack load --sh -r python@3.8.11 py-ipykernel py-pip py-numpy@1.21.0 py-scipy py-pandas py-seaborn py-matplotlib arbor nest@3.0 neuron py-pynn tvb-data tvb-library %gcc@10.3.0 >> $CONF_PATH/bin/env.sh
+spack env activate $SPACKIFIED_ENV --sh >> $CONF_PATH/bin/env.sh
 # end of env creation
 cat <<EOF >>$CONF_PATH/bin/env.sh
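
In the kernel-creation script the long `spack load --sh -r ...` line is kept only as a comment; the env file is now filled from `spack env activate $SPACKIFIED_ENV --sh`, which prints the shell commands needed to activate the environment, so sourcing env.sh later replays that activation instead of resolving individual packages. A small sketch of how one could preview what gets appended (the environment name below is a placeholder):

# Sketch: preview the activation commands that get appended to env.sh.
SPACKIFIED_ENV=ebrains-spack-builds     # placeholder; the script receives this as $2
spack env activate "$SPACKIFIED_ENV" --sh | head -n 20

Whatever later sources $CONF_PATH/bin/env.sh therefore activates the environment before the kernel starts. The next hunks touch create_job.sh, the helper that writes simplejob.yml for the OpenShift job.
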
 #!/bin/bash
-BUILD_ENV=$1
-OP=$2
-SPACK_ENV_TAR_FILE=$3
-OC_JOB_ID=$4
+INSTALLATION_ROOT=$1
+SPACKIFIED_ENV=$2
+OP=$3
+SPACK_ENV_TAR_FILE=$4
+OC_JOB_ID=$5
 cat <<EOT >> simplejob.yml
 apiVersion: batch/v1
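
create_job.sh now takes five positional parameters instead of four, so a caller still using the old order would silently shift every argument. A guard such as the one below could make that explicit; it is a suggestion, not part of the commit:

# Suggested (hypothetical) guard for the new five-argument signature.
if [ "$#" -ne 5 ]; then
    echo "usage: $0 INSTALLATION_ROOT SPACKIFIED_ENV OP SPACK_ENV_TAR_FILE OC_JOB_ID" >&2
    exit 1
fi
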
@@ -30,7 +31,7 @@ spec:
 volumeMounts:
 - name: sharedbin
 mountPath: /srv
-command: ["/usr/local/bin/deploy-build-env.sh", "$BUILD_ENV", "$OP", "$SPACK_ENV_TAR_FILE"]
+command: ["/usr/local/bin/deploy-build-env.sh", "$INSTALLATION_ROOT", "$SPACKIFIED_ENV", "$OP", "$SPACK_ENV_TAR_FILE"]
 volumes:
 - name: sharedbin
 persistentVolumeClaim:
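
Inside the job container, deploy-build-env.sh is now started with four arguments. How the script consumes them is outside this diff (the final hunk, from deploy-build-env.sh, still hard-codes /srv/test-build and ebrains-spack-builds), but the expected positional mapping would be the following, shown purely as a hypothetical illustration:

# Hypothetical mapping only; not taken from the repository.
INSTALLATION_ROOT=$1      # directory under /srv holding the spack installation
SPACKIFIED_ENV=$2         # spack environment to activate
OP=$3                     # the $OPERATION value from the pipeline
SPACK_ENV_TAR_FILE=$4     # e.g. ebrains-spack-builds.tar.gz
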
@@ -13,22 +13,7 @@ source /srv/test-build/spack/share/spack/setup-env.sh
 cd /srv/test-build/
 spack repo add ebrains-spack-builds
 cd ~
-echo "Loading packages..."
-spack load -r python@3.8.11 %gcc@10.3.0
-spack load -r py-numpy@1.21.0 %gcc@10.3.0
-spack load -r py-pip %gcc@10.3.0
-spack load -r py-scipy %gcc@10.3.0
-spack load -r py-pandas %gcc@10.3.0
-spack load -r py-seaborn %gcc@10.3.0
-spack load -r py-matplotlib %gcc@10.3.0
-spack load -r arbor %gcc@10.3.0
-spack load -r neuron %gcc@10.3.0
-spack load -r nest@3.0 %gcc@10.3.0
-spack load -r py-pynn %gcc@10.3.0
-spack load -r tvb-data %gcc@10.3.0
-spack load -r tvb-library %gcc@10.3.0
 spack env activate ebrains-spack-builds
 echo "Everything ready!"
 #echo "Starting Python..."
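
With the per-package `spack load -r` calls removed, everything the test build relies on is expected to come from activating the ebrains-spack-builds environment. A quick sanity check, not part of the commit and assuming the environment exposes a default view that puts python on PATH:

# Sketch: confirm the activated environment provides what the old
# spack load lines used to load one by one.
source /srv/test-build/spack/share/spack/setup-env.sh
spack env activate ebrains-spack-builds
spack find                                   # list the packages in the environment
python -c "import numpy, scipy, pandas, matplotlib; print('Everything ready!')"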