diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 9a27aad80a66ca397816f4cee8a1bf487e28c6fd..9447a1a0dbdf7e835bbca3bc97c7dc64a21a6e84 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -3,34 +3,28 @@ stages:
   - deploy
   - sync
 
+variables:
+  SPACK_VERSION: v0.18.1
+  BUILD_ENV_DOCKER_IMAGE: docker-registry.ebrains.eu/tc/ebrains-spack-build-env/devel:test
+
 # start an OpenShift Job that will build the Spack environment
 .deploy-build-environment:
   stage: deploy
   script:
+    # log in and select the project in OpenShift
     - oc login "$OPENSHIFT_SERVER" --token="$OPENSHIFT_TOKEN"
-    - tar czf ${SPACK_ENV_TAR_FILE} packages/ repo.yaml spack.yaml create_JupyterLab_kernel.sh
-    - mkdir copy_folder
-    - mv ${SPACK_ENV_TAR_FILE} copy_folder
+    - oc project $OC_PROJECT
     # create job description file
     - chmod a+x create_job.sh
-    - ./create_job.sh $INSTALLATION_ROOT $SPACKIFIED_ENV $OP $SPACK_ENV_TAR_FILE $CI_PIPELINE_ID $BUILD_ENV_DOCKER_IMAGE $LAB_KERNEL_PATH $OKD_CLUSTER_UID
+    - ./create_job.sh $CI_PIPELINE_ID $BUILD_ENV_DOCKER_IMAGE update $INSTALLATION_ROOT $SPACK_VERSION $SPACK_ENV $CI_COMMIT_BRANCH $RELEASE_NAME $LAB_KERNEL_ROOT
     - cat simplejob.yml
-    # select the project in openshift
-    - oc project $OC_PROJECT
     # start the deploy job
     - oc create -f simplejob.yml
-    ## wait for job to finish https://stackoverflow.com/questions/5073453wait-for-kubernetes-job-to-complete-on-either-failure-success-using-command-line
-    - oc get job/simplejob${CI_PIPELINE_ID} -o=jsonpath='{.status}' -w && oc get job/simplejob${CI_PIPELINE_ID} -o=jsonpath='{.status.conditions[*].type}' | grep -i -E 'failed|complete' || echo 'Failed'
-    # wait for job's pod to become available so as to copy from the gitlab runner to the OpenShift pod 
-    # the necessary files that define the environment that spack needs to build
-    - while true; do x=$(oc get pods |grep simplejob${CI_PIPELINE_ID}|awk '{ print $3}');if [ $x == "Running" ]; then break; fi; sleep 10; done
-    - pod=$(oc get pods |grep simplejob${CI_PIPELINE_ID}|awk '{ print $1}')
-    - oc rsync copy_folder $pod:/opt/app-root/src
-    # when build job finishes get the logs
-    - while true; do sleep 300; x=$(oc get pods |grep $pod|awk '{ print $3}');if [ $x != "Running" ]; then break; fi; done 
+    # wait for the job to finish, then collect the logs
+    - while true; do sleep 300; x=$(oc get pods | grep simplejob${CI_PIPELINE_ID} | awk '{ print $3}'); if [ $x != "Running" ]; then break; fi; done 
     - oc logs jobs/simplejob${CI_PIPELINE_ID} | tee log.txt
     # if spack install has failed, fail the pipeline
-    - if [ $(cat log.txt |grep "No module available for package"|wc -l) -gt 0 ]; then exit 1;fi;
+    - if [ $(cat log.txt | grep "No module available for package" | wc -l) -gt 0 ]; then exit 1; fi;
     # delete the job from OpenShift as we have the logs here
     - oc delete job simplejob${CI_PIPELINE_ID} || true
   tags:
@@ -45,15 +39,11 @@ deploy-int-release-dev-cscs:
   variables:
     OPENSHIFT_SERVER: $CSCS_OPENSHIFT_DEV_SERVER
     OPENSHIFT_TOKEN: $CSCS_OPENSHIFT_DEV_TOKEN
-    INSTALLATION_ROOT: $CSCS_INSTALLATION_ROOT_DEV
-    SPACKIFIED_ENV: $CSCS_SPACKIFIED_ENV_DEV
-    OP: $CSCS_OPERATION_DEV
-    BUILD_ENV_DOCKER_IMAGE: $CSCS_BUILD_ENV_DOCKER_IMAGE_DEV
-    LAB_KERNEL_PATH: $CSCS_LAB_KERNEL_PATH_DEV
-    OKD_CLUSTER_UID: $CSCS_OKD_DEV_UID
-    #SPACK_ENV_TAR_FILE: ebrains-spack-builds${CI_PIPELINE_ID}.tar.gz
-    SPACK_ENV_TAR_FILE: ebrains-spack-builds.tar.gz
     OC_PROJECT: jupyterhub-int
+    LAB_KERNEL_ROOT: /srv/jupyterlab_kernels/int
+    INSTALLATION_ROOT: /srv/test-build-2212
+    SPACK_ENV: test
+    RELEASE_NAME: EBRAINS-test
   resource_group: shared-NFS-mount-dev-cscs
   only:
     - master
@@ -61,6 +51,7 @@ deploy-int-release-dev-cscs:
     variables:
       - $CI_PIPELINE_SOURCE == "schedule"
 
+
 # Deploy the production release of tools (manual pipeline)
 # deploy on the production environment of the okd prod cluster at CSCS
 # runs on protected branches only as the token variable is protected
@@ -69,15 +60,11 @@ deploy-prod-release-prod-cscs:
   variables:
     OPENSHIFT_SERVER: $CSCS_OPENSHIFT_PROD_SERVER
     OPENSHIFT_TOKEN: $CSCS_OPENSHIFT_PROD_TOKEN
-    INSTALLATION_ROOT: $CSCS_INSTALLATION_ROOT_PROD
-    SPACKIFIED_ENV: $CSCS_SPACKIFIED_ENV_PROD
-    OP: $CSCS_OPERATION_PROD
-    BUILD_ENV_DOCKER_IMAGE: $CSCS_BUILD_ENV_DOCKER_IMAGE_PROD
-    LAB_KERNEL_PATH: $CSCS_LAB_KERNEL_PATH_PROD
-    OKD_CLUSTER_UID: $CSCS_OKD_PROD_UID
-    #SPACK_ENV_TAR_FILE: ebrains-spack-builds${CI_PIPELINE_ID}.tar.gz
-    SPACK_ENV_TAR_FILE: ebrains-spack-builds.tar.gz
     OC_PROJECT: jupyterhub
+    LAB_KERNEL_ROOT: /srv/jupyterlab_kernels/prod
+    INSTALLATION_ROOT: /srv/main-spack-instance-2212
+    SPACK_ENV: ebrains-23-01
+    RELEASE_NAME: EBRAINS-23.01
   resource_group: shared-NFS-mount-prod-cscs
   rules:
     - if: '$CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH && $CI_COMMIT_BRANCH =~ /ebrains/'
@@ -92,15 +79,11 @@ deploy-prod-release-prod-jsc:
   variables:
     OPENSHIFT_SERVER: $JSC_OPENSHIFT_PROD_SERVER
     OPENSHIFT_TOKEN: $JSC_OPENSHIFT_PROD_TOKEN
-    INSTALLATION_ROOT: $JSC_INSTALLATION_ROOT_PROD
-    SPACKIFIED_ENV: $JSC_SPACKIFIED_ENV_PROD
-    OP: $JSC_OPERATION_PROD
-    BUILD_ENV_DOCKER_IMAGE: $JSC_BUILD_ENV_DOCKER_IMAGE_PROD
-    LAB_KERNEL_PATH: $JSC_LAB_KERNEL_PATH_PROD
-    OKD_CLUSTER_UID: $JSC_OKD_PROD_UID
-    #SPACK_ENV_TAR_FILE: ebrains-spack-builds${CI_PIPELINE_ID}.tar.gz
-    SPACK_ENV_TAR_FILE: ebrains-spack-builds.tar.gz
     OC_PROJECT: jupyterhub
+    LAB_KERNEL_ROOT: /srv/jupyterlab_kernels/prod
+    INSTALLATION_ROOT: /srv/main-spack-instance-2212
+    SPACK_ENV: ebrains-23-01
+    RELEASE_NAME: EBRAINS-23.01
   resource_group: shared-NFS-mount-prod-jsc
   rules:
     - if: '$CI_COMMIT_BRANCH != $CI_DEFAULT_BRANCH && $CI_COMMIT_BRANCH =~ /ebrains/'
@@ -117,30 +100,11 @@ deploy-exp-release-dev-cscs:
   variables:
     OPENSHIFT_SERVER: $CSCS_OPENSHIFT_DEV_SERVER
     OPENSHIFT_TOKEN: $CSCS_OPENSHIFT_DEV_TOKEN
-    INSTALLATION_ROOT: $CSCS_INSTALLATION_ROOT_DEV
-    SPACKIFIED_ENV: experimental
-    OP: update
-    BUILD_ENV_DOCKER_IMAGE: $CSCS_BUILD_ENV_DOCKER_IMAGE_DEV
-    LAB_KERNEL_PATH: /srv/jupyterlab_kernels/int/experimental
-    OKD_CLUSTER_UID: $CSCS_OKD_DEV_UID
-    #SPACK_ENV_TAR_FILE: ebrains-spack-builds${CI_PIPELINE_ID}.tar.gz
-    SPACK_ENV_TAR_FILE: ebrains-spack-builds.tar.gz
     OC_PROJECT: jupyterhub-int
-  before_script:
-    - | 
-        head -n -9 create_JupyterLab_kernel.sh > tmp.txt && mv tmp.txt create_JupyterLab_kernel.sh
-        cat << EOS >> create_JupyterLab_kernel.sh
-        mkdir \$LAB_KERNEL_PATH/spack_experimental_release
-        cat <<EOF >\$LAB_KERNEL_PATH/spack_experimental_release/kernel.json
-        {
-         "argv": ["\$LAB_KERNEL_PATH/bin/env.sh", "{connection_file}", "--profile=default"],
-         "display_name": "EBRAINS_experimental_release",
-         "name": "spack_experimental_release",
-         "language": "python",
-         "env": { "LAB_KERNEL_NAME": "EBRAINS_experimental_release", "LAB_KERNEL_RELEASE_DATE": "\$(date +"%Y-%m-%d")" }
-        }
-        EOF
-        EOS
+    LAB_KERNEL_ROOT: /srv/jupyterlab_kernels/int
+    INSTALLATION_ROOT: /srv/test-build-2212
+    SPACK_ENV: experimental
+    RELEASE_NAME: EBRAINS-experimental
   resource_group: shared-NFS-mount-dev-cscs
   only:
     refs:
@@ -159,30 +123,11 @@ deploy-exp-release-prod-cscs:
   variables:
     OPENSHIFT_SERVER: $CSCS_OPENSHIFT_PROD_SERVER
     OPENSHIFT_TOKEN: $CSCS_OPENSHIFT_PROD_TOKEN
-    INSTALLATION_ROOT: $CSCS_INSTALLATION_ROOT_PROD
-    SPACKIFIED_ENV: experimental
-    OP: update
-    BUILD_ENV_DOCKER_IMAGE: $CSCS_BUILD_ENV_DOCKER_IMAGE_PROD
-    LAB_KERNEL_PATH: /srv/jupyterlab_kernels/prod/experimental
-    OKD_CLUSTER_UID: $CSCS_OKD_PROD_UID
-    #SPACK_ENV_TAR_FILE: ebrains-spack-builds${CI_PIPELINE_ID}.tar.gz
-    SPACK_ENV_TAR_FILE: ebrains-spack-builds.tar.gz
     OC_PROJECT: jupyterhub
-  before_script:
-    - | 
-        head -n -9 create_JupyterLab_kernel.sh > tmp.txt && mv tmp.txt create_JupyterLab_kernel.sh
-        cat << EOS >> create_JupyterLab_kernel.sh
-        mkdir \$LAB_KERNEL_PATH/spack_experimental_release
-        cat <<EOF >\$LAB_KERNEL_PATH/spack_experimental_release/kernel.json
-        {
-         "argv": ["\$LAB_KERNEL_PATH/bin/env.sh", "{connection_file}", "--profile=default"],
-         "display_name": "EBRAINS_experimental_release",
-         "name": "spack_experimental_release",
-         "language": "python",
-         "env": { "LAB_KERNEL_NAME": "EBRAINS_experimental_release", "LAB_KERNEL_RELEASE_DATE": "\$(date +"%Y-%m-%d")" }
-        }
-        EOF
-        EOS
+    LAB_KERNEL_ROOT: /srv/jupyterlab_kernels/prod
+    INSTALLATION_ROOT: /srv/main-spack-instance-2212
+    SPACK_ENV: experimental
+    RELEASE_NAME: EBRAINS-experimental
   resource_group: shared-NFS-mount-prod-cscs
   only:
     refs:
@@ -201,30 +146,11 @@ deploy-exp-release-prod-jsc:
   variables:
     OPENSHIFT_SERVER: $JSC_OPENSHIFT_PROD_SERVER
     OPENSHIFT_TOKEN: $JSC_OPENSHIFT_PROD_TOKEN
-    INSTALLATION_ROOT: $JSC_INSTALLATION_ROOT_PROD
-    SPACKIFIED_ENV: experimental
-    OP: update
-    BUILD_ENV_DOCKER_IMAGE: $JSC_BUILD_ENV_DOCKER_IMAGE_PROD
-    LAB_KERNEL_PATH: /srv/jupyterlab_kernels/prod/experimental
-    OKD_CLUSTER_UID: $JSC_OKD_PROD_UID
-    #SPACK_ENV_TAR_FILE: ebrains-spack-builds${CI_PIPELINE_ID}.tar.gz
-    SPACK_ENV_TAR_FILE: ebrains-spack-builds.tar.gz
     OC_PROJECT: jupyterhub
-  before_script:
-    - | 
-        head -n -9 create_JupyterLab_kernel.sh > tmp.txt && mv tmp.txt create_JupyterLab_kernel.sh
-        cat << EOS >> create_JupyterLab_kernel.sh
-        mkdir \$LAB_KERNEL_PATH/spack_experimental_release
-        cat <<EOF >\$LAB_KERNEL_PATH/spack_experimental_release/kernel.json
-        {
-         "argv": ["\$LAB_KERNEL_PATH/bin/env.sh", "{connection_file}", "--profile=default"],
-         "display_name": "EBRAINS_experimental_release",
-         "name": "spack_experimental_release",
-         "language": "python",
-         "env": { "LAB_KERNEL_NAME": "EBRAINS_experimental_release", "LAB_KERNEL_RELEASE_DATE": "\$(date +"%Y-%m-%d")" }
-        }
-        EOF
-        EOS
+    LAB_KERNEL_ROOT: /srv/jupyterlab_kernels/prod
+    INSTALLATION_ROOT: /srv/main-spack-instance-2212
+    SPACK_ENV: experimental
+    RELEASE_NAME: EBRAINS-experimental
   resource_group: shared-NFS-mount-prod-jsc
   only:
     refs:
@@ -249,28 +175,24 @@ build-spack-env-on-runner:
     SPACK_USER_CONFIG_PATH: $CI_PROJECT_DIR/.spack
     TMP: $CI_PROJECT_DIR/.spack-tmp
   script:
-    - git clone -c feature.manyFiles=true https://github.com/spack/spack $SPACK_DEV_PATH
-    - cd $SPACK_DEV_PATH
-    - git checkout -b ebrains_spack_commit a8d440d3ababcdec20d665ad938ab880cd9b9d17
-    - cd ../
+    - git clone --depth 1 -c advice.detachedHead=false -c feature.manyFiles=true --branch $SPACK_VERSION https://github.com/spack/spack $SPACK_DEV_PATH
     - mkdir $TMP
     - cp packages.yaml $SPACK_DEV_PATH/etc/spack/packages.yaml
     - |
       cat <<EOF > $SPACK_DEV_PATH/etc/spack/defaults/upstreams.yaml
       upstreams:
         ebrains-gitlab-spack-instance:
-          install_tree: /mnt/spack/opt/spack
+          install_tree: /mnt/spack_v0.18.1/opt/spack
       EOF
     - . $SPACK_DEV_PATH/share/spack/setup-env.sh
     - spack find
     - spack load gcc@10.3.0
     - spack compiler find
     - spack repo add .
-    - spack repo list
     - spack env create $SPACK_DEV_ENV spack.yaml
     - spack env activate $SPACK_DEV_ENV
-    - spack concretize -f
-    - spack install --no-check-signature
+    - spack concretize -f --fresh
+    - spack install -y --fresh --no-check-signature
   # cache:
   #   key: spack-cache-$CI_COMMIT_REF_SLUG
   #   paths:
@@ -292,18 +214,14 @@ sync-gitlab-spack-instance:
   image: docker-registry.ebrains.eu/tc/ebrains-spack-build-env:gitlab_runners_nfs_latest
   variables:
     SPACK_NFS_ENV: ebrains-runner-build
-    SPACK_PATH: /mnt/spack
+    SPACK_PATH: /mnt/spack_v0.18.1
     SPACK_USER_CACHE_PATH: $SPACK_PATH/.spack
     SPACK_USER_CONFIG_PATH: $SPACK_PATH/.spack
     SPACK_REPO_PATH: $SPACK_PATH/ebrains-spack-builds
   script:
     - |
       if [ ! -d $SPACK_PATH ]; then
-        # git clone --depth 1 -c advice.detachedHead=false  -c feature.manyFiles=true --branch $SPACK_VERSION https://github.com/spack/spack $SPACK_PATH
-        git clone -c feature.manyFiles=true https://github.com/spack/spack $SPACK_PATH
-        cd $SPACK_PATH
-        git checkout -b ebrains_spack_commit a8d440d3ababcdec20d665ad938ab880cd9b9d17
-        cd ../
+        git clone --depth 1 -c advice.detachedHead=false -c feature.manyFiles=true --branch $SPACK_VERSION https://github.com/spack/spack $SPACK_PATH
         cp $CI_PROJECT_DIR/packages.yaml $SPACK_PATH/etc/spack/packages.yaml
       fi
     - . $SPACK_PATH/share/spack/setup-env.sh
@@ -322,8 +240,8 @@ sync-gitlab-spack-instance:
     - spack env list | grep -q $SPACK_NFS_ENV && echo "Environment created already" || spack env create $SPACK_NFS_ENV $CI_PROJECT_DIR/spack.yaml
     - spack env activate $SPACK_NFS_ENV
     - cp $CI_PROJECT_DIR/spack.yaml $SPACK_ROOT/var/spack/environments/$SPACK_NFS_ENV/spack.yaml
-    - spack concretize -f
-    - spack install --no-check-signature || (cp -r /tmp/spack/spack-stage $CI_PROJECT_DIR/build_logs; exit 1)
+    - spack concretize -f --fresh
+    - spack install -y --fresh --no-check-signature || (cp -r /tmp/spack/spack-stage $CI_PROJECT_DIR/build_logs; exit 1)
     - spack module tcl refresh -y
     - spack reindex
     - spack env loads -r
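
For reference, a minimal sketch of how the reordered `create_job.sh` arguments expand for the dev-CSCS job above; the values are taken from the variables defined in this diff, while the literal expansion itself is illustrative only and not part of the patch:

```bash
# illustrative expansion of the updated call in .deploy-build-environment
# ./create_job.sh $CI_PIPELINE_ID $BUILD_ENV_DOCKER_IMAGE update $INSTALLATION_ROOT $SPACK_VERSION \
#                 $SPACK_ENV $CI_COMMIT_BRANCH $RELEASE_NAME $LAB_KERNEL_ROOT
./create_job.sh "$CI_PIPELINE_ID" \
    docker-registry.ebrains.eu/tc/ebrains-spack-build-env/devel:test \
    update \
    /srv/test-build-2212 \
    v0.18.1 \
    test \
    "$CI_COMMIT_BRANCH" \
    EBRAINS-test \
    /srv/jupyterlab_kernels/int
```
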
diff --git a/.gitlab/issue_templates/build-error.md b/.gitlab/issue_templates/build-error.md
new file mode 100644
index 0000000000000000000000000000000000000000..e01ec4645acbe56a55d4c6289f0c04fa144b9de6
--- /dev/null
+++ b/.gitlab/issue_templates/build-error.md
@@ -0,0 +1,46 @@
+<!-- Thanks for taking the time to report this build failure. To proceed with the report please:
+
+1. Title the issue "Installation issue: <name-of-the-package>".
+2. Provide the information required below.
+
+We encourage you to try, as much as possible, to reduce your problem to the minimal example that still reproduces the issue. That would help us a lot in fixing it quickly and effectively! -->
+
+### Summary
+
+|               |                                               |
+|---------------|-----------------------------------------------|
+| Summary       | Spack package build failure                   |
+| Package info  | <!-- Spack package name or entire spec -->    |
+| System        | <!-- HPC cluster name/Collab Lab/other  -->   |
+| Related       | <!-- Other related issues (if applicable) --> |
+
+### Steps to reproduce the issue
+
+<!-- Fill in the console output from the exact spec you are trying to build. -->
+```console
+$ spack spec -I <spec>
+...
+$ spack install <spec>
+...
+```
+
+### Error message
+<!-- Please post the error message from spack inside the <details> tag below: -->
+
+<details><summary>Error message</summary><pre><code>
+(add error logs here)
+</code></pre></details>
+
+### Information on your system
+
+<!-- Please include the output of `spack debug report` -->
+
+<!-- If you have any relevant configuration detail (custom `packages.yaml` or `modules.yaml`, etc.) you can add that here as well. -->
+
+### Additional information
+
+<!-- Please upload the following files. They should be present in the stage directory of the failing build. Also upload any config.log or similar file if one exists. -->
+* [spack-build-out.txt]()
+* [spack-build-env.txt]()
+
+/label ~build-error
diff --git a/create_JupyterLab_kernel.sh b/create_JupyterLab_kernel.sh
index 078c786b51f6c22bdc9a207c7eea2d56f09e778a..4eab2eca39ffb3b3e30386f742e8df5691d0fdaa 100644
--- a/create_JupyterLab_kernel.sh
+++ b/create_JupyterLab_kernel.sh
@@ -1,64 +1,73 @@
 #!/bin/bash
-#title           :create_JupyterLab_kernel.sh
-#description     :Script to create a spackified JupyterLab kernel conf and place it to NFS where it can be loaded by all users.   
-#usage           :./create_JupyterLab_kernel.sh $INSTALLATION_ROOT $ENV $LAB_KERNEL_PATH
-#==============================================================================
+
+# ===========================================================================================================
+# title         : create_JupyterLab_kernel.sh
+# usage         : ./create_JupyterLab_kernel.sh $INSTALLATION_ROOT $EBRAINS_SPACK_ENV $RELEASE_NAME $LAB_KERNEL_ROOT
+# description   : creates a spackified JupyterLab kernel conf and places it on the NFS, where it can be
+#                 loaded by all users.
+# ===========================================================================================================
 
 INSTALLATION_ROOT=$1
-SPACKIFIED_ENV=$2
-LAB_KERNEL_PATH=$3
+EBRAINS_SPACK_ENV=$2
+RELEASE_NAME=$3
+LAB_KERNEL_ROOT=$4
 
 # capture the empty env
-cd /opt/app-root/src
-env >> before.txt
+cd ~
+env >> /tmp/before.txt
 
-# load spack, spack repos and spack env
-cp -r /srv/$INSTALLATION_ROOT/spack/.spack ~
-source /srv/$INSTALLATION_ROOT/spack/share/spack/setup-env.sh
-spack repo add /srv/$INSTALLATION_ROOT/ebrains-spack-builds
+# load spack
+export SPACK_USER_CACHE_PATH=$INSTALLATION_ROOT/spack/.spack
+export SPACK_USER_CONFIG_PATH=$INSTALLATION_ROOT/spack/.spack
+source $INSTALLATION_ROOT/spack/share/spack/setup-env.sh
 # no need to activate as the env is already activated in the context it is used
-#spack env activate $SPACKIFIED_ENV
+# spack env activate $EBRAINS_SPACK_ENV
 
-module use /srv/$INSTALLATION_ROOT/spack/share/spack/modules/linux-centos7-broadwell/
-module use /srv/$INSTALLATION_ROOT/spack/share/spack/modules/linux-centos7-x86_64/
-source /srv/$INSTALLATION_ROOT/spack/var/spack/environments/$SPACKIFIED_ENV/loads
-# add also user's .local python3.8 packages to allow package installation at runtime
-# by the user using pip
+# load modules
+source /usr/share/modules/init/sh
+module use $INSTALLATION_ROOT/spack/share/spack/modules/linux-ubuntu20.04-x86_64/
+source $INSTALLATION_ROOT/spack/var/spack/environments/$EBRAINS_SPACK_ENV/loads
+# also add the user's .local python3.8 packages, so that the user can install additional packages at runtime with pip
 export PYTHONPATH=$PYTHONPATH:/opt/app-root/src/.local/lib/python3.8/site-packages
 export PATH=$PATH:/opt/app-root/src/.local/bin
-# export also python modules installed in the base docker Collab image
+# and python modules installed in the base docker Collab image
 export PYTHONPATH=$PYTHONPATH:/usr/local/lib/python3.8/dist-packages
 
 # capture the env after spack activation
-cd /opt/app-root/src
-env >> after.txt
+cd ~
+env >> /tmp/after.txt
+
+# kernel name is lowercase release name
+KERNEL_NAME=$(echo "$RELEASE_NAME" | tr '[:upper:]' '[:lower:]')
+# kernel dir is a directory inside LAB_KERNEL_ROOT named after the kernel
+KERNEL_PATH=$LAB_KERNEL_ROOT/$KERNEL_NAME
 
 # prepare the env file required for the JupyterLab kernel
-mkdir $LAB_KERNEL_PATH/bin
-cat <<EOF > $LAB_KERNEL_PATH/bin/env.sh
+mkdir -p $KERNEL_PATH/bin
+
+# start of env creation
+cat <<EOF > $KERNEL_PATH/bin/env.sh
 #!/usr/bin/env bash
 set -euxo pipefail
 EOF
 
-# load here all tools
-#spack load --sh -r python@3.8.11 py-ipykernel py-pip py-numpy@1.21.0 py-scipy py-pandas py-seaborn py-matplotlib arbor nest@3.0 neuron py-pynn tvb-data tvb-library meta-brainscales %gcc@10.3.0 >> $LAB_KERNEL_PATH/bin/env.sh
-
 # append the necessary env variables for spack env and tools
-cd /opt/app-root/src
-diff before.txt after.txt|grep ">"|cut -c 3- |awk '$0="export "$0' >> $LAB_KERNEL_PATH/bin/env.sh
+diff /tmp/before.txt /tmp/after.txt|grep ">"|cut -c 3- |awk '$0="export "$0' >> $KERNEL_PATH/bin/env.sh
 
 # end of env creation
-cat <<EOF >>$LAB_KERNEL_PATH/bin/env.sh
+cat <<EOF >>$KERNEL_PATH/bin/env.sh
 python -m ipykernel_launcher -f \$@
 EOF
-chmod +x $LAB_KERNEL_PATH/bin/env.sh
+chmod +x $KERNEL_PATH/bin/env.sh
+
 # create the new kernel's configuration file
-mkdir $LAB_KERNEL_PATH/spack_python_kernel_release_202112
-cat <<EOF >$LAB_KERNEL_PATH/spack_python_kernel_release_202112/kernel.json
+mkdir -p $KERNEL_PATH/$KERNEL_NAME
+cat <<EOF >$KERNEL_PATH/$KERNEL_NAME/kernel.json
 {
- "argv": ["$LAB_KERNEL_PATH/bin/env.sh", "{connection_file}", "--profile=default"],
- "display_name": "EBRAINS_release_v0.2_202112",
- "name": "spack_python_kernel_release_202112",
- "language": "python"
+ "argv": ["$KERNEL_PATH/bin/env.sh", "{connection_file}", "--profile=default"],
+ "display_name": "$RELEASE_NAME",
+ "name": "$KERNEL_NAME",
+ "language": "python",
+ "env": { "LAB_KERNEL_NAME": "$RELEASE_NAME", "LAB_KERNEL_RELEASE_DATE": "$(date +"%Y-%m-%d")" }
 }
 EOF
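
For orientation, a short sketch of what the new naming scheme in this script resolves to, assuming RELEASE_NAME=EBRAINS-23.01 and LAB_KERNEL_ROOT=/srv/jupyterlab_kernels/prod as set for the prod jobs in this diff:

```bash
RELEASE_NAME=EBRAINS-23.01
LAB_KERNEL_ROOT=/srv/jupyterlab_kernels/prod

KERNEL_NAME=$(echo "$RELEASE_NAME" | tr '[:upper:]' '[:lower:]')  # -> ebrains-23.01
KERNEL_PATH=$LAB_KERNEL_ROOT/$KERNEL_NAME                         # -> /srv/jupyterlab_kernels/prod/ebrains-23.01

# files written by the script:
#   $KERNEL_PATH/bin/env.sh                  (env wrapper executed by the kernel)
#   $KERNEL_PATH/$KERNEL_NAME/kernel.json    (JupyterLab kernel spec)
```
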
diff --git a/create_job.sh b/create_job.sh
index 906e1ba37bc6a6adccd61bfabf2cdac5843e0bf2..e8bc5fca02ef692b4d68aa13d503b6389b564c41 100644
--- a/create_job.sh
+++ b/create_job.sh
@@ -1,13 +1,21 @@
 #!/bin/bash
 
-INSTALLATION_ROOT=$1
-SPACKIFIED_ENV=$2
+# ===========================================================================================================
+# title         : create_job.sh
+# usage         : ./create_job.sh $OC_JOB_ID $BUILD_ENV_DOCKER_IMAGE $OP $INSTALLATION_ROOT $SPACK_VERSION
+#                 $SPACK_ENV $BRANCH $RELEASE_NAME $LAB_KERNEL_ROOT
+# description   : creates the OKD job YAML file that builds/updates the spack environment and creates the Lab kernel
+# ===========================================================================================================
+
+OC_JOB_ID=$1
+BUILD_ENV_DOCKER_IMAGE=$2
 OP=$3
-SPACK_ENV_TAR_FILE=$4
-OC_JOB_ID=$5
-BUILD_ENV_DOCKER_IMAGE=$6
-LAB_KERNEL_PATH=$7
-OKD_CLUSTER_UID=$8
+INSTALLATION_ROOT=$4
+SPACK_VERSION=$5
+SPACK_ENV=$6
+BRANCH=$7
+RELEASE_NAME=$8
+LAB_KERNEL_ROOT=$9
 
 cat <<EOT >> simplejob.yml
 apiVersion: batch/v1
@@ -15,32 +23,35 @@ kind: Job
 metadata:
   name: simplejob${OC_JOB_ID}
 spec:
-  parallelism: 1    
+  parallelism: 1
   completions: 1
-  backoffLimit: 0    
-  template:         
+  backoffLimit: 0
+  template:
     metadata:
       name: testjob
     spec:
-      #securityContext:
-      #  supplementalGroups: [1000410000]
-      #  seLinuxOptions:
-      #    level: s0:c25,c10
       containers:
       - name: simplejob
         image: ${BUILD_ENV_DOCKER_IMAGE}
         imagePullPolicy: Always
-        securityContext:
-          runAsUser: ${OKD_CLUSTER_UID}
         volumeMounts:
           - name: sharedbin
             mountPath: /srv
-        command: ["/usr/local/bin/deploy-build-env.sh", "$INSTALLATION_ROOT", "$SPACKIFIED_ENV", "$OP", "$SPACK_ENV_TAR_FILE", "$LAB_KERNEL_PATH"]
+        command: ["/usr/local/bin/deploy-build-env.sh", "$OP", "$INSTALLATION_ROOT", "$SPACK_VERSION", "$SPACK_ENV", "$BRANCH", "$RELEASE_NAME", "$LAB_KERNEL_ROOT"]
+        env:
+          - name: GITLAB_USER
+            valueFrom:
+              secretKeyRef:
+                name: spack-repo-gitlab-token
+                key: username
+          - name: GITLAB_TOKEN
+            valueFrom:
+              secretKeyRef:
+                name: spack-repo-gitlab-token
+                key: password
       volumes:
         - name: sharedbin
           persistentVolumeClaim:
             claimName: shared-binaries
       restartPolicy: Never
-      #nodeSelector:
-      #  kubernetes.io/hostname: okd-worker-3.dev.hbp.eu     
 EOT
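
The job template now reads GITLAB_USER/GITLAB_TOKEN from a secret named spack-repo-gitlab-token with username and password keys. A hedged sketch of how such a secret could be created in the target project (not part of this patch; the literal values are placeholders):

```bash
oc project jupyterhub-int   # or jupyterhub, depending on the target environment
oc create secret generic spack-repo-gitlab-token \
    --from-literal=username=<gitlab-username> \
    --from-literal=password=<gitlab-access-token>
```
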
diff --git a/install_spack_env.sh b/install_spack_env.sh
new file mode 100644
index 0000000000000000000000000000000000000000..bcb3c8d71a924cf26455aac28de8512d78f23282
--- /dev/null
+++ b/install_spack_env.sh
@@ -0,0 +1,61 @@
+#!/bin/bash
+
+# ===========================================================================================================
+# title         : install_spack_env.sh
+# usage         : ./install_spack_env.sh $INSTALLATION_ROOT $SPACK_VERSION $EBRAINS_REPO $EBRAINS_SPACK_ENV
+# description   : installs or updates the spack environment defined in the EBRAINS spack repo
+#                 (if the specified spack instance doesn't exist, it also creates it)
+# ===========================================================================================================
+
+INSTALLATION_ROOT=$1
+SPACK_VERSION=$2
+EBRAINS_REPO=$3
+EBRAINS_SPACK_ENV=$4
+
+# specify location of .spack dir (by default in ~)
+export SPACK_USER_CACHE_PATH=$INSTALLATION_ROOT/spack/.spack
+export SPACK_USER_CONFIG_PATH=$INSTALLATION_ROOT/spack/.spack
+
+# initial setup: if spack dir doesn't already exist, clone it and install gcc and python
+if [ ! -d $INSTALLATION_ROOT/spack ]
+then
+  # clone the spack repo, import the packages.yaml config file and source the setup script
+  git clone --depth 1 -c advice.detachedHead=false -c feature.manyFiles=true --branch $SPACK_VERSION https://github.com/spack/spack $INSTALLATION_ROOT/spack
+  cp $EBRAINS_REPO/packages.yaml $INSTALLATION_ROOT/spack/etc/spack/packages.yaml
+  source $INSTALLATION_ROOT/spack/share/spack/setup-env.sh
+  # install platform compiler and python
+  spack compiler find
+  spack install gcc@10.3.0
+  spack load gcc@10.3.0
+  spack compiler find
+  spack install python@3.8.11 %gcc@10.3.0
+else
+  source $INSTALLATION_ROOT/spack/share/spack/setup-env.sh
+fi
+
+# add repo if it does not exist
+if [[ ! $(spack repo list | grep $EBRAINS_REPO) ]]
+then
+  spack repo add $EBRAINS_REPO
+fi
+
+# create environment if it does not exist
+if [ ! -d "$SPACK_ROOT/var/spack/environments/$EBRAINS_SPACK_ENV" ]
+then
+  spack env create $EBRAINS_SPACK_ENV
+fi
+
+# activate environment
+cp $EBRAINS_REPO/spack.yaml $SPACK_ROOT/var/spack/environments/$EBRAINS_SPACK_ENV/spack.yaml
+spack env activate $EBRAINS_SPACK_ENV
+# remove any existing concrete specs from spack.lock and re-concretize everything
+spack concretize --fresh --force
+# install the environment, using 2 jobs to reduce the amount of RAM required
+spack install --fresh --no-check-signature -y -j2
+
+# create modules files with spack
+spack module tcl refresh -y
+# rebuild spack's database
+spack reindex
+# create a loads script that, when sourced, activates and loads the installed spack environment
+spack env loads -r
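
A minimal usage sketch for the new script, using the dev-CSCS values defined in .gitlab-ci.yml; the repo checkout path passed as EBRAINS_REPO is an assumption for illustration only:

```bash
# ./install_spack_env.sh $INSTALLATION_ROOT $SPACK_VERSION $EBRAINS_REPO $EBRAINS_SPACK_ENV
./install_spack_env.sh \
    /srv/test-build-2212 \
    v0.18.1 \
    /srv/test-build-2212/ebrains-spack-builds \
    test
```
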
diff --git a/packages/arbor/package.py b/packages/arbor/package.py
index 56791d87b3bfa8f2bfc8addb4174eeda5f9564ce..2fd9bb8e67bfdd45e5ad5efb7fd256f8d11aae4b 100644
--- a/packages/arbor/package.py
+++ b/packages/arbor/package.py
@@ -12,10 +12,15 @@ class Arbor(CMakePackage, CudaPackage):
 
     homepage = "https://arbor-sim.org"
     git = "https://github.com/arbor-sim/arbor.git"
-    url = "https://github.com/arbor-sim/arbor/releases/download/v0.7/arbor-v0.7-full.tar.gz"
+    url = "https://github.com/arbor-sim/arbor/releases/download/v0.8/arbor-v0.8-full.tar.gz"
     maintainers = ["bcumming", "brenthuisman", "haampie", "schmitts"]
 
     version("master", branch="master", submodules=True)
+    version(
+        "0.8",
+        sha256="18df5600308841616996a9de93b55a105be0f59692daa5febd3a65aae5bc2c5d",
+        url="https://github.com/arbor-sim/arbor/releases/download/v0.8/arbor-v0.8-full.tar.gz",
+    )
     version(
         "0.7",
         sha256="c3a6b7193946aee882bb85f9c38beac74209842ee94e80840968997ba3b84543",
@@ -31,11 +36,6 @@ class Arbor(CMakePackage, CudaPackage):
         sha256="290e2ad8ca8050db1791cabb6b431e7c0409c305af31b559e397e26b300a115d",
         url="https://github.com/arbor-sim/arbor/releases/download/v0.5.2/arbor-v0.5.2-full.tar.gz",
     )
-    version(
-        "0.5",
-        sha256="d0c8a4c7f97565d7c30493c66249be794d1dc424de266fc79cecbbf0e313df59",
-        url="https://github.com/arbor-sim/arbor/releases/download/v0.5/arbor-v0.5-full.tar.gz",
-    )
 
     variant(
         "assertions",
@@ -53,20 +53,23 @@ class Arbor(CMakePackage, CudaPackage):
     )
 
     # https://docs.arbor-sim.org/en/latest/install/build_install.html#compilers
-    conflicts("%gcc@:8.3")
-    conflicts("%clang@:7")
+    conflicts("%gcc@:8")
+    conflicts("%clang@:9")
     # Cray compiler v9.2 and later is Clang-based.
     conflicts("%cce@:9.1")
     conflicts("%intel")
 
-    depends_on("cmake@3.12:", type="build")
+    depends_on("cmake@3.19:", type="build")
 
     # misc dependencies
     depends_on("fmt@7.1:", when="@0.5.3:")  # required by the modcc compiler
+    depends_on("fmt@9.1:", when="@0.7.1:")
     depends_on("nlohmann-json")
     depends_on("random123")
-    depends_on("cuda@10:", when="+cuda")
     depends_on("libxml2", when="+neuroml")
+    with when("+cuda"):
+        depends_on("cuda@10:")
+        depends_on("cuda@11:", when="@0.7.1:")
 
     # mpi
     depends_on("mpi", when="+mpi")
@@ -79,6 +82,7 @@ class Arbor(CMakePackage, CudaPackage):
     with when("+python"):
         depends_on("py-pybind11@2.6:", type=("build"))
         depends_on("py-pybind11@2.8.1:", when="@0.5.3:", type=("build"))
+        depends_on("py-pybind11@2.10.1:", when="@0.7.1:", type=("build"))
 
     # sphinx based documentation
     depends_on("python@3.7:", when="+doc", type="build")
diff --git a/packages/hxtorch/package.py b/packages/hxtorch/package.py
index c1b2fff901e646c41505cc56347f328c70f17879..6ac13f5d225ae135030e5b794deae5bb5de8609e 100644
--- a/packages/hxtorch/package.py
+++ b/packages/hxtorch/package.py
@@ -3,6 +3,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 from spack import *
+from spack.pkg.builtin.boost import Boost
 import os
 
 
@@ -13,14 +14,15 @@ class Hxtorch(WafPackage):
     # This repo provides a waf binary used for the build below
     git      = "https://github.com/electronicvisions/pynn-brainscales.git"
 
-    version('4.0-a1', branch='waf')
+    version('4.0-a3', branch='waf')
 
     # PPU compiler dependencies
-    depends_on('oppulance@4.0:')
+    depends_on('oppulance@4.0-a3')
 
     # host software dependencies
     depends_on('bitsery', type=('build', 'link', 'run'))
     depends_on('binutils+gold+ld+plugins', type=('build', 'link', 'run')) # specialize
+    depends_on(Boost.with_default_variants)
     depends_on('boost@1.69.0: +graph+icu+mpi+python+numpy+coroutine+context cxxstd=17', type=('build', 'link', 'run')) # specialize boost (non-clingo, type=('build', 'link', 'run'))
     depends_on('cereal', type=('build', 'link', 'run'))
     depends_on('cppcheck', type=('build', 'link', 'run'))
diff --git a/packages/llvm/package.py b/packages/llvm/package.py
index 4d0226b73eadec1074649cc1c671aa32f2cace15..a12a7760917b3a7d33acf9e7d12297b34f8c9aa5 100644
--- a/packages/llvm/package.py
+++ b/packages/llvm/package.py
@@ -771,7 +771,7 @@ class Llvm(CMakePackage, CudaPackage):
         return ret
 
     # ECM: might be not needed anymore?
-    def add_files_to_view(self, view, merge_map):
+    def add_files_to_view(self, view, merge_map, skip_if_exists=True):
         # we remove libgomp-related files from views as they conflict with
         # gcc-ones
         ignore_file_paths = [
@@ -783,7 +783,7 @@ class Llvm(CMakePackage, CudaPackage):
                 if path in merge_map:
                     del merge_map[path]
 
-        super(Llvm, self).add_files_to_view(view, merge_map)
+        super(Llvm, self).add_files_to_view(view, merge_map, skip_if_exists=True)
 
 def get_llvm_targets_to_build(spec):
     targets = spec.variants['targets'].value
diff --git a/packages/oppulance/package.py b/packages/oppulance/package.py
index eb4a89bc3f86824ef4913387468e19fc8488c567..d307c7ab1e6e1dc74a333420ef980d1567b51b27 100644
--- a/packages/oppulance/package.py
+++ b/packages/oppulance/package.py
@@ -21,8 +21,8 @@ class Oppulance(Package):
 
     releases = [
         {
-            'version': '4.0-a1',
-            'tag': 'ebrains-4.0-a1'
+            'version': '4.0-a3',
+            'tag': 'ebrains-4.0-a3'
         },
     ]
 
diff --git a/packages/py-libsonata/package.py b/packages/py-libsonata/package.py
index 6ac7d2105103dd1609b191e7de35477cbbd70470..4c9ee33b5ec7c51f54dc1d5bdb14546e5cd3a03f 100644
--- a/packages/py-libsonata/package.py
+++ b/packages/py-libsonata/package.py
@@ -11,25 +11,23 @@ class PyLibsonata(PythonPackage):
 
     homepage = "https://github.com/BlueBrain/libsonata"
     git = "https://github.com/BlueBrain/libsonata.git"
+    pypi = "libsonata/libsonata-0.1.14.tar.gz"
 
-    version('develop', branch='master', submodules=True, get_full_repo=True)
-    version('0.1.12', tag='v0.1.12', submodules=True, get_full_repo=True)
-    version('0.1.11', tag='v0.1.11', submodules=True, get_full_repo=True)
-    version('0.1.10', tag='v0.1.10', submodules=True, get_full_repo=True)
-    # Important: v0.1.9 is not Spack-compatible (use v0.1.10: instead)
-    # version('0.1.9', tag='v0.1.9', submodules=True, get_full_repo=True)
-    version('0.1.8', tag='v0.1.8', submodules=True, get_full_repo=True)
-    version('0.1.6', tag='v0.1.6', submodules=True, get_full_repo=True)
-    version('0.1.5', tag='v0.1.5', submodules=True, get_full_repo=True)
-    version('0.1.4', tag='v0.1.4', submodules=True, get_full_repo=True)
-    version('0.1.3', tag='v0.1.3', submodules=True, get_full_repo=True)
-    version('0.1.0', tag='v0.1.0', submodules=True, get_full_repo=True)
-    version('0.0.3', tag='v0.0.3', submodules=True)
+    submodules = True
+
+    version('develop', branch='master')
+    version('0.1.16', sha256='49ced56992ba8be8aa6638525e8078b7e3ce0d5c05c34ee90746cab02bb5185a')
+    version('0.1.15', sha256='8c7c509db692b482cba5b0453579747db5a981ce5b3c13da96b14ae0332a6e81')
+    version('0.1.14', sha256='a5c75df1c3ef6fac10d92fb6781643e0834e5c35debe77693686dab8bfcf221f')
+    version('0.1.13', sha256='8263938e49b501c477f626b4c25e0c74e91152268830c69aabc96eeb263c6eea')
+    version('0.1.12', sha256='f0fa0f3b129d28e41b337ce2c39c3604990752de8e485327ec9df3bf0360e9c1')
+    version('0.1.11', sha256='95f302818971fec3f19ef18febd5c31c580490692138c8e4fe3534104d88b5e0')
+    version('0.1.10', sha256='7ef9f911f7ea31da5ff5306d8372ec194d223850aede0878ac2a921ce049bbb2')
 
     depends_on('cmake@3.3:', type='build')
     depends_on('hdf5')
     depends_on('py-pybind11')
 
-    depends_on('py-numpy@1.12:', type=('build', 'run'))
+    depends_on('py-numpy@1.17:', type=('build', 'run'))
     depends_on('py-setuptools', type='build', when='@0.1:')
     depends_on('py-setuptools-scm', type='build', when='@0.1:')
diff --git a/packages/py-snudda/package.py b/packages/py-snudda/package.py
index fca424ddd3fc1b611650996cc1d05539889e8559..333e28a3a41817154d7d2f0f6b510810af4beea5 100644
--- a/packages/py-snudda/package.py
+++ b/packages/py-snudda/package.py
@@ -19,7 +19,7 @@ class PySnudda(Package):
     depends_on('py-mpi4py', type=('build','run'))
     depends_on('py-numpy', type=('build','run'))
     depends_on('py-scipy', type=('build','run'))
-    depends_on('py-libsonata', type=('build','run')) # got it from https://github.com/BlueBrain/spack/blob/develop/bluebrain/repo-bluebrain/packages/py-libsonata/package.py
+    depends_on('py-libsonata', type=('build','run')) # got it from the BlueBrain Project: https://github.com/BlueBrain/spack/blob/develop/bluebrain/repo-bluebrain/packages/py-libsonata/package.py
     depends_on('py-pyzmq', type=('build','run'))
     depends_on('py-numexpr', type=('build','run'))
     depends_on('neuron', type=('build','run'))
diff --git a/packages/py-spalloc/package.py b/packages/py-spalloc/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..99d8ae94a055692d14bc64cb125d22c300c254ef
--- /dev/null
+++ b/packages/py-spalloc/package.py
@@ -0,0 +1,20 @@
+# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PySpalloc(PythonPackage):
+    """Spalloc is a Python library and set of command-line programs for
+    requesting SpiNNaker machines from a spalloc server."""
+
+    homepage = "https://github.com/SpiNNakerManchester/spalloc"
+    pypi = "spalloc/spalloc-1!6.0.0.tar.gz"
+
+    version('6.0.0', sha256='1275fd703dfa36fe7fc03407f768f95f05597092c2fe6fb0e743dacb5528be08')
+
+    depends_on("python@3.7:", type=("build", "run"))
+    depends_on("py-appdirs", type=("build", "run"))
+    depends_on("py-spinnutilities@6.0.0", type=("build", "run"), when="^python@3.7")
diff --git a/packages/py-spinnaker-dataspecification/package.py b/packages/py-spinnaker-dataspecification/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..07d1dbcc72860d2445d28f9e19464e9dd78d31ce
--- /dev/null
+++ b/packages/py-spinnaker-dataspecification/package.py
@@ -0,0 +1,20 @@
+# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PySpinnakerDataspecification(PythonPackage):
+    """This package provides utilities for specifying binary data
+    algorithmically, and executing the specifications to produce the data."""
+
+    homepage = "https://github.com/SpiNNakerManchester/DataSpecification"
+    pypi = "SpiNNaker_DataSpecification/SpiNNaker_DataSpecification-1!6.0.0.tar.gz"
+
+    version("6.0.0", sha256="5265771f5a3e77b1d7a1e075d8cfea5987c37b0a3880874316101a33d15d01dd")
+
+    depends_on("python@3.7:", type=("build", "run"))
+    depends_on("py-spinnutilities@6.0.0", type=("build", "run"))
+    depends_on("py-spinnmachine@6.0.0", type=("build", "run"))
diff --git a/packages/py-spinnaker-pacman/package.py b/packages/py-spinnaker-pacman/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..0ac11a88cf66b4de780e575ef295b9e5a5995f21
--- /dev/null
+++ b/packages/py-spinnaker-pacman/package.py
@@ -0,0 +1,22 @@
+# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PySpinnakerPacman(PythonPackage):
+    """This package provides utilities for partitioning, placing a routing on a
+    SpiNNaker machine."""
+
+    homepage = "https://github.com/SpiNNakerManchester/PACMAN"
+    pypi = "SpiNNaker_PACMAN/SpiNNaker_PACMAN-1!6.0.0.tar.gz"
+
+    version("6.0.0", sha256="2baf2d2451fa3c645dc731bb5716b95bb7f7bac1ea5f569921783ffedd1f0a88")
+
+    depends_on("python@3.7:", type=("build", "run"))
+    depends_on("py-jsonschema", type=("build", "run"))
+    depends_on("py-sortedcollections", type=("build", "run"))
+    depends_on("py-spinnutilities@6.0.0", type=("build", "run"))
+    depends_on("py-spinnmachine@6.0.0", type=("build", "run"))
diff --git a/packages/py-spinnfrontendcommon/package.py b/packages/py-spinnfrontendcommon/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..d1881565d3ff2e96cc3f38a79611a88139abe359
--- /dev/null
+++ b/packages/py-spinnfrontendcommon/package.py
@@ -0,0 +1,27 @@
+# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PySpinnfrontendcommon(PythonPackage):
+    """This package provides utilities for specifying binary data
+    algorithmically, and executing the specifications to produce the data."""
+
+    homepage = "https://github.com/SpiNNakerManchester/SpiNNFrontEndCommon"
+    pypi = "SpiNNFrontEndCommon/SpiNNFrontEndCommon-1!6.0.0.tar.gz"
+
+    version("6.0.0", sha256="a45770487d2b7e8d865bb5fb472f6f4ad22025924448611c19ea9afa7ddf64e5")
+
+    depends_on("python@3.7:", type=("build", "run"))
+    depends_on("py-spinnutilities@6.0.0", type=("build", "run"))
+    depends_on("py-spinnmachine@6.0.0", type=("build", "run"))
+    depends_on("py-spinnman@6.0.0", type=("build", "run"))
+    depends_on("py-spinnaker-pacman@6.0.0", type=("build", "run"))
+    depends_on("py-spinnaker-dataspecification@6.0.0", type=("build", "run"))
+    depends_on("py-spalloc@6.0.0", type=("build", "run"))
+    depends_on("py-requests@2.4.1:", type=("build", "run"))
+    depends_on("py-scipy@0.16.0:1.7", type=("build", "run"), when="^python@3.7")
+    depends_on("py-scipy@0.16.0:", type=("build", "run"))
diff --git a/packages/py-spinnmachine/package.py b/packages/py-spinnmachine/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..133f31245037da4c9b950236500ea80229f9ae39
--- /dev/null
+++ b/packages/py-spinnmachine/package.py
@@ -0,0 +1,19 @@
+# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PySpinnmachine(PythonPackage):
+    """This package is used to provide a Python representation of a SpiNNaker
+    machine."""
+
+    homepage = "https://github.com/SpiNNakerManchester/SpiNNMachine"
+    pypi = "SpiNNMachine/SpiNNMachine-1!6.0.0.tar.gz"
+
+    version("6.0.0", sha256="713510e78353a3772b4a5fb52cf742ea76fb5d520f5fac3b5c3fd34534afc7ed")
+
+    depends_on("python@3.7:", type=("build", "run"))
+    depends_on("py-spinnutilities@6.0.0", type=("build", "run"))
diff --git a/packages/py-spinnman/package.py b/packages/py-spinnman/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..b9604f61645de77b34dfa29413f17b54635deecd
--- /dev/null
+++ b/packages/py-spinnman/package.py
@@ -0,0 +1,20 @@
+# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PySpinnman(PythonPackage):
+    """This package provides utilities for interacting with a SpiNNaker
+    machine."""
+
+    homepage = "https://github.com/SpiNNakerManchester/SpiNNMan"
+    pypi = "SpiNNMan/SpiNNMan-1!6.0.0.tar.gz"
+
+    version("6.0.0", sha256="9e88789de3417bd9aa9d038c14efe7312569293aadc3a110dfd0d1005cc21241")
+
+    depends_on("python@3.7:", type=("build", "run"))
+    depends_on("py-spinnutilities@6.0.0", type=("build", "run"))
+    depends_on("py-spinnmachine@6.0.0", type=("build", "run"))
diff --git a/packages/py-spinnutilities/package.py b/packages/py-spinnutilities/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..687340dfea2d0ca6a0536519b14b9308adcf9a03
--- /dev/null
+++ b/packages/py-spinnutilities/package.py
@@ -0,0 +1,24 @@
+# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PySpinnutilities(PythonPackage):
+    """This provides basic utility functions and classes to other parts of
+    SpiNNaker"s tooling. Nothing in here knows anything about SpiNNaker
+    functionality."""
+
+    homepage = "https://github.com/SpiNNakerManchester/SpiNNUtils"
+    pypi = "SpiNNUtilities/SpiNNUtilities-1!6.0.0.tar.gz"
+
+    version("6.0.0", sha256="b84b1c174dd0824eac97e8bbb64a56189c082de7fcfc5be07cf9ac3bed9efd81")
+
+    depends_on("python@3.7:", type=("build", "run"))
+    depends_on("py-appdirs", type=("build", "run"))
+    depends_on("py-numpy@1.13:1.20", when="^python@3.7")
+    depends_on("py-numpy", type=("build", "run"), when="^python@3.8:")
+    depends_on("py-pyyaml", type=("build", "run"))
+    depends_on("py-requests", type=("build", "run"))
diff --git a/packages/py-spynnaker/package.py b/packages/py-spynnaker/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..03d8083c943ded38982cb01920103b18584590ef
--- /dev/null
+++ b/packages/py-spynnaker/package.py
@@ -0,0 +1,36 @@
+# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+
+from spack.package import *
+
+
+class PySpynnaker(PythonPackage):
+    """This package provides common code for PyNN implementations for
+    SpiNNaker."""
+
+    homepage = "https://github.com/SpiNNakerManchester/sPyNNaker"
+    pypi = "sPyNNaker/sPyNNaker-1!6.0.0.tar.gz"
+
+    version("6.0.0", sha256="821c2af838cffeba65feb0e12bf3a4c39b1b6a597d224871cf0f0476e38edfef")
+
+    depends_on("python@3.7:", type=("build", "run"))
+    depends_on("py-spinnutilities@6.0.0", type=("build", "run"))
+    depends_on("py-spinnmachine@6.0.0", type=("build", "run"))
+    depends_on("py-spinnman@6.0.0", type=("build", "run"))
+    depends_on("py-spinnaker-pacman@6.0.0", type=("build", "run"))
+    depends_on("py-spinnaker-dataspecification@6.0.0", type=("build", "run"))
+    depends_on("py-spalloc@6.0.0", type=("build", "run"))
+    depends_on("py-spinnfrontendcommon@6.0.0", type=("build", "run"))
+    depends_on("py-matplotlib@:3.5.99", type=("build", "run"), when="^python@3.7")
+    depends_on("py-matplotlib", type=("build", "run"))
+    depends_on("py-quantities@0.12.1:", type=("build", "run"))
+    #depends_on("py-pynn@0.9.1:0.9", type=("build", "run"))
+    depends_on("py-pynn@0.9.1:", type=("build", "run"))
+    #depends_on("py-lazyarray@0.2.9:0.4.0", type=("build", "run"))
+    depends_on("py-lazyarray@0.2.9:", type=("build", "run"))
+    #depends_on("py-appdirs@1.4.2:1.9", type=("build", "run"))
+    depends_on("py-appdirs@1.4.2:", type=("build", "run"))
+    #depends_on("py-neo@0.5.2:0.9", type=("build", "run"))
+    depends_on("py-neo@0.5.2:", type=("build", "run"))
diff --git a/packages/pynn-brainscales/package.py b/packages/pynn-brainscales/package.py
index 4b32f2aafa2979780d5e957ba87446a4590845f2..95e0d97a9e46c99867daac395add7a52b0d8f903 100644
--- a/packages/pynn-brainscales/package.py
+++ b/packages/pynn-brainscales/package.py
@@ -3,6 +3,7 @@
 #
 # SPDX-License-Identifier: (Apache-2.0 OR MIT)
 from spack import *
+from spack.pkg.builtin.boost import Boost
 
 
 class PynnBrainscales(WafPackage):
@@ -11,14 +12,15 @@ class PynnBrainscales(WafPackage):
     homepage = "https://github.com/electronicvisions/pynn-brainscales"
     git      = "https://github.com/electronicvisions/pynn-brainscales.git"
 
-    version('4.0-a1', branch='waf')
+    version('4.0-a3', branch='waf')
 
     # PPU compiler dependencies
-    depends_on('oppulance@4.0:')
+    depends_on('oppulance@4.0-a3')
 
     # host software dependencies
     depends_on('bitsery', type=('build', 'link', 'run'))
     depends_on('binutils+gold+ld+plugins', type=('build', 'link', 'run')) # specialize
+    depends_on(Boost.with_default_variants)
     depends_on('boost@1.69.0: +graph+icu+mpi+python+numpy+coroutine+context cxxstd=17', type=('build', 'link', 'run')) # specialize boost (non-clingo, type=('build', 'link', 'run'))
     depends_on('cereal', type=('build', 'link', 'run'))
     depends_on('cppcheck', type=('build', 'link', 'run'))
diff --git a/spack.yaml b/spack.yaml
index aff6de997ff8c46976385a6dd362776aa2e1de6e..fa04410e01e78c76b8f4ce95462236484263b5c5 100644
--- a/spack.yaml
+++ b/spack.yaml
@@ -20,7 +20,7 @@ spack:
     #- py-version-query
     # EBRAINS simulators
     - nest@3.3 +python +gsl +mpi
-    - arbor +python +mpi ^python@3:3.9
+    - arbor@0.8 +python +mpi
     - neuron +mpi
     - py-pynn@0.10.1 +mpi
     - py-brian2
@@ -30,8 +30,8 @@ spack:
     - py-pyaescrypt
     - py-formencode
     - tvb-framework ^binutils+ld+gold
-    - pynn-brainscales@4.0-a1 ^log4cxx@0.10.0 ^googletest@1.11.0:+gmock
-    - hxtorch@4.0-a1 ^log4cxx@0.10.0 ^googletest@1.11.0:+gmock
+    - pynn-brainscales@4.0-a3 ^log4cxx@0.10.0 ^googletest@1.11.0:+gmock
+    - hxtorch@4.0-a3 ^log4cxx@0.10.0 ^googletest@1.11.0:+gmock
     - py-neo
     - py-hdmf
     - py-pynwb
@@ -68,4 +68,5 @@ spack:
     #- sda
     # demo for codejam12
     #- funniest1022
-  concretization: together
+  concretizer:
+    unify: true