diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index e8524909bf8a135e2bed01c413ca1b1f5ec91d46..60be5dfdd35512dd65ca9b3f75e74eced7d439c8 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -4,10 +4,13 @@ stages:
 
 variables:
   BUILD_ENV_DOCKER_IMAGE: docker-registry.ebrains.eu/ebrains-spack-build-env/base:devel
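+  # Collab JupyterLab runtime image used to run the widget-script job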
+  RUN_ENV_DOCKER_IMAGE: docker-registry.ebrains.eu/clb-jupyter-image/ebrains:dev-a7ab31be
   SPACK_PATH_GITLAB: /mnt/spack_v0.23.1
   SYSTEMNAME: ebrainslab
   GIT_SUBMODULE_STRATEGY: recursive
   GIT_CLEAN_FLAGS: -ffdxq
+  RUNNER_AFTER_SCRIPT_TIMEOUT: 20
 
 # ===================================================================
 # LAB DEPLOYMENTS
@@ -47,6 +49,16 @@ variables:
     - if [ $(kubectl get pods -l job-name=simplejob${CI_PIPELINE_ID} -o jsonpath='{.items[0].status.containerStatuses[0].state.terminated.exitCode}') -ne 0 ]; then exit 1; fi;
     # delete the job, as we have the logs here
     - kubectl delete job simplejob${CI_PIPELINE_ID} || true
+  after_script:
+    - kubectl config use-context $KUBE_CONTEXT
+    - sh create_job_widget_script.sh $CI_PIPELINE_ID $RUN_ENV_DOCKER_IMAGE $INSTALLATION_ROOT $SPACK_ENV $RELEASE_NAME $LAB_KERNEL_ROOT
+    - cat widget-script.yml
+    - kubectl create -f widget-script.yml
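+    # wait for the widget-script job to finish: poll the pod every 5 minutes until it is no longer in the Running phase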
+    - while true; do sleep 300; x=$(kubectl get pods -l job-name=widget-script${CI_PIPELINE_ID} -o jsonpath='{.items[0].status.phase}'); if [ "$x" != "Running" ]; then break; fi; done
+    - kubectl logs jobs/widget-script${CI_PIPELINE_ID} | tee log.txt
+    - if [ $(kubectl get pods -l job-name=widget-script${CI_PIPELINE_ID} -o jsonpath='{.items[0].status.containerStatuses[0].state.terminated.exitCode}') -ne 0 ]; then exit 1; fi;
+    - kubectl delete job widget-script${CI_PIPELINE_ID} || true
   # artifacts:
   #   paths:
   #     - spack_logs
diff --git a/create_JupyterLab_kernel.sh b/create_JupyterLab_kernel.sh
index 0f02e1210d40d82862917e22282d3a544d0ec498..769995959b5fce421db1b8585dc3b7d2a939077f 100644
--- a/create_JupyterLab_kernel.sh
+++ b/create_JupyterLab_kernel.sh
@@ -37,7 +37,7 @@ cp $INSTALLATION_ROOT/spack/var/spack/environments/$EBRAINS_SPACK_ENV/load_env.s
 # and the location of python modules installed in the base docker Collab image
 cat <<EOF >> $KERNEL_PATH/bin/env.sh
 export PATH=\$PATH:/opt/app-root/src/.local/bin
-export PYTHONPATH=\$PYTHONPATH:/opt/app-root/src/.local/lib/python3.8/site-packages:/usr/local/lib/python3.8/dist-packages
+export PYTHONPATH=\$PYTHONPATH:/opt/conda/lib/python3.11/site-packages
 export R_LIBS_USER=/opt/app-root/src/.local/lib/R/site-library
 mkdir -p \$R_LIBS_USER
 export R_LIBS=\$R_LIBS_USER:\$R_LIBS
diff --git a/create_job.sh b/create_job.sh
index f7b5d08962446b3edb070093608caa66fe19dd2c..56d80c8fc0aaa7337b340b00c30bd61945a1c883 100644
--- a/create_job.sh
+++ b/create_job.sh
@@ -105,13 +105,7 @@ spec:
           persistentVolumeClaim:
             claimName: shared-binaries
         - name: tmp
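+          # node-local scratch space for the job's temporary files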
-          ephemeral:
-            volumeClaimTemplate:
-              spec:
-                accessModes: [ "ReadWriteMany" ]
-                storageClassName: "longhorn-0"
-                resources:
-                  requests:
-                    storage: 50Gi
+          emptyDir: {}
       restartPolicy: Never
 EOT
diff --git a/create_job_widget_script.sh b/create_job_widget_script.sh
new file mode 100644
index 0000000000000000000000000000000000000000..4bc5c613641ce074a5319f730cb4220c5a51d986
--- /dev/null
+++ b/create_job_widget_script.sh
@@ -0,0 +1,69 @@
+#!/bin/bash
+
+# ===========================================================================================================
+# title         : create_job_widget_script.sh
+# usage         : ./create_job_widget_script.sh $OC_JOB_ID $RUN_ENV_DOCKER_IMAGE $INSTALLATION_ROOT
+#                 $SPACK_ENV $RELEASE_NAME $LAB_KERNEL_ROOT
+# description   : creates a Kubernetes Job manifest that generates a script to load the Jupyter widget extensions for a given environment
+# ===========================================================================================================
+
+OC_JOB_ID=$1
+RUN_ENV_DOCKER_IMAGE=$2
+INSTALLATION_ROOT=$3
+EBRAINS_SPACK_ENV=$4
+RELEASE_NAME=$5
+LAB_KERNEL_ROOT=$6
+
+cat <<EOT >> widget-script.yml
+apiVersion: batch/v1
+kind: Job
+metadata:
+  name: widget-script${OC_JOB_ID}
+spec:
+  parallelism: 1
+  completions: 1
+  backoffLimit: 0
+  template:
+    spec:
+      containers:
+      - name: widget-script
+        image: ${RUN_ENV_DOCKER_IMAGE}
+        imagePullPolicy: Always
+        resources:
+          limits:
+            cpu: '1'
+            memory: '1Gi'
+          requests:
+            cpu: '0.5'
+            memory: '500Mi'
+        volumeMounts:
+          - name: sharedbin
+            mountPath: /srv
+        command:
+        - /bin/bash
+        - -c
+        - |
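+          # activate the Spack environment and write a "spack load" activation script for the TVB Jupyter extensions into the kernel's bin directory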
+          . \$INSTALLATION_ROOT/spack/share/spack/setup-env.sh
+          spack env activate --without-view \$EBRAINS_SPACK_ENV
+          KERNEL_PATH=\$LAB_KERNEL_ROOT/\$(echo "\$RELEASE_NAME" | tr '[:upper:]' '[:lower:]')
+          spack load --sh py-tvb-ext-bucket py-tvb-ext-unicore py-tvb-ext-xircuits > \$KERNEL_PATH/bin/widget_activation.sh
+        env:
+          - name: SYSTEMNAME
+            value: ebrainslab
+          - name: SPACK_DISABLE_LOCAL_CONFIG
+            value: "true"
+          - name: INSTALLATION_ROOT
+            value: "$INSTALLATION_ROOT"
+          - name: EBRAINS_SPACK_ENV
+            value: "$EBRAINS_SPACK_ENV"
+          - name: RELEASE_NAME
+            value: "$RELEASE_NAME"
+          - name: LAB_KERNEL_ROOT
+            value: "$LAB_KERNEL_ROOT"
+      volumes:
+        - name: sharedbin
+          persistentVolumeClaim:
+            claimName: shared-binaries
+      restartPolicy: Never
+EOT
diff --git a/install_spack_env.sh b/install_spack_env.sh
index 41a6526a3dd7d72e41a74293d2e4d76c218c014a..a0ed1f67a75ffc4d7f740871259abbf95f5ae258 100644
--- a/install_spack_env.sh
+++ b/install_spack_env.sh
@@ -96,9 +96,6 @@ cp /tmp/spack.yaml ${CI_SPACK_ROOT}/var/spack/environments/$EBRAINS_SPACK_ENV/
 # activate environment
 spack env activate --without-view $EBRAINS_SPACK_ENV
 
-# deactivate view during concretization and installation
-spack env view disable
-
 spack concretize --force --fresh --test root
 
 # dump dag to file
@@ -147,10 +144,6 @@ fi
 # delay exit code until we have updated the cache below
 spack install --no-check-signature -y -j$SPACK_JOBS --fresh --test root && spack_install_ret=$? || spack_install_ret=$?
 
-# re-enable view
-spack env view enable
-spack env view regenerate
-
 # no need to update the local cache nor the remote cache if we don't want to update
 if [ -n "${OCI_CACHE_PREFIX}" ] && [ "${UPDATE_SPACK_OCI_CACHES:-false}" = "true" ]; then
     # push previously missing (but now installed) packages to the local cache
diff --git a/packages/py-bluepyopt/package.py b/packages/py-bluepyopt/package.py
new file mode 100644
index 0000000000000000000000000000000000000000..aa4e5254800dfa5a6d358856fe5637b77dcefbef
--- /dev/null
+++ b/packages/py-bluepyopt/package.py
@@ -0,0 +1,42 @@
+# Copyright 2013-2024 Lawrence Livermore National Security, LLC and other
+# Spack Project Developers. See the top-level COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
+from spack.package import *
+
+
+class PyBluepyopt(PythonPackage):
+    """Bluebrain Python Optimisation Library"""
+
+    homepage = "https://github.com/BlueBrain/BluePyOpt"
+    pypi = "bluepyopt/bluepyopt-1.9.27.tar.gz"
+
+    license("LGPL-3.0-only")
+
+    # NOTE: when adding a new release, check pmi_rank.patch compatibility
+    version("1.14.18", sha256="23d6239294d944c8f9d4ea298091bcf243d236735844e1bcba60535a0f520ca8")
+    version("1.14.11", sha256="fe2830c36699a93d2ef9ddef316da42f9c57ca6654c92356eab973ee2298ebf7")
+    version("1.14.4", sha256="7567fd736053250ca06030f67ad93c607b100c2b98df8dc588c26b64cb3e171c")
+
+    # patch required to avoid premature MPI initialization when the mechanism library is linked against hpe-mpi
+    patch("pmi_rank.patch")
+
+    variant("scoop", default=False, description="Use BluePyOpt together with py-scoop")
+
+    depends_on("py-setuptools", type="build")
+    depends_on("py-numpy@1.6:", type=("build", "run"))
+    depends_on("py-pandas@0.18:", type=("build", "run"))
+    depends_on("py-deap@1.3.3:", type=("build", "run"))
+    depends_on("py-efel@2.13:", type=("build", "run"))
+    depends_on("py-ipyparallel", type=("build", "run"))
+    depends_on("py-pickleshare@0.7.3:", type=("build", "run"))
+    depends_on("py-jinja2@2.8:", type=("build", "run"))
+    depends_on("py-future", type=("build", "run"))
+    depends_on("py-pebble@4.6:", type=("build", "run"))
+    depends_on("py-scoop@0.7:", type=("build", "run"), when="+scoop")
+    depends_on("neuron@7.4:", type=("build", "run"), when="@:1.14.4")
+    depends_on("neuron@7.8:", type=("build", "run"), when="@1.14.11:")
+
+    def setup_run_environment(self, env):
+        env.unset("PMI_RANK")
+        env.set("NEURON_INIT_MPI", "0")
diff --git a/packages/py-bluepyopt/pmi_rank.patch b/packages/py-bluepyopt/pmi_rank.patch
new file mode 100644
index 0000000000000000000000000000000000000000..21a73849b2868389f5e05d9670adc0fb18fadab5
--- /dev/null
+++ b/packages/py-bluepyopt/pmi_rank.patch
@@ -0,0 +1,17 @@
+diff --git a/bluepyopt/ephys/simulators.py b/bluepyopt/ephys/simulators.py
+index e71ad8b..3c93237 100644
+--- a/bluepyopt/ephys/simulators.py
++++ b/bluepyopt/ephys/simulators.py
+@@ -89,6 +89,12 @@ class NrnSimulator(object):
+             NrnSimulator._nrn_disable_banner()
+             self.banner_disabled = True
+ 
++        # Certain MPI libraries (e.g. hpe-mpt) use the PMI_RANK env variable to initialize
++        # MPI before MPI_Init is called, which is undesirable. Unset this variable if it
++        # exists to avoid issues when loading NEURON and the mechanism library.
++        if 'PMI_RANK' in os.environ:
++            os.environ.pop("PMI_RANK")
++
+         import neuron  # NOQA
+ 
+         return neuron
diff --git a/packages/py-ipycanvas/package.py b/packages/py-ipycanvas/package.py
index 0883611fad05a28d9dd5364afb65ecece7eb3e68..a37909bf23215c56ccd35371d9101f3828da3e4d 100644
--- a/packages/py-ipycanvas/package.py
+++ b/packages/py-ipycanvas/package.py
@@ -24,6 +24,7 @@ class PyIpycanvas(PythonPackage):
     # begin EBRAINS (added)
     depends_on("python@3.6:", when="@0.10:", type=("build", "run"))
     depends_on("python@3.7:", when="@0.11:", type=("build", "run"))
+    depends_on("py-jupyter-core", type=("build", "run"))
     # end EBRAINS
     depends_on("py-setuptools@40.8:", type="build")
     depends_on("py-jupyter-packaging@0.7", type="build")
diff --git a/packages/py-snudda/package.py b/packages/py-snudda/package.py
index f8458a3fc7cf8fc6a2b1752265c13a2531e31d98..8d0ca3d80ac52b0d889b54332643d224b4152e09 100644
--- a/packages/py-snudda/package.py
+++ b/packages/py-snudda/package.py
@@ -10,10 +10,12 @@ class PySnudda(PythonPackage):
     """Snudda creates the connectivity for realistic networks of simulated neurons in silico in a bottom up fashion that can then be simulated using the NEURON software."""
 
     homepage = "https://pypi.org/project/snudda/"
-    pypi     = "snudda/snudda-2.0.1.tar.gz"
+    pypi     = "snudda/snudda-2.1.2.tar.gz"
 
     maintainers = ["hjorth"]
-    
+
+    version("2.1.10", "a1f5bf39ee0418e7ce8a0783042c59c8")
+    version("2.1.2", "5d61a548995f88f95f680bf124534287")
     version("2.0.1", "0d78f5ca2cfe728b216f980078d8558a")
     version("1.4.71", "5871e4af5e1a011d26a22d7dc384638a")
     version("1.4.0", "55f9b398b01b34bf3cec28c8a3aebc78")
@@ -22,23 +24,20 @@ class PySnudda(PythonPackage):
     depends_on("unzip",                 type=("build"))
     depends_on("py-setuptools",         type=("build"))
     depends_on("py-importlib-metadata", type=("build","run"))
-    depends_on("py-bluepyopt@1.11.7:",  type=("build","run"))
-    depends_on("py-h5py@3.2.1:",        type=("build","run"))
-    depends_on("py-ipyparallel@6.3.0:", type=("build","run"))
-    depends_on("py-matplotlib@3.3.4:",  type=("build","run"))
-    depends_on("py-mpi4py@3.0.3:",      type=("build","run"))
-    depends_on("py-numpy@1.20.2:",      type=("build","run"))
-    depends_on("py-scipy@1.6.3:",       type=("build","run"))
-    depends_on("py-libsonata@0.0.2:",   type=("build","run"))
-    depends_on("py-pyzmq@22.0.3:",      type=("build","run"))
-    depends_on("py-numexpr@2.7.3:",     type=("build","run"))
-    depends_on("neuron@7.8.2:",         type=("build","run"))
+    depends_on("py-bluepyopt@1.14.18:", type=("build","run"))
+    depends_on("py-h5py@3.12.1:",       type=("build","run"))
+    depends_on("py-ipyparallel@8.4.1:", type=("build","run"))
+    depends_on("py-matplotlib@3.8:",    type=("build","run"))
+    depends_on("py-mpi4py@4.0.1:",      type=("build","run"))
+    depends_on("py-numpy@1.25.2:1",     type=("build","run"))
+    depends_on("py-scipy@1.13.1:",      type=("build","run"))
+    depends_on("py-numexpr@2.9.0:",     type=("build","run"))
+    depends_on("neuron@8.2.3:",         type=("build","run"))
     depends_on("py-pyswarms@1.3.0:",    type=("build","run"))
-    depends_on("py-psutil",             type=("build","run"))
+    depends_on("py-psutil@5.9.5:",      type=("build","run"))
     depends_on("py-cython",             type=("build","run"))
-    depends_on("py-numba@0.53.1:",      type=("build","run"))
-    depends_on("open3d+python",         type=("build","run"), when="@2:")
-
+    depends_on("py-numba@0.60.0:",      type=("build","run"))
+    depends_on("open3d+python@0.18:",   type=("build","run"), when="@2:")
 
     # snudda tarballs in pypi do not include the tests/ dir: just use default spack tests for now
     # @run_after('install')
diff --git a/packages/py-tvb-ext-bucket/package.py b/packages/py-tvb-ext-bucket/package.py
index 29fdae03f54907831d42980d75592040b0be5f71..ffb6558ea18c89bdd7264e7e64b898e5c4d10c5a 100644
--- a/packages/py-tvb-ext-bucket/package.py
+++ b/packages/py-tvb-ext-bucket/package.py
@@ -15,6 +15,7 @@ class PyTvbExtBucket(PythonPackage):
     depends_on('py-setuptools', type='build')
     depends_on('python@3.8:', type=('build', 'run'))
     depends_on('py-jupyter-server', type=('build', 'run'))
+    depends_on('py-jupyter-core', type=('build', 'run'))
     depends_on('py-ebrains-drive@0.5.0:', type=('build', 'run'))
     depends_on('py-hatchling@1.5.0:', type='build')
     depends_on('py-jupyterlab@3.4.7:3', type=('build', 'run'))
diff --git a/packages/py-tvb-ext-unicore/package.py b/packages/py-tvb-ext-unicore/package.py
index 786e2f6e02dc0e61dbfadce7901cc7f6d6dc2a7e..f55bf951c3904601876a1db336cf01f651188d84 100644
--- a/packages/py-tvb-ext-unicore/package.py
+++ b/packages/py-tvb-ext-unicore/package.py
@@ -21,6 +21,7 @@ class PyTvbExtUnicore(PythonPackage):
     depends_on('py-hatch-jupyter-builder@0.5:', type='build')
     depends_on('py-jupyterlab@3.4.7:3', type=('build', 'run'))
     depends_on('py-jupyter-server', type=('build', 'run'))
+    depends_on('py-jupyter-core', type=('build', 'run'))
     depends_on('py-pyunicore@1.0:', type=('build', 'run'))
     depends_on('py-jupyter-packaging@0.10:', type='build')
     depends_on('npm', type='build')
diff --git a/packages/py-tvb-ext-xircuits/package.py b/packages/py-tvb-ext-xircuits/package.py
index 23bd0db6c36c469890c74a9b0b252f4a651f241a..5f3b5e79e0b6c4dd0c4d138ad0e006dfc74c9f54 100644
--- a/packages/py-tvb-ext-xircuits/package.py
+++ b/packages/py-tvb-ext-xircuits/package.py
@@ -38,7 +38,7 @@ class PyTvbExtXircuits(PythonPackage):
     depends_on('py-tvb-framework', type=("build", "run"))
     depends_on('py-tvb-ext-bucket', type=("build", "run"))
     depends_on('py-tvb-ext-unicore', type=("build", "run"))
-    depends_on('py-tvb-widgets@1.0:', type=("build", "run"))
+    depends_on('py-tvb-widgets', type=("build", "run"))
 
     depends_on('py-pytest', type='test')
 
diff --git a/packages/py-vbi/package.py b/packages/py-vbi/package.py
index 2fee359023ae13a16de60e9b665cab638e01462d..238ccf0f1c2113db3e2f10e38123c3a2ca3c29cf 100644
--- a/packages/py-vbi/package.py
+++ b/packages/py-vbi/package.py
@@ -12,6 +12,8 @@ class PyVbi(PythonPackage, CudaPackage):
     git = "https://github.com/ins-amu/vbi"
     url = "https://github.com/ins-amu/vbi/archive/refs/tags/v0.1.3.tar.gz"
 
+    version("0.1.3.3", "a75041af2bd5dbf6e11970a82620f820c74cb124000403f711d657f347f18a88")
+    version("0.1.3.2", "6de0367b15834195ad1c14c61e4286875b5fe32d86975185977065765564576d")
     version("0.1.3.1", "53f085aba83129d01a8ad6d3deadc768db38b7a8545bcbc4427e70db2b6e66c0")
     version("0.1.3", "8ccccf2bf0def2bf97f4706b8597c4cb3ac5f0cf2ac5f08566e22cd6273c1163")
     version("0.1.2", "6ccfeeec718be62a480002a8370130a3e3344955186f99ecbb15b646b68210d6")
diff --git a/site-config/ebrainslab/spack.yaml b/site-config/ebrainslab/spack.yaml
index 5e2f936370d131492c7287699cbfe3714ca55340..2533621056e19d28234ff4cf3e40551e78daab22 100644
--- a/site-config/ebrainslab/spack.yaml
+++ b/site-config/ebrainslab/spack.yaml
@@ -7,7 +7,7 @@ spack:
     - py-notebook
     - r-irkernel
     # "collab"-specific constraint to match ("jupyterlab_widgets") in the base image
-    - py-ipywidgets
+    - py-ipywidgets@7.7.0
+    - py-jupyterlab-widgets@1.1.0
     # Collab utils
     - clb-nb-utils@0.1.0
-
diff --git a/spack.yaml b/spack.yaml
index 9c100ddac200ac359a9e259cb7847bb625ab78a3..da2a6fb26a1173f4bda40ceb4bc7397673e7b13e 100644
--- a/spack.yaml
+++ b/spack.yaml
@@ -18,7 +18,7 @@ spack:
     - jaxsnn@10.0-a1
     - py-bluepyefe@2.3.6
     - py-bluepymm@0.8.7
-    - py-bluepyopt@1.14.11
+    - py-bluepyopt@1.14.18
     - py-bsb-core@5.0.2
     - py-bsb-hdf5@5.0.2
     - py-bsb-yaml@4.2.2
@@ -51,7 +51,7 @@ spack:
     - py-quantities-scidash@0.12.4.3
     - py-quantities@0.16.0
     - py-siibra@1.0a9
-    - py-snudda@2.0.1
+    - py-snudda@2.1.10
     - py-spynnaker@7.3.0
     - py-tvb-framework@2.9
     - py-tvb-contrib@2.9.1
@@ -62,7 +62,7 @@ spack:
     - py-tvb-ext-unicore
     - py-tvb-ext-xircuits@1.1.0
     - py-viziphant@0.4.0
-    - py-vbi@0.1.3.1
+    - py-vbi@0.1.3.3
     - pynn-brainscales@10.0-a1
     - r-rgsl@0.1.1
     - r-sbtabvfgen@0.1